diff --git a/.gitattributes b/.gitattributes index 176a458f9..6313b56c5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1 @@ -* text=auto +* text=auto eol=lf diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index bf610d104..fc5f08233 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -42,6 +42,7 @@ jobs: pip install pandas==0.25.3 numpy==1.17.5 - name: Test that schema generation has no effect run: | + pip install vl-convert-python python tools/generate_schema_wrapper.py # This gets the paths of all files which were either deleted, modified # or are not yet tracked by Git @@ -60,6 +61,7 @@ jobs: if [ ${#files_cleaned[@]} -gt 0 ]; then echo "The code generation modified the following files:" echo $files + git diff exit 1 fi - name: Test with pytest diff --git a/altair/__init__.py b/altair/__init__.py index ed54e4606..362745386 100644 --- a/altair/__init__.py +++ b/altair/__init__.py @@ -1,5 +1,5 @@ # ruff: noqa -__version__ = "5.4.0dev" +__version__ = "5.5.0dev" # The content of __all__ is automatically written by # tools/update_init_file.py. Do not modify directly. 
@@ -50,7 +50,6 @@ "ChainedWhen", "Chart", "ChartDataType", - "ChartType", "Color", "ColorDatum", "ColorDef", @@ -301,7 +300,6 @@ "Opacity", "OpacityDatum", "OpacityValue", - "Optional", "Order", "OrderFieldDef", "OrderOnlyDef", @@ -613,7 +611,6 @@ "field", "graticule", "hconcat", - "is_chart_type", "jupyter", "layer", "limit_rows", @@ -636,6 +633,7 @@ "to_json", "to_values", "topo_feature", + "typing", "utils", "v5", "value", @@ -655,7 +653,8 @@ def __dir__(): from altair.vegalite.v5.schema.core import Dict from altair.jupyter import JupyterChart from altair.expr import expr -from altair.utils import AltairDeprecationWarning, parse_shorthand, Optional, Undefined +from altair.utils import AltairDeprecationWarning, parse_shorthand, Undefined +from altair import typing def load_ipython_extension(ipython): diff --git a/altair/expr/consts.py b/altair/expr/consts.py index b90c93d4f..8cb4a57c4 100644 --- a/altair/expr/consts.py +++ b/altair/expr/consts.py @@ -1,6 +1,5 @@ from __future__ import annotations - CONST_LISTING = { "NaN": "not a number (same as JavaScript literal NaN)", "LN10": "the natural log of 10 (alias to Math.LN10)", diff --git a/altair/expr/core.py b/altair/expr/core.py index cfaa9984c..e3bc65a52 100644 --- a/altair/expr/core.py +++ b/altair/expr/core.py @@ -1,6 +1,9 @@ from __future__ import annotations -from typing import Any -from ..utils import SchemaBase + +from typing import Any, Dict, Union +from typing_extensions import TypeAlias + +from altair.utils import SchemaBase class DatumType: @@ -232,3 +235,6 @@ def __init__(self, group, name) -> None: def __repr__(self) -> str: return f"{self.group}[{self.name!r}]" + + +IntoExpression: TypeAlias = Union[bool, None, str, OperatorMixin, Dict[str, Any]] diff --git a/altair/jupyter/js/index.js b/altair/jupyter/js/index.js index 597738a90..58b936091 100644 --- a/altair/jupyter/js/index.js +++ b/altair/jupyter/js/index.js @@ -1,4 +1,4 @@ -import vegaEmbed from 
"https://esm.sh/vega-embed@6?deps=vega@5&deps=vega-lite@5.19.0"; +import vegaEmbed from "https://esm.sh/vega-embed@6?deps=vega@5&deps=vega-lite@5.20.1"; import lodashDebounce from "https://esm.sh/lodash-es@4.17.21/debounce"; // Note: For offline support, the import lines above are removed and the remaining script diff --git a/altair/jupyter/jupyter_chart.py b/altair/jupyter/jupyter_chart.py index 3b1fa07ab..675cce15f 100644 --- a/altair/jupyter/jupyter_chart.py +++ b/altair/jupyter/jupyter_chart.py @@ -1,17 +1,19 @@ from __future__ import annotations + import json -import anywidget -import traitlets import pathlib from typing import Any +import anywidget +import traitlets + import altair as alt +from altair import TopLevelSpec from altair.utils._vegafusion_data import ( - using_vegafusion, compile_to_vegafusion_chart_state, + using_vegafusion, ) -from altair import TopLevelSpec -from altair.utils.selection import IndexSelection, PointSelection, IntervalSelection +from altair.utils.selection import IndexSelection, IntervalSelection, PointSelection _here = pathlib.Path(__file__).parent @@ -216,7 +218,7 @@ def __init__( ) @traitlets.observe("chart") - def _on_change_chart(self, change): + def _on_change_chart(self, change): # noqa: C901 """Updates the JupyterChart's internal state when the wrapped Chart instance changes.""" new_chart = change.new selection_watches = [] diff --git a/altair/typing.py b/altair/typing.py new file mode 100644 index 000000000..cd8cb1489 --- /dev/null +++ b/altair/typing.py @@ -0,0 +1,96 @@ +"""Public types to ease integrating with `altair`.""" + +from __future__ import annotations + +__all__ = [ + "ChannelAngle", + "ChannelColor", + "ChannelColumn", + "ChannelDescription", + "ChannelDetail", + "ChannelFacet", + "ChannelFill", + "ChannelFillOpacity", + "ChannelHref", + "ChannelKey", + "ChannelLatitude", + "ChannelLatitude2", + "ChannelLongitude", + "ChannelLongitude2", + "ChannelOpacity", + "ChannelOrder", + "ChannelRadius", + 
"ChannelRadius2", + "ChannelRow", + "ChannelShape", + "ChannelSize", + "ChannelStroke", + "ChannelStrokeDash", + "ChannelStrokeOpacity", + "ChannelStrokeWidth", + "ChannelText", + "ChannelTheta", + "ChannelTheta2", + "ChannelTooltip", + "ChannelUrl", + "ChannelX", + "ChannelX2", + "ChannelXError", + "ChannelXError2", + "ChannelXOffset", + "ChannelY", + "ChannelY2", + "ChannelYError", + "ChannelYError2", + "ChannelYOffset", + "ChartType", + "EncodeKwds", + "Optional", + "is_chart_type", +] + +from altair.utils.schemapi import Optional +from altair.vegalite.v5.api import ChartType, is_chart_type +from altair.vegalite.v5.schema.channels import ( + ChannelAngle, + ChannelColor, + ChannelColumn, + ChannelDescription, + ChannelDetail, + ChannelFacet, + ChannelFill, + ChannelFillOpacity, + ChannelHref, + ChannelKey, + ChannelLatitude, + ChannelLatitude2, + ChannelLongitude, + ChannelLongitude2, + ChannelOpacity, + ChannelOrder, + ChannelRadius, + ChannelRadius2, + ChannelRow, + ChannelShape, + ChannelSize, + ChannelStroke, + ChannelStrokeDash, + ChannelStrokeOpacity, + ChannelStrokeWidth, + ChannelText, + ChannelTheta, + ChannelTheta2, + ChannelTooltip, + ChannelUrl, + ChannelX, + ChannelX2, + ChannelXError, + ChannelXError2, + ChannelXOffset, + ChannelY, + ChannelY2, + ChannelYError, + ChannelYError2, + ChannelYOffset, + EncodeKwds, +) diff --git a/altair/utils/__init__.py b/altair/utils/__init__.py index 4befd99a3..216645c3f 100644 --- a/altair/utils/__init__.py +++ b/altair/utils/__init__.py @@ -1,20 +1,19 @@ from .core import ( - infer_vegalite_type_for_pandas, + SHORTHAND_KEYS, + SchemaBase, + display_traceback, infer_encoding_types, - sanitize_pandas_dataframe, - sanitize_narwhals_dataframe, + infer_vegalite_type_for_pandas, parse_shorthand, - use_signature, + sanitize_narwhals_dataframe, + sanitize_pandas_dataframe, update_nested, - display_traceback, - SchemaBase, - SHORTHAND_KEYS, + use_signature, ) +from .deprecation import AltairDeprecationWarning, deprecated, 
deprecated_warn from .html import spec_to_html from .plugin_registry import PluginRegistry -from .deprecation import AltairDeprecationWarning, deprecated, deprecated_warn -from .schemapi import Undefined, Optional, is_undefined - +from .schemapi import Optional, Undefined, is_undefined __all__ = ( "SHORTHAND_KEYS", diff --git a/altair/utils/_dfi_types.py b/altair/utils/_dfi_types.py index 12b7761f8..c9aaa0176 100644 --- a/altair/utils/_dfi_types.py +++ b/altair/utils/_dfi_types.py @@ -5,6 +5,7 @@ # # These classes are only for use in type signatures from __future__ import annotations + import enum from typing import Any, Iterable, Protocol diff --git a/altair/utils/_importers.py b/altair/utils/_importers.py index 76656a860..14085ebcf 100644 --- a/altair/utils/_importers.py +++ b/altair/utils/_importers.py @@ -41,7 +41,7 @@ def import_vegafusion() -> ModuleType: def import_vl_convert() -> ModuleType: - min_version = "1.5.0" + min_version = "1.6.0" try: version = importlib_version("vl-convert-python") if Version(version) < Version(min_version): @@ -67,7 +67,7 @@ def import_vl_convert() -> ModuleType: def vl_version_for_vl_convert() -> str: - from ..vegalite import SCHEMA_VERSION + from altair.vegalite import SCHEMA_VERSION # Compute VlConvert's vl_version string (of the form 'v5_2') # from SCHEMA_VERSION (of the form 'v5.2.0') diff --git a/altair/utils/_show.py b/altair/utils/_show.py index 325b9e91a..39345d1ce 100644 --- a/altair/utils/_show.py +++ b/altair/utils/_show.py @@ -1,4 +1,5 @@ from __future__ import annotations + import webbrowser from http.server import BaseHTTPRequestHandler, HTTPServer from typing import Iterable diff --git a/altair/utils/_transformed_data.py b/altair/utils/_transformed_data.py index 5847ad94f..3839a13d2 100644 --- a/altair/utils/_transformed_data.py +++ b/altair/utils/_transformed_data.py @@ -1,34 +1,37 @@ from __future__ import annotations -from typing import Any, Iterable, overload, TYPE_CHECKING, Dict, Tuple + +from typing import 
TYPE_CHECKING, Any, Dict, Iterable, Tuple, overload from typing_extensions import TypeAlias + from altair import ( Chart, - FacetChart, - LayerChart, - HConcatChart, - VConcatChart, ConcatChart, - TopLevelUnitSpec, + ConcatSpecGenericSpec, + FacetChart, FacetedUnitSpec, - UnitSpec, - UnitSpecWithFrame, - NonNormalizedSpec, - TopLevelLayerSpec, + FacetSpec, + HConcatChart, + HConcatSpecGenericSpec, + LayerChart, LayerSpec, + NonNormalizedSpec, TopLevelConcatSpec, - ConcatSpecGenericSpec, + TopLevelFacetSpec, TopLevelHConcatSpec, - HConcatSpecGenericSpec, + TopLevelLayerSpec, + TopLevelUnitSpec, TopLevelVConcatSpec, + UnitSpec, + UnitSpecWithFrame, + VConcatChart, VConcatSpecGenericSpec, - TopLevelFacetSpec, - FacetSpec, data_transformers, ) from altair.utils._vegafusion_data import get_inline_tables, import_vegafusion from altair.utils.schemapi import Undefined if TYPE_CHECKING: + from altair.typing import ChartType from altair.utils.core import DataFrameLike Scope: TypeAlias = Tuple[int, ...] @@ -154,9 +157,7 @@ def transformed_data(chart, row_limit=None, exclude=None): # The same error appeared when trying it with Protocols for the concat and layer charts. # This function is only used internally and so we accept this inconsistency for now. def name_views( - chart: Chart | FacetChart | LayerChart | HConcatChart | VConcatChart | ConcatChart, - i: int = 0, - exclude: Iterable[str] | None = None, + chart: ChartType, i: int = 0, exclude: Iterable[str] | None = None ) -> list[str]: """ Name unnamed chart views. 
@@ -193,6 +194,7 @@ def name_views( else: return [] else: + subcharts: list[Any] if isinstance(chart, _chart_class_mapping[LayerChart]): subcharts = chart.layer elif isinstance(chart, _chart_class_mapping[HConcatChart]): @@ -450,7 +452,7 @@ def get_facet_mapping(group: dict[str, Any], scope: Scope = ()) -> FacetMapping: group, facet_data, scope ) if definition_scope is not None: - facet_mapping[(facet_name, group_scope)] = ( + facet_mapping[facet_name, group_scope] = ( facet_data, definition_scope, ) diff --git a/altair/utils/_vegafusion_data.py b/altair/utils/_vegafusion_data.py index 26f79d22e..99779b62e 100644 --- a/altair/utils/_vegafusion_data.py +++ b/altair/utils/_vegafusion_data.py @@ -1,31 +1,33 @@ from __future__ import annotations + import uuid -from weakref import WeakValueDictionary from typing import ( + TYPE_CHECKING, Any, - Union, + Callable, + Final, MutableMapping, TypedDict, - Final, - TYPE_CHECKING, + Union, overload, - Callable, ) +from weakref import WeakValueDictionary import narwhals.stable.v1 as nw from altair.utils._importers import import_vegafusion +from altair.utils.core import DataFrameLike from altair.utils.data import ( DataType, - ToValuesReturnType, MaxRowsError, SupportsGeoInterface, + ToValuesReturnType, ) -from altair.utils.core import DataFrameLike from altair.vegalite.data import default_data_transformer if TYPE_CHECKING: from narwhals.typing import IntoDataFrame + from vegafusion.runtime import ChartState # type: ignore # Temporary storage for dataframes that have been extracted @@ -182,7 +184,7 @@ def compile_to_vegafusion_chart_state( A VegaFusion ChartState object """ # Local import to avoid circular ImportError - from altair import vegalite_compilers, data_transformers + from altair import data_transformers, vegalite_compilers vf = import_vegafusion() @@ -213,7 +215,7 @@ def compile_to_vegafusion_chart_state( return chart_state -def compile_with_vegafusion(vegalite_spec: dict[str, Any]) -> dict: +def 
compile_with_vegafusion(vegalite_spec: dict[str, Any]) -> dict[str, Any]: """ Compile a Vega-Lite spec to Vega and pre-transform with VegaFusion. @@ -234,7 +236,7 @@ def compile_with_vegafusion(vegalite_spec: dict[str, Any]) -> dict: A Vega spec that has been pre-transformed by VegaFusion """ # Local import to avoid circular ImportError - from altair import vegalite_compilers, data_transformers + from altair import data_transformers, vegalite_compilers vf = import_vegafusion() diff --git a/altair/utils/compiler.py b/altair/utils/compiler.py index cbe0dd62d..a022651e6 100644 --- a/altair/utils/compiler.py +++ b/altair/utils/compiler.py @@ -1,11 +1,12 @@ -from typing import Callable +from typing import Any, Callable, Dict + from altair.utils import PluginRegistry # ============================================================================== # Vega-Lite to Vega compiler registry # ============================================================================== -VegaLiteCompilerType = Callable[[dict], dict] +VegaLiteCompilerType = Callable[[Dict[str, Any]], Dict[str, Any]] -class VegaLiteCompilerRegistry(PluginRegistry[VegaLiteCompilerType, dict]): +class VegaLiteCompilerRegistry(PluginRegistry[VegaLiteCompilerType, Dict[str, Any]]): pass diff --git a/altair/utils/core.py b/altair/utils/core.py index d6f6bad65..f5ef659b1 100644 --- a/altair/utils/core.py +++ b/altair/utils/core.py @@ -2,29 +2,29 @@ from __future__ import annotations -from collections.abc import Mapping, MutableMapping -from copy import deepcopy -import json import itertools +import json import re import sys import traceback import warnings -from typing import Callable, TypeVar, Any, Iterator, cast, Literal, TYPE_CHECKING +from collections.abc import Mapping, MutableMapping +from copy import deepcopy from itertools import groupby from operator import itemgetter +from typing import TYPE_CHECKING, Any, Callable, Iterator, Literal, TypeVar, cast import jsonschema import narwhals.stable.v1 as nw -from 
narwhals.dependencies import is_pandas_dataframe, get_polars +from narwhals.dependencies import get_polars, is_pandas_dataframe from narwhals.typing import IntoDataFrame from altair.utils.schemapi import SchemaBase, Undefined if sys.version_info >= (3, 12): - from typing import runtime_checkable, Protocol + from typing import Protocol, runtime_checkable else: - from typing_extensions import runtime_checkable, Protocol + from typing_extensions import Protocol, runtime_checkable if sys.version_info >= (3, 10): from typing import ParamSpec else: @@ -32,12 +32,14 @@ if TYPE_CHECKING: - from types import ModuleType import typing as t - from altair.vegalite.v5.schema._typing import StandardType_T as InferredVegaLiteType - from altair.utils._dfi_types import DataFrame as DfiDataFrame - from narwhals.typing import IntoExpr + from types import ModuleType + import pandas as pd + from narwhals.typing import IntoExpr + + from altair.utils._dfi_types import DataFrame as DfiDataFrame + from altair.vegalite.v5.schema._typing import StandardType_T as InferredVegaLiteType V = TypeVar("V") P = ParamSpec("P") @@ -317,7 +319,7 @@ def numpy_is_subtype(dtype: Any, subtype: Any) -> bool: return False -def sanitize_pandas_dataframe(df: pd.DataFrame) -> pd.DataFrame: +def sanitize_pandas_dataframe(df: pd.DataFrame) -> pd.DataFrame: # noqa: C901 """ Sanitize a DataFrame to prepare it for serialization. @@ -337,8 +339,8 @@ def sanitize_pandas_dataframe(df: pd.DataFrame) -> pd.DataFrame: """ # This is safe to import here, as this function is only called on pandas input. # NumPy is a required dependency of pandas so is also safe to import. 
- import pandas as pd import numpy as np + import pandas as pd df = df.copy() @@ -500,9 +502,9 @@ def to_eager_narwhals_dataframe(data: IntoDataFrame) -> nw.DataFrame[Any]: return data_nw -def parse_shorthand( +def parse_shorthand( # noqa: C901 shorthand: dict[str, Any] | str, - data: pd.DataFrame | DataFrameLike | None = None, + data: IntoDataFrame | None = None, parse_aggregates: bool = True, parse_window_ops: bool = False, parse_timeunits: bool = True, diff --git a/altair/utils/data.py b/altair/utils/data.py index dc263fa1d..1986ec8c5 100644 --- a/altair/utils/data.py +++ b/altair/utils/data.py @@ -1,27 +1,28 @@ from __future__ import annotations + +import hashlib import json import random -import hashlib +import sys +from functools import partial +from pathlib import Path from typing import ( + TYPE_CHECKING, Any, + Callable, + Dict, List, + Literal, MutableMapping, - Sequence, - TYPE_CHECKING, Protocol, + Sequence, TypedDict, - Literal, TypeVar, Union, - Dict, overload, runtime_checkable, - Callable, ) -from typing_extensions import TypeAlias, ParamSpec, Concatenate -from pathlib import Path -from functools import partial -import sys +from typing_extensions import Concatenate, ParamSpec, TypeAlias import narwhals.stable.v1 as nw from narwhals.dependencies import is_pandas_dataframe as _is_pandas_dataframe @@ -29,12 +30,12 @@ from ._importers import import_pyarrow_interchange from .core import ( - sanitize_pandas_dataframe, DataFrameLike, + sanitize_geo_interface, sanitize_narwhals_dataframe, + sanitize_pandas_dataframe, to_eager_narwhals_dataframe, ) -from .core import sanitize_geo_interface from .plugin_registry import PluginRegistry if sys.version_info >= (3, 13): @@ -43,8 +44,8 @@ from typing_extensions import TypeIs if TYPE_CHECKING: - import pyarrow as pa import pandas as pd + import pyarrow as pa @runtime_checkable diff --git a/altair/utils/display.py b/altair/utils/display.py index 3c54c8abd..91727318e 100644 --- a/altair/utils/display.py +++ 
b/altair/utils/display.py @@ -1,17 +1,17 @@ from __future__ import annotations + import json import pkgutil import textwrap -from typing import Callable, Any, Dict, Tuple, Union -from typing_extensions import TypeAlias import uuid +from typing import Any, Callable, Dict, Tuple, Union +from typing_extensions import TypeAlias from ._vegafusion_data import compile_with_vegafusion, using_vegafusion -from .plugin_registry import PluginRegistry, PluginEnabler from .mimebundle import spec_to_mimebundle +from .plugin_registry import PluginEnabler, PluginRegistry from .schemapi import validate_jsonschema - # ============================================================================== # Renderer registry # ============================================================================== diff --git a/altair/utils/mimebundle.py b/altair/utils/mimebundle.py index 687355f64..029388025 100644 --- a/altair/utils/mimebundle.py +++ b/altair/utils/mimebundle.py @@ -1,9 +1,11 @@ from __future__ import annotations + +import struct from typing import Any, Literal, cast, overload from typing_extensions import TypeAlias -from .html import spec_to_html + from ._importers import import_vl_convert, vl_version_for_vl_convert -import struct +from .html import spec_to_html MimeBundleFormat: TypeAlias = Literal[ "html", "json", "png", "svg", "pdf", "vega", "vega-lite" @@ -108,11 +110,8 @@ def spec_to_mimebundle( The png, svg, pdf, and vega outputs require the vl-convert package """ # Local import to avoid circular ImportError - from altair.utils.display import ( - compile_with_vegafusion, - using_vegafusion, - ) from altair import renderers + from altair.utils.display import compile_with_vegafusion, using_vegafusion if mode != "vega-lite": msg = "mode must be 'vega-lite'" diff --git a/altair/utils/plugin_registry.py b/altair/utils/plugin_registry.py index 4b4f01dbd..996c6623e 100644 --- a/altair/utils/plugin_registry.py +++ b/altair/utils/plugin_registry.py @@ -1,10 +1,9 @@ from __future__ import 
annotations from functools import partial -from typing import Any, Generic, cast, Callable, TYPE_CHECKING -from typing_extensions import TypeAliasType, TypeVar, TypeIs - from importlib.metadata import entry_points +from typing import TYPE_CHECKING, Any, Callable, Generic, cast +from typing_extensions import TypeAliasType, TypeIs, TypeVar from altair.utils.deprecation import deprecated_warn diff --git a/altair/utils/save.py b/altair/utils/save.py index 1bc77679c..042d457dc 100644 --- a/altair/utils/save.py +++ b/altair/utils/save.py @@ -1,13 +1,15 @@ from __future__ import annotations + import json import pathlib import warnings -from typing import IO, Any, Literal, TYPE_CHECKING +from typing import IO, TYPE_CHECKING, Any, Literal -from .mimebundle import spec_to_mimebundle -from ..vegalite.v5.data import data_transformers from altair.utils._vegafusion_data import using_vegafusion from altair.utils.deprecation import deprecated_warn +from altair.vegalite.v5.data import data_transformers + +from .mimebundle import spec_to_mimebundle if TYPE_CHECKING: from pathlib import Path @@ -138,8 +140,7 @@ def save( version="5.0.0", ) - if json_kwds is None: - json_kwds = {} + json_kwds = json_kwds or {} encoding = kwargs.get("encoding", "utf-8") format = set_inspect_format_argument(format, fp, inline) # type: ignore[assignment] diff --git a/altair/utils/schemapi.py b/altair/utils/schemapi.py index 7726eef70..fdf0d6594 100644 --- a/altair/utils/schemapi.py +++ b/altair/utils/schemapi.py @@ -6,31 +6,33 @@ import copy import inspect import json +import sys import textwrap -from math import ceil from collections import defaultdict +from functools import partial from importlib.metadata import version as importlib_version from itertools import chain, zip_longest -import sys +from math import ceil from typing import ( TYPE_CHECKING, Any, + Dict, Final, Iterable, Iterator, + List, Literal, Sequence, TypeVar, Union, overload, - List, - Dict, ) from typing_extensions import TypeAlias 
-from functools import partial + import jsonschema import jsonschema.exceptions import jsonschema.validators +import narwhals.stable.v1 as nw from packaging.version import Version # This leads to circular imports with the vegalite module. Currently, this works @@ -39,10 +41,11 @@ from altair import vegalite if TYPE_CHECKING: + from typing import ClassVar + from referencing import Registry - from altair import ChartType - from typing import ClassVar + from altair.typing import ChartType if sys.version_info >= (3, 13): from typing import TypeIs @@ -50,9 +53,9 @@ from typing_extensions import TypeIs if sys.version_info >= (3, 11): - from typing import Self, Never + from typing import Never, Self else: - from typing_extensions import Self, Never + from typing_extensions import Never, Self ValidationErrorList: TypeAlias = List[jsonschema.exceptions.ValidationError] GroupedValidationErrors: TypeAlias = Dict[str, ValidationErrorList] @@ -486,7 +489,15 @@ def _subclasses(cls: type[Any]) -> Iterator[type[Any]]: yield cls -def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any: +def _from_array_like(obj: Iterable[Any], /) -> list[Any]: + try: + ser = nw.from_native(obj, strict=True, series_only=True) + return ser.to_list() + except TypeError: + return list(obj) + + +def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any: # noqa: C901 """Convert an object to a dict representation.""" if np_opt is not None: np = np_opt @@ -510,10 +521,16 @@ def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) for k, v in obj.items() if v is not Undefined } - elif hasattr(obj, "to_dict"): + elif ( + hasattr(obj, "to_dict") + and (module_name := obj.__module__) + and module_name.startswith("altair") + ): return obj.to_dict() elif pd_opt is not None and isinstance(obj, pd_opt.Timestamp): return pd_opt.Timestamp(obj).isoformat() + elif _is_iterable(obj, exclude=(str, bytes)): + return 
_todict(_from_array_like(obj), context, np_opt, pd_opt) else: return obj @@ -744,10 +761,12 @@ def _get_default_error_message( # Add unformatted messages of any remaining errors which were not # considered so far. This is not expected to be used but more exists # as a fallback for cases which were not known during development. - for validator, errors in errors_by_validator.items(): - if validator not in {"enum", "type"}: - message += "\n".join([e.message for e in errors]) - + it = ( + "\n".join(e.message for e in errors) + for validator, errors in errors_by_validator.items() + if validator not in {"enum", "type"} + ) + message += "".join(it) return message @@ -775,7 +794,7 @@ def __repr__(self) -> str: The parameters ``short``, ``long`` accept the same range of types:: # ruff: noqa: UP006, UP007 - from altair import Optional + from altair.typing import Optional def func_1( short: Optional[str | bool | float | dict[str, Any] | SchemaBase] = Undefined, @@ -784,10 +803,12 @@ def func_1( ] = Undefined, ): ... -This is distinct from `typing.Optional `__ as ``altair.Optional`` treats ``None`` like any other type:: +This is distinct from `typing.Optional `__. + +``altair.typing.Optional`` treats ``None`` like any other type:: # ruff: noqa: UP006, UP007 - from altair import Optional + from altair.typing import Optional def func_2( short: Optional[str | float | dict[str, Any] | None | SchemaBase] = Undefined, @@ -849,7 +870,7 @@ def __init__(self, *args: Any, **kwds: Any) -> None: if DEBUG_MODE and self._class_is_valid_at_instantiation: self.to_dict(validate=True) - def copy( + def copy( # noqa: C901 self, deep: bool | Iterable[Any] = True, ignore: list[str] | None = None ) -> Self: """ @@ -1228,6 +1249,12 @@ def _is_list(obj: Any | list[Any]) -> TypeIs[list[Any]]: return isinstance(obj, list) +def _is_iterable( + obj: Any, *, exclude: type | tuple[type, ...] 
= (str, bytes) +) -> TypeIs[Iterable[Any]]: + return not isinstance(obj, exclude) and isinstance(obj, Iterable) + + def _passthrough(*args: Any, **kwds: Any) -> Any | dict[str, Any]: return args[0] if args else kwds diff --git a/altair/utils/selection.py b/altair/utils/selection.py index 94a763d40..13b9d5b8e 100644 --- a/altair/utils/selection.py +++ b/altair/utils/selection.py @@ -1,7 +1,7 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import List, Dict, Any, NewType +from dataclasses import dataclass +from typing import Any, Dict, List, NewType # Type representing the "{selection}_store" dataset that corresponds to a # Vega-Lite selection diff --git a/altair/utils/server.py b/altair/utils/server.py index 8c57e9e8b..d773b7d25 100644 --- a/altair/utils/server.py +++ b/altair/utils/server.py @@ -5,14 +5,14 @@ https://github.com/mpld3/mpld3/blob/master/mpld3/_server.py """ +import itertools +import random +import socket import sys import threading import webbrowser -import socket from http import server from io import BytesIO as IO -import itertools -import random JUPYTER_WARNING = """ Note: if you're in the Jupyter notebook, Chart.serve() is not the best diff --git a/altair/utils/theme.py b/altair/utils/theme.py index 8a913cdc1..02372e690 100644 --- a/altair/utils/theme.py +++ b/altair/utils/theme.py @@ -1,10 +1,49 @@ """Utilities for registering and working with themes.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Callable + from .plugin_registry import PluginRegistry -from typing import Callable + +if sys.version_info >= (3, 11): + from typing import LiteralString +else: + from typing_extensions import LiteralString + +if TYPE_CHECKING: + from altair.utils.plugin_registry import PluginEnabler + from altair.vegalite.v5.theme import AltairThemes, VegaThemes ThemeType = Callable[..., dict] class ThemeRegistry(PluginRegistry[ThemeType, dict]): - pass + def enable( + self, name: 
LiteralString | AltairThemes | VegaThemes | None = None, **options + ) -> PluginEnabler: + """ + Enable a theme by name. + + This can be either called directly, or used as a context manager. + + Parameters + ---------- + name : string (optional) + The name of the theme to enable. If not specified, then use the + current active name. + **options : + Any additional parameters will be passed to the theme as keyword + arguments + + Returns + ------- + PluginEnabler: + An object that allows enable() to be used as a context manager + + Notes + ----- + Default `vega` themes can be previewed at https://vega.github.io/vega-themes/ + """ + return super().enable(name, **options) diff --git a/altair/vegalite/__init__.py b/altair/vegalite/__init__.py index 7833afcee..d4582cb14 100644 --- a/altair/vegalite/__init__.py +++ b/altair/vegalite/__init__.py @@ -1,3 +1,4 @@ # ruff: noqa: F403 from .v5 import * -from .v5._api_rfc import agg as agg, field as field +from .v5._api_rfc import agg as agg +from .v5._api_rfc import field as field diff --git a/altair/vegalite/data.py b/altair/vegalite/data.py index 19371fc87..cb2d58a59 100644 --- a/altair/vegalite/data.py +++ b/altair/vegalite/data.py @@ -1,21 +1,22 @@ from __future__ import annotations -from typing import TYPE_CHECKING, overload, Callable -from ..utils.core import sanitize_pandas_dataframe -from ..utils.data import ( +from typing import TYPE_CHECKING, Callable, overload + +from altair.utils.core import sanitize_pandas_dataframe +from altair.utils.data import DataTransformerRegistry as _DataTransformerRegistry +from altair.utils.data import ( MaxRowsError, + check_data_type, limit_rows, sample, to_csv, to_json, to_values, - check_data_type, ) -from ..utils.data import DataTransformerRegistry as _DataTransformerRegistry if TYPE_CHECKING: - from ..utils.plugin_registry import PluginEnabler - from ..utils.data import DataType, ToValuesReturnType + from altair.utils.data import DataType, ToValuesReturnType + from 
altair.utils.plugin_registry import PluginEnabler @overload diff --git a/altair/vegalite/display.py b/altair/vegalite/display.py index d146c53c7..8e204c5ba 100644 --- a/altair/vegalite/display.py +++ b/altair/vegalite/display.py @@ -1,11 +1,11 @@ -from ..utils.display import ( +from altair.utils.display import ( + DefaultRendererReturnType, Displayable, + HTMLRenderer, + RendererRegistry, default_renderer_base, json_renderer_base, - DefaultRendererReturnType, ) -from ..utils.display import RendererRegistry, HTMLRenderer - __all__ = ( "DefaultRendererReturnType", diff --git a/altair/vegalite/v5/__init__.py b/altair/vegalite/v5/__init__.py index be8d802fb..bc0703ec6 100644 --- a/altair/vegalite/v5/__init__.py +++ b/altair/vegalite/v5/__init__.py @@ -1,26 +1,24 @@ # ruff: noqa: F401, F403 -from .schema import * -from .api import * - from altair.expr.core import datum -from .display import ( - VegaLite, - renderers, - VEGALITE_VERSION, - VEGAEMBED_VERSION, - VEGA_VERSION, -) +from .api import * from .compiler import vegalite_compilers - from .data import ( MaxRowsError, + data_transformers, + default_data_transformer, limit_rows, sample, - to_json, to_csv, + to_json, to_values, - default_data_transformer, - data_transformers, ) +from .display import ( + VEGA_VERSION, + VEGAEMBED_VERSION, + VEGALITE_VERSION, + VegaLite, + renderers, +) +from .schema import * from .theme import themes diff --git a/altair/vegalite/v5/_api_rfc.py b/altair/vegalite/v5/_api_rfc.py index 50b4aa404..62d73584b 100644 --- a/altair/vegalite/v5/_api_rfc.py +++ b/altair/vegalite/v5/_api_rfc.py @@ -14,19 +14,19 @@ from __future__ import annotations from typing import TYPE_CHECKING, Any, Dict, Literal, Mapping, Sequence, Union - from typing_extensions import TypeAlias from altair.utils.core import TYPECODE_MAP as _TYPE_CODE from altair.utils.core import parse_shorthand as _parse from altair.utils.schemapi import Optional, SchemaBase, Undefined from altair.vegalite.v5.api import Parameter, 
SelectionPredicateComposition +from altair.vegalite.v5.schema import channels from altair.vegalite.v5.schema._typing import ( BinnedTimeUnit_T, + Map, MultiTimeUnit_T, SingleTimeUnit_T, Type_T, - Map, ) from altair.vegalite.v5.schema.core import ( FieldEqualPredicate, @@ -38,7 +38,6 @@ FieldRangePredicate, FieldValidPredicate, ) -from altair.vegalite.v5.schema import channels if TYPE_CHECKING: from altair.utils.core import DataFrameLike diff --git a/altair/vegalite/v5/api.py b/altair/vegalite/v5/api.py index 9458442ca..f050dd1d1 100644 --- a/altair/vegalite/v5/api.py +++ b/altair/vegalite/v5/api.py @@ -1,45 +1,48 @@ from __future__ import annotations -import sys -import warnings +import functools import hashlib import io -import json -import jsonschema import itertools +import json +import operator +import sys +import typing as t +import warnings +from copy import deepcopy as _deepcopy from typing import ( + TYPE_CHECKING, Any, - overload, Literal, - Union, - TYPE_CHECKING, - TypeVar, Protocol, + Sequence, + TypeVar, + Union, + overload, ) from typing_extensions import TypeAlias -import typing as t -import functools -import operator -from copy import deepcopy as _deepcopy -from .schema import core, channels, mixins, SCHEMA_URL +import jsonschema +from altair import utils +from altair.expr import core as _expr_core from altair.utils import Optional, Undefined -from .data import data_transformers -from ... 
import utils -from ...expr import core as _expr_core -from .display import renderers, VEGALITE_VERSION, VEGAEMBED_VERSION, VEGA_VERSION -from .theme import themes -from .compiler import vegalite_compilers -from ...utils._vegafusion_data import ( - using_vegafusion as _using_vegafusion, +from altair.utils._vegafusion_data import ( compile_with_vegafusion as _compile_with_vegafusion, ) -from altair.utils.data import DataType, is_data_type as _is_data_type +from altair.utils._vegafusion_data import using_vegafusion as _using_vegafusion from altair.utils.core import ( to_eager_narwhals_dataframe as _to_eager_narwhals_dataframe, ) +from altair.utils.data import DataType +from altair.utils.data import is_data_type as _is_data_type + +from .compiler import vegalite_compilers +from .data import data_transformers +from .display import VEGA_VERSION, VEGAEMBED_VERSION, VEGALITE_VERSION, renderers +from .schema import SCHEMA_URL, channels, core, mixins from .schema._typing import Map +from .theme import themes if sys.version_info >= (3, 13): from typing import TypedDict @@ -47,81 +50,93 @@ from typing_extensions import TypedDict if TYPE_CHECKING: - from ...utils.core import DataFrameLike from pathlib import Path - from typing import Iterable, IO, Iterator + from typing import IO, Iterable, Iterator + + from altair.utils.core import DataFrameLike if sys.version_info >= (3, 13): - from typing import TypeIs, Required + from typing import Required, TypeIs else: - from typing_extensions import TypeIs, Required + from typing_extensions import Required, TypeIs if sys.version_info >= (3, 11): - from typing import Self, Never + from typing import Never, Self else: - from typing_extensions import Self, Never + from typing_extensions import Never, Self + + from altair.expr.core import ( + BinaryExpression, + Expression, + GetAttrExpression, + GetItemExpression, + IntoExpression, + ) + from altair.utils.display import MimeBundleType - from .schema.channels import Facet, Row, Column + from 
.schema._typing import ( + AggregateOp_T, + AutosizeType_T, + ColorName_T, + ImputeMethod_T, + LayoutAlign_T, + MultiTimeUnit_T, + OneOrSeq, + ProjectionType_T, + ResolveMode_T, + SelectionResolution_T, + SelectionType_T, + SingleDefUnitChannel_T, + SingleTimeUnit_T, + StackOffset_T, + ) + from .schema.channels import Column, Facet, Row from .schema.core import ( - SchemaBase, - Expr, - PredicateComposition, + AggregatedFieldDef, + AggregateOp, + AnyMark, + BindCheckbox, Binding, - IntervalSelectionConfig, - PointSelectionConfig, - Mark, - LayerRepeatMapping, - RepeatMapping, - ProjectionType, + BindRadioSelect, + BindRange, + BinParams, + Expr, ExprRef, - Vector2number, - Vector2Vector2number, - Vector3number, - Transform, - AggregatedFieldDef, + FacetedEncoding, + FacetFieldDef, FieldName, - BinParams, - ImputeSequence, + GraticuleGenerator, ImputeMethod, + ImputeSequence, + InlineData, + InlineDataset, + IntervalSelectionConfig, JoinAggregateFieldDef, + LayerRepeatMapping, + LookupSelection, + Mark, + NamedData, ParameterName, + PointSelectionConfig, Predicate, - LookupSelection, - AggregateOp, - SortField, - TimeUnit, - WindowFieldDef, - FacetFieldDef, - FacetedEncoding, - AnyMark, - Step, + PredicateComposition, + ProjectionType, + RepeatMapping, RepeatRef, - UrlData, + SchemaBase, + SelectionParameter, SequenceGenerator, - GraticuleGenerator, + SortField, SphereGenerator, - VariableParameter, + Step, + TimeUnit, TopLevelSelectionParameter, - SelectionParameter, - InlineDataset, - ) - from altair.expr.core import ( - BinaryExpression, - Expression, - GetAttrExpression, - GetItemExpression, - ) - from .schema._typing import ( - ImputeMethod_T, - SelectionType_T, - SelectionResolution_T, - SingleDefUnitChannel_T, - StackOffset_T, - ResolveMode_T, - ProjectionType_T, - AggregateOp_T, - MultiTimeUnit_T, - SingleTimeUnit_T, - OneOrSeq, + Transform, + UrlData, + VariableParameter, + Vector2number, + Vector2Vector2number, + Vector3number, + WindowFieldDef, ) __all__ 
= [ @@ -130,7 +145,6 @@ "ChainedWhen", "Chart", "ChartDataType", - "ChartType", "ConcatChart", "DataType", "FacetChart", @@ -159,7 +173,6 @@ "condition", "graticule", "hconcat", - "is_chart_type", "layer", "mixins", "param", @@ -183,7 +196,7 @@ # ------------------------------------------------------------------------ # Data Utilities -def _dataset_name(values: dict | list | InlineDataset) -> str: +def _dataset_name(values: dict[str, Any] | list | InlineDataset) -> str: """ Generate a unique hash of the data. @@ -206,7 +219,9 @@ def _dataset_name(values: dict | list | InlineDataset) -> str: return "data-" + hsh -def _consolidate_data(data: Any, context: Any) -> Any: +def _consolidate_data( + data: ChartDataType | UrlData, context: dict[str, Any] +) -> ChartDataType | NamedData | InlineData | UrlData: """ If data is specified inline, then move it to context['datasets']. @@ -235,7 +250,9 @@ def _consolidate_data(data: Any, context: Any) -> Any: return data -def _prepare_data(data, context=None): +def _prepare_data( + data: ChartDataType, context: dict[str, Any] | None = None +) -> ChartDataType | NamedData | InlineData | UrlData | Any: """ Convert input data to data for use within schema. 
@@ -281,10 +298,10 @@ def _prepare_data(data, context=None): class LookupData(core.LookupData): @utils.use_signature(core.LookupData) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - def to_dict(self, *args, **kwargs) -> dict: + def to_dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: """Convert the chart to a dictionary suitable for JSON export.""" copy = self.copy(deep=False) copy.data = _prepare_data(copy.data, kwargs.get("context")) @@ -295,10 +312,10 @@ class FacetMapping(core.FacetMapping): _class_is_valid_at_instantiation = False @utils.use_signature(core.FacetMapping) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - def to_dict(self, *args, **kwargs) -> dict: + def to_dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: copy = self.copy(deep=False) context = kwargs.get("context", {}) data = context.get("data", None) @@ -353,11 +370,11 @@ def __init__( alternative="to_dict", message="No need to call '.ref()' anymore.", ) - def ref(self) -> dict: + def ref(self) -> dict[str, Any]: """'ref' is deprecated. 
No need to call '.ref()' anymore.""" return self.to_dict() - def to_dict(self) -> dict[str, str | dict]: + def to_dict(self) -> dict[str, str | dict[str, Any]]: if self.param_type == "variable": return {"expr": self.name} elif self.param_type == "selection": @@ -367,13 +384,13 @@ def to_dict(self) -> dict[str, str | dict]: msg = f"Unrecognized parameter type: {self.param_type}" raise ValueError(msg) - def __invert__(self): + def __invert__(self) -> SelectionPredicateComposition | Any: if self.param_type == "selection": return SelectionPredicateComposition({"not": {"param": self.name}}) else: return _expr_core.OperatorMixin.__invert__(self) - def __and__(self, other): + def __and__(self, other: Any) -> SelectionPredicateComposition | Any: if self.param_type == "selection": if isinstance(other, Parameter): other = {"param": other.name} @@ -381,7 +398,7 @@ def __and__(self, other): else: return _expr_core.OperatorMixin.__and__(self, other) - def __or__(self, other): + def __or__(self, other: Any) -> SelectionPredicateComposition | Any: if self.param_type == "selection": if isinstance(other, Parameter): other = {"param": other.name} @@ -395,7 +412,7 @@ def __repr__(self) -> str: def _to_expr(self) -> str: return self.name - def _from_expr(self, expr) -> ParameterExpression: + def _from_expr(self, expr: IntoExpression) -> ParameterExpression: return ParameterExpression(expr=expr) def __getattr__(self, field_name: str) -> GetAttrExpression | SelectionExpression: @@ -416,18 +433,18 @@ def __getitem__(self, field_name: str) -> GetItemExpression: # Enables use of ~, &, | with compositions of selection objects. 
class SelectionPredicateComposition(core.PredicateComposition): - def __invert__(self): + def __invert__(self) -> SelectionPredicateComposition: return SelectionPredicateComposition({"not": self.to_dict()}) - def __and__(self, other): + def __and__(self, other: SchemaBase) -> SelectionPredicateComposition: return SelectionPredicateComposition({"and": [self.to_dict(), other.to_dict()]}) - def __or__(self, other): + def __or__(self, other: SchemaBase) -> SelectionPredicateComposition: return SelectionPredicateComposition({"or": [self.to_dict(), other.to_dict()]}) class ParameterExpression(_expr_core.OperatorMixin): - def __init__(self, expr) -> None: + def __init__(self, expr: IntoExpression) -> None: self.expr = expr def to_dict(self) -> dict[str, str]: @@ -436,12 +453,12 @@ def to_dict(self) -> dict[str, str]: def _to_expr(self) -> str: return repr(self.expr) - def _from_expr(self, expr) -> ParameterExpression: + def _from_expr(self, expr: IntoExpression) -> ParameterExpression: return ParameterExpression(expr=expr) class SelectionExpression(_expr_core.OperatorMixin): - def __init__(self, expr) -> None: + def __init__(self, expr: IntoExpression) -> None: self.expr = expr def to_dict(self) -> dict[str, str]: @@ -450,7 +467,7 @@ def to_dict(self) -> dict[str, str]: def _to_expr(self) -> str: return repr(self.expr) - def _from_expr(self, expr) -> SelectionExpression: + def _from_expr(self, expr: IntoExpression) -> SelectionExpression: return SelectionExpression(expr=expr) @@ -1037,7 +1054,7 @@ def when( ) raise NotImplementedError(msg) - def to_dict(self, *args, **kwds) -> _Conditional[_C]: # type: ignore[override] + def to_dict(self, *args: Any, **kwds: Any) -> _Conditional[_C]: # type: ignore[override] m = super().to_dict(*args, **kwds) return _Conditional(condition=m["condition"]) @@ -1213,7 +1230,7 @@ def when( # Top-Level Functions -def value(value, **kwargs) -> _Value: +def value(value: Any, **kwargs: Any) -> _Value: """Specify a value for use in an encoding.""" 
return _Value(value=value, **kwargs) # type: ignore[typeddict-item] @@ -1224,7 +1241,7 @@ def param( bind: Optional[Binding] = Undefined, empty: Optional[bool] = Undefined, expr: Optional[str | Expr | Expression] = Undefined, - **kwds, + **kwds: Any, ) -> Parameter: """ Create a named parameter, see https://altair-viz.github.io/user_guide/interactions.html for examples. @@ -1305,7 +1322,7 @@ def param( return parameter -def _selection(type: Optional[SelectionType_T] = Undefined, **kwds) -> Parameter: +def _selection(type: Optional[SelectionType_T] = Undefined, **kwds: Any) -> Parameter: # We separate out the parameter keywords from the selection keywords select_kwds = {"name", "bind", "value", "empty", "init", "views"} @@ -1335,7 +1352,7 @@ def _selection(type: Optional[SelectionType_T] = Undefined, **kwds) -> Parameter alternative="'selection_point()' or 'selection_interval()'", message="These functions also include more helpful docstrings.", ) -def selection(type: Optional[SelectionType_T] = Undefined, **kwds) -> Parameter: +def selection(type: Optional[SelectionType_T] = Undefined, **kwds: Any) -> Parameter: """'selection' is deprecated use 'selection_point' or 'selection_interval' instead, depending on the type of parameter you want to create.""" return _selection(type=type, **kwds) @@ -1353,7 +1370,7 @@ def selection_interval( mark: Optional[Mark] = Undefined, translate: Optional[str | bool] = Undefined, zoom: Optional[str | bool] = Undefined, - **kwds, + **kwds: Any, ) -> Parameter: """ Create an interval selection parameter. Selection parameters define data queries that are driven by direct manipulation from user input (e.g., mouse clicks or drags). Interval selection parameters are used to select a continuous range of data values on drag, whereas point selection parameters (`selection_point`) are used to select multiple discrete data values.). 
@@ -1466,7 +1483,7 @@ def selection_point( resolve: Optional[SelectionResolution_T] = Undefined, toggle: Optional[str | bool] = Undefined, nearest: Optional[bool] = Undefined, - **kwds, + **kwds: Any, ) -> Parameter: """ Create a point selection parameter. Selection parameters define data queries that are driven by direct manipulation from user input (e.g., mouse clicks or drags). Point selection parameters are used to select multiple discrete data values; the first value is selected on click and additional values toggled on shift-click. To select a continuous range of data values on drag interval selection parameters (`selection_interval`) can be used instead. @@ -1571,43 +1588,43 @@ def selection_point( @utils.deprecated(version="5.0.0", alternative="selection_point") -def selection_multi(**kwargs): +def selection_multi(**kwargs: Any) -> Parameter: """'selection_multi' is deprecated. Use 'selection_point'.""" return _selection(type="point", **kwargs) @utils.deprecated(version="5.0.0", alternative="selection_point") -def selection_single(**kwargs): +def selection_single(**kwargs: Any) -> Parameter: """'selection_single' is deprecated. 
Use 'selection_point'.""" return _selection(type="point", **kwargs) @utils.use_signature(core.Binding) -def binding(input, **kwargs): +def binding(input: Any, **kwargs: Any) -> Binding: """A generic binding.""" return core.Binding(input=input, **kwargs) @utils.use_signature(core.BindCheckbox) -def binding_checkbox(**kwargs): +def binding_checkbox(**kwargs: Any) -> BindCheckbox: """A checkbox binding.""" return core.BindCheckbox(input="checkbox", **kwargs) @utils.use_signature(core.BindRadioSelect) -def binding_radio(**kwargs): +def binding_radio(**kwargs: Any) -> BindRadioSelect: """A radio button binding.""" return core.BindRadioSelect(input="radio", **kwargs) @utils.use_signature(core.BindRadioSelect) -def binding_select(**kwargs): +def binding_select(**kwargs: Any) -> BindRadioSelect: """A select binding.""" return core.BindRadioSelect(input="select", **kwargs) @utils.use_signature(core.BindRange) -def binding_range(**kwargs): +def binding_range(**kwargs: Any) -> BindRange: """A range binding.""" return core.BindRange(input="range", **kwargs) @@ -1619,7 +1636,7 @@ def condition( if_false: _TSchemaBase, *, empty: Optional[bool] = ..., - **kwargs, + **kwargs: Any, ) -> _TSchemaBase: ... @overload def condition( @@ -1628,7 +1645,7 @@ def condition( if_false: Map | str, *, empty: Optional[bool] = ..., - **kwargs, + **kwargs: Any, ) -> dict[str, _ConditionType | Any]: ... @overload def condition( @@ -1637,11 +1654,11 @@ def condition( if_false: Map, *, empty: Optional[bool] = ..., - **kwargs, + **kwargs: Any, ) -> dict[str, _ConditionType | Any]: ... @overload def condition( - predicate: _PredicateType, if_true: str, if_false: str, **kwargs + predicate: _PredicateType, if_true: str, if_false: str, **kwargs: Any ) -> Never: ... 
# TODO: update the docstring def condition( @@ -1650,7 +1667,7 @@ def condition( if_false: _StatementType, *, empty: Optional[bool] = Undefined, - **kwargs, + **kwargs: Any, ) -> SchemaBase | dict[str, _ConditionType | Any]: """ A conditional attribute or encoding. @@ -1687,7 +1704,9 @@ def condition( # Top-level objects -def _top_schema_base(obj: Any, /): # -> +def _top_schema_base( # noqa: ANN202 + obj: Any, / +): # -> """ Enforces an intersection type w/ `SchemaBase` & `TopLevelMixin` objects. @@ -1706,14 +1725,14 @@ class TopLevelMixin(mixins.ConfigMethodMixin): _class_is_valid_at_instantiation: bool = False data: Any - def to_dict( + def to_dict( # noqa: C901 self, validate: bool = True, *, format: str = "vega-lite", ignore: list[str] | None = None, context: dict[str, Any] | None = None, - ) -> dict: + ) -> dict[str, Any]: """ Convert the chart to a dictionary suitable for JSON export. @@ -1840,7 +1859,7 @@ def to_json( format: str = "vega-lite", ignore: list[str] | None = None, context: dict[str, Any] | None = None, - **kwargs, + **kwargs: Any, ) -> str: """ Convert a chart to a JSON string. @@ -1885,7 +1904,7 @@ def to_html( fullhtml: bool = True, requirejs: bool = False, inline: bool = False, - **kwargs, + **kwargs: Any, ) -> str: """ Embed a Vega/Vega-Lite spec into an HTML page. @@ -1951,7 +1970,7 @@ def to_url(self, *, fullscreen: bool = False) -> str: fullscreen : bool If True, editor will open chart in fullscreen mode. Default False """ - from ...utils._importers import import_vl_convert + from altair.utils._importers import import_vl_convert vlc = import_vl_convert() if _using_vegafusion(): @@ -1985,8 +2004,8 @@ def save( embed_options: dict | None = None, json_kwds: dict | None = None, engine: str | None = None, - inline=False, - **kwargs, + inline: bool = False, + **kwargs: Any, ) -> None: """ Save a chart to file in a variety of formats. 
@@ -2041,7 +2060,7 @@ def save( version="5.0.0", ) - from ...utils.save import save + from altair.utils.save import save kwds: dict[str, Any] = dict( chart=self, @@ -2074,27 +2093,27 @@ def __repr__(self) -> str: return f"alt.{self.__class__.__name__}(...)" # Layering and stacking - def __add__(self, other) -> LayerChart: - if not isinstance(other, TopLevelMixin): + def __add__(self, other: ChartType) -> LayerChart: + if not is_chart_type(other): msg = "Only Chart objects can be layered." raise ValueError(msg) - return layer(self, other) + return layer(t.cast("ChartType", self), other) - def __and__(self, other) -> VConcatChart: - if not isinstance(other, TopLevelMixin): + def __and__(self, other: ChartType) -> VConcatChart: + if not is_chart_type(other): msg = "Only Chart objects can be concatenated." raise ValueError(msg) # Too difficult to type check this - return vconcat(self, other) + return vconcat(t.cast("ChartType", self), other) - def __or__(self, other) -> HConcatChart | ConcatChart: - if not isinstance(other, TopLevelMixin): + def __or__(self, other: ChartType) -> HConcatChart | ConcatChart: + if not is_chart_type(other): msg = "Only Chart objects can be concatenated." raise ValueError(msg) elif isinstance(self, ConcatChart): return concat(self, other) else: - return hconcat(self, other) + return hconcat(t.cast("ChartType", self), other) def repeat( self, @@ -2103,7 +2122,7 @@ def repeat( column: Optional[list[str]] = Undefined, layer: Optional[list[str]] = Undefined, columns: Optional[int] = Undefined, - **kwargs, + **kwargs: Any, ) -> RepeatChart: """ Return a RepeatChart built from the chart. 
@@ -2154,9 +2173,11 @@ def repeat( else: repeat_arg = core.RepeatMapping(row=row, column=column) - return RepeatChart(spec=self, repeat=repeat_arg, columns=columns, **kwargs) + return RepeatChart( + spec=t.cast("ChartType", self), repeat=repeat_arg, columns=columns, **kwargs + ) - def properties(self, **kwargs) -> Self: + def properties(self, **kwargs: Any) -> Self: """ Set top-level properties of the Chart. @@ -2204,7 +2225,7 @@ def project( translate: Optional[ list[float] | Vector2number | ExprRef | Parameter ] = Undefined, - **kwds, + **kwds: Any, ) -> Self: """ Add a geographic projection to the chart. @@ -2419,7 +2440,7 @@ def transform_bin( as_: Optional[str | FieldName | list[str | FieldName]] = Undefined, field: Optional[str | FieldName] = Undefined, bin: Literal[True] | BinParams = True, - **kwargs, + **kwargs: Any, ) -> Self: """ Add a :class:`BinTransform` to the schema. @@ -2629,7 +2650,7 @@ def transform_impute( groupby: Optional[list[str | FieldName]] = Undefined, keyvals: Optional[list[Any] | ImputeSequence] = Undefined, method: Optional[ImputeMethod_T | ImputeMethod] = Undefined, - value=Undefined, + value: Optional[Any] = Undefined, ) -> Self: """ Add an :class:`ImputeTransform` to the schema. @@ -2779,7 +2800,7 @@ def transform_filter( | Parameter | PredicateComposition | dict[str, Predicate | str | list | bool], - **kwargs, + **kwargs: Any, ) -> Self: """ Add a :class:`FilterTransform` to the schema. @@ -2918,7 +2939,7 @@ def transform_lookup( from_: Optional[LookupData | LookupSelection] = Undefined, as_: Optional[str | FieldName | list[str | FieldName]] = Undefined, default: Optional[str] = Undefined, - **kwargs, + **kwargs: Any, ) -> Self: """ Add a :class:`DataLookupTransform` or :class:`SelectionLookupTransform` to the chart. 
@@ -3376,7 +3397,7 @@ def transform_window( # Display-related methods - def _repr_mimebundle_(self, include=None, exclude=None): + def _repr_mimebundle_(self, *args, **kwds) -> MimeBundleType | None: # type:ignore[return] # noqa: ANN002, ANN003 """Return a MIME bundle for display in Jupyter frontends.""" # Catch errors explicitly to get around issues in Jupyter frontend # see https://github.com/ipython/ipython/issues/11038 @@ -3394,7 +3415,7 @@ def display( renderer: Optional[Literal["canvas", "svg"]] = Undefined, theme: Optional[str] = Undefined, actions: Optional[bool | dict] = Undefined, - **kwargs, + **kwargs: Any, ) -> None: """ Display chart in Jupyter notebook or JupyterLab. @@ -3436,17 +3457,17 @@ def display( @utils.deprecated(version="4.1.0", alternative="show") def serve( self, - ip="127.0.0.1", - port=8888, - n_retries=50, - files=None, - jupyter_warning=True, - open_browser=True, - http_server=None, - **kwargs, - ): + ip="127.0.0.1", # noqa: ANN001 + port=8888, # noqa: ANN001 + n_retries=50, # noqa: ANN001 + files=None, # noqa: ANN001 + jupyter_warning=True, # noqa: ANN001 + open_browser=True, # noqa: ANN001 + http_server=None, # noqa: ANN001 + **kwargs, # noqa: ANN003 + ) -> None: """'serve' is deprecated. 
Use 'show' instead.""" - from ...utils.server import serve + from altair.utils.server import serve html = io.StringIO() self.save(html, format="html", **kwargs) @@ -3476,7 +3497,7 @@ def show(self) -> None: display(self) @utils.use_signature(core.Resolve) - def _set_resolve(self, **kwargs): + def _set_resolve(self, **kwargs: Any): # noqa: ANN202 """Copy the chart and update the resolve property with kwargs.""" if not hasattr(self, "resolve"): msg = f"{self.__class__} object has no attribute " "'resolve'" @@ -3489,19 +3510,19 @@ def _set_resolve(self, **kwargs): return copy @utils.use_signature(core.AxisResolveMap) - def resolve_axis(self, *args, **kwargs) -> Self: + def resolve_axis(self, *args: Any, **kwargs: Any) -> Self: check = _top_schema_base(self) r = check._set_resolve(axis=core.AxisResolveMap(*args, **kwargs)) return t.cast("Self", r) @utils.use_signature(core.LegendResolveMap) - def resolve_legend(self, *args, **kwargs) -> Self: + def resolve_legend(self, *args: Any, **kwargs: Any) -> Self: check = _top_schema_base(self) r = check._set_resolve(legend=core.LegendResolveMap(*args, **kwargs)) return t.cast("Self", r) @utils.use_signature(core.ScaleResolveMap) - def resolve_scale(self, *args, **kwargs) -> Self: + def resolve_scale(self, *args: Any, **kwargs: Any) -> Self: check = _top_schema_base(self) r = check._set_resolve(scale=core.ScaleResolveMap(*args, **kwargs)) return t.cast("Self", r) @@ -3517,7 +3538,7 @@ def facet( column: Optional[str | FacetFieldDef | Column] = Undefined, data: Optional[ChartDataType] = Undefined, columns: Optional[int] = Undefined, - **kwargs, + **kwargs: Any, ) -> FacetChart: """ Create a facet chart from the current chart. 
@@ -3644,12 +3665,12 @@ def __init__( mark: Optional[str | AnyMark] = Undefined, width: Optional[int | str | dict | Step] = Undefined, height: Optional[int | str | dict | Step] = Undefined, - **kwargs, + **kwargs: Any, ) -> None: + # Data type hints won't match with what TopLevelUnitSpec expects + # as there is some data processing happening when converting to + # a VL spec super().__init__( - # Data type hints won't match with what TopLevelUnitSpec expects - # as there is some data processing happening when converting to - # a VL spec data=data, # type: ignore[arg-type] encoding=encoding, mark=mark, @@ -3707,7 +3728,7 @@ def to_dict( format: str = "vega-lite", ignore: list[str] | None = None, context: dict[str, Any] | None = None, - ) -> dict: + ) -> dict[str, Any]: """ Convert the chart to a dictionary suitable for JSON export. @@ -3788,7 +3809,7 @@ def add_params(self, *params: Parameter) -> Self: return copy @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *params) -> Self: + def add_selection(self, *params) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*params) @@ -3823,7 +3844,7 @@ def interactive( def _check_if_valid_subspec( - spec: Optional[SchemaBase | dict], + spec: ConcatType | LayerType, classname: Literal[ "ConcatChart", "FacetChart", @@ -3833,102 +3854,93 @@ def _check_if_valid_subspec( "VConcatChart", ], ) -> None: - """ - Check if the spec is a valid sub-spec. - - If it is not, then raise a ValueError - """ - err = ( - 'Objects with "{0}" attribute cannot be used within {1}. ' - "Consider defining the {0} attribute in the {1} object instead." - ) - - if not isinstance(spec, (core.SchemaBase, dict)): + """Raise a `TypeError` if `spec` is not a valid sub-spec.""" + if not isinstance(spec, core.SchemaBase): msg = f"Only chart objects can be used in {classname}." 
- raise ValueError(msg) + raise TypeError(msg) for attr in TOPLEVEL_ONLY_KEYS: - if isinstance(spec, core.SchemaBase): - val = getattr(spec, attr, Undefined) - else: - val = spec.get(attr, Undefined) - if val is not Undefined: - raise ValueError(err.format(attr, classname)) - + if spec._get(attr) is not Undefined: + msg = ( + f"Objects with {attr!r} attribute cannot be used within {classname}. " + f"Consider defining the {attr} attribute in the {classname} object instead." + ) + raise TypeError(msg) -def _check_if_can_be_layered(spec: dict | SchemaBase) -> None: - """Check if the spec can be layered.""" - def _get(spec, attr): - if isinstance(spec, core.SchemaBase): - return spec._get(attr) - else: - return spec.get(attr, Undefined) +def _check_if_can_be_layered(spec: LayerType) -> None: + """Raise a `TypeError` if `spec` cannot be layered.""" - def _get_any(spec: dict | SchemaBase, *attrs: str) -> bool: - return any(_get(spec, attr) is not Undefined for attr in attrs) + def _get_any(spec: LayerType, *attrs: str) -> bool: + return any(spec._get(attr) is not Undefined for attr in attrs) base_msg = "charts cannot be layered. Instead, layer the charts before" - encoding = _get(spec, "encoding") - if encoding is not Undefined: + encoding: Any = spec._get("encoding") + if not utils.is_undefined(encoding): for channel in ["row", "column", "facet"]: - if _get(encoding, channel) is not Undefined: + if encoding._get(channel) is not Undefined: msg = f"Faceted {base_msg} faceting." - raise ValueError(msg) + raise TypeError(msg) if isinstance(spec, (Chart, LayerChart)): return - - if not isinstance(spec, (core.SchemaBase, dict)): - msg = "Only chart objects can be layered." - raise ValueError(msg) - if isinstance(spec, FacetChart) or _get(spec, "facet") is not Undefined: - msg = f"Faceted {base_msg} faceting." - raise ValueError(msg) - if isinstance(spec, RepeatChart) or _get(spec, "repeat") is not Undefined: - msg = f"Repeat {base_msg} repeating." 
- raise ValueError(msg) - _concat = ConcatChart, HConcatChart, VConcatChart - if isinstance(spec, _concat) or _get_any(spec, "concat", "hconcat", "vconcat"): - msg = f"Concatenated {base_msg} concatenating." - raise ValueError(msg) + elif is_chart_type(spec) or _get_any( + spec, "facet", "repeat", "concat", "hconcat", "vconcat" + ): + if isinstance(spec, FacetChart) or spec._get("facet") is not Undefined: + msg = f"Faceted {base_msg} faceting." + elif isinstance(spec, RepeatChart) or spec._get("repeat") is not Undefined: + msg = f"Repeat {base_msg} repeating." + elif isinstance(spec, (ConcatChart, HConcatChart, VConcatChart)) or _get_any( + spec, "concat", "hconcat", "vconcat" + ): + msg = f"Concatenated {base_msg} concatenating." + else: + msg = "Should be unreachable" + raise NotImplementedError(msg) + raise TypeError(msg) class RepeatChart(TopLevelMixin, core.TopLevelRepeatSpec): """A chart repeated across rows and columns with small changes.""" - # Because TopLevelRepeatSpec is defined as a union as of Vega-Lite schema 4.9, - # we set the arguments explicitly here. - # TODO: Should we instead use tools/schemapi/codegen.get_args? 
- @utils.use_signature(core.TopLevelRepeatSpec) def __init__( self, - repeat=Undefined, - spec=Undefined, - align=Undefined, - autosize=Undefined, - background=Undefined, - bounds=Undefined, - center=Undefined, - columns=Undefined, - config=Undefined, - data=Undefined, - datasets=Undefined, - description=Undefined, - name=Undefined, - padding=Undefined, - params=Undefined, - resolve=Undefined, - spacing=Undefined, - title=Undefined, - transform=Undefined, - usermeta=Undefined, - **kwds, - ): + repeat: Optional[list[str] | LayerRepeatMapping | RepeatMapping] = Undefined, + spec: Optional[ChartType] = Undefined, + align: Optional[dict | SchemaBase | LayoutAlign_T] = Undefined, + autosize: Optional[dict | SchemaBase | AutosizeType_T] = Undefined, + background: Optional[ + str | dict | Parameter | SchemaBase | ColorName_T + ] = Undefined, + bounds: Optional[Literal["full", "flush"]] = Undefined, + center: Optional[bool | dict | SchemaBase] = Undefined, + columns: Optional[int] = Undefined, + config: Optional[dict | SchemaBase] = Undefined, + data: Optional[ChartDataType] = Undefined, + datasets: Optional[dict | SchemaBase] = Undefined, + description: Optional[str] = Undefined, + name: Optional[str] = Undefined, + padding: Optional[dict | float | Parameter | SchemaBase] = Undefined, + params: Optional[Sequence[_Parameter]] = Undefined, + resolve: Optional[dict | SchemaBase] = Undefined, + spacing: Optional[dict | float | SchemaBase] = Undefined, + title: Optional[str | dict | SchemaBase | Sequence[str]] = Undefined, + transform: Optional[Sequence[dict | SchemaBase]] = Undefined, + usermeta: Optional[dict | SchemaBase] = Undefined, + **kwds: Any, + ) -> None: + tp_name = type(self).__name__ + if utils.is_undefined(spec): + msg = f"{tp_name!r} requires a `spec`, but got: {spec!r}" + raise TypeError(msg) _check_if_valid_subspec(spec, "RepeatChart") _spec_as_list = [spec] params, _spec_as_list = _combine_subchart_params(params, _spec_as_list) spec = _spec_as_list[0] if 
isinstance(spec, (Chart, LayerChart)): + if utils.is_undefined(repeat): + msg = f"{tp_name!r} requires a `repeat`, but got: {repeat!r}" + raise TypeError(msg) params = _repeat_names(params, repeat, spec) super().__init__( repeat=repeat, @@ -4013,7 +4025,7 @@ def add_params(self, *params: Parameter) -> Self: return copy.copy() @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) @@ -4046,22 +4058,30 @@ class ConcatChart(TopLevelMixin, core.TopLevelConcatSpec): """A chart with horizontally-concatenated facets.""" @utils.use_signature(core.TopLevelConcatSpec) - def __init__(self, data=Undefined, concat=(), columns=Undefined, **kwargs): - # TODO: move common data to top level? + def __init__( + self, + data: Optional[ChartDataType] = Undefined, + concat: Sequence[ConcatType] = (), + columns: Optional[float] = Undefined, + **kwargs: Any, + ) -> None: for spec in concat: _check_if_valid_subspec(spec, "ConcatChart") - super().__init__(data=data, concat=list(concat), columns=columns, **kwargs) + super().__init__(data=data, concat=list(concat), columns=columns, **kwargs) # type: ignore[arg-type] + self.concat: list[ChartType] + self.params: Optional[Sequence[_Parameter]] + self.data: Optional[ChartDataType] self.data, self.concat = _combine_subchart_data(self.data, self.concat) self.params, self.concat = _combine_subchart_params(self.params, self.concat) - def __ior__(self, other) -> Self: + def __ior__(self, other: ChartType) -> Self: _check_if_valid_subspec(other, "ConcatChart") self.concat.append(other) self.data, self.concat = _combine_subchart_data(self.data, self.concat) self.params, self.concat = _combine_subchart_params(self.params, self.concat) return self - def __or__(self, other) -> Self: + def __or__(self, other: ChartType) -> Self: copy = 
self.copy(deep=["concat"]) copy |= other return copy @@ -4129,12 +4149,12 @@ def add_params(self, *params: Parameter) -> Self: return copy @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) -def concat(*charts, **kwargs) -> ConcatChart: +def concat(*charts: ConcatType, **kwargs: Any) -> ConcatChart: """Concatenate charts horizontally.""" return ConcatChart(concat=charts, **kwargs) # pyright: ignore @@ -4143,22 +4163,29 @@ class HConcatChart(TopLevelMixin, core.TopLevelHConcatSpec): """A chart with horizontally-concatenated facets.""" @utils.use_signature(core.TopLevelHConcatSpec) - def __init__(self, data=Undefined, hconcat=(), **kwargs): - # TODO: move common data to top level? + def __init__( + self, + data: Optional[ChartDataType] = Undefined, + hconcat: Sequence[ConcatType] = (), + **kwargs: Any, + ) -> None: for spec in hconcat: _check_if_valid_subspec(spec, "HConcatChart") - super().__init__(data=data, hconcat=list(hconcat), **kwargs) + super().__init__(data=data, hconcat=list(hconcat), **kwargs) # type: ignore[arg-type] + self.hconcat: list[ChartType] + self.params: Optional[Sequence[_Parameter]] + self.data: Optional[ChartDataType] self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat) self.params, self.hconcat = _combine_subchart_params(self.params, self.hconcat) - def __ior__(self, other) -> Self: + def __ior__(self, other: ChartType) -> Self: _check_if_valid_subspec(other, "HConcatChart") self.hconcat.append(other) self.data, self.hconcat = _combine_subchart_data(self.data, self.hconcat) self.params, self.hconcat = _combine_subchart_params(self.params, self.hconcat) return self - def __or__(self, other) -> Self: + def __or__(self, other: ChartType) -> Self: copy = self.copy(deep=["hconcat"]) copy |= other return copy @@ 
-4226,12 +4253,12 @@ def add_params(self, *params: Parameter) -> Self: return copy @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) -def hconcat(*charts, **kwargs) -> HConcatChart: +def hconcat(*charts: ConcatType, **kwargs: Any) -> HConcatChart: """Concatenate charts horizontally.""" return HConcatChart(hconcat=charts, **kwargs) # pyright: ignore @@ -4240,22 +4267,29 @@ class VConcatChart(TopLevelMixin, core.TopLevelVConcatSpec): """A chart with vertically-concatenated facets.""" @utils.use_signature(core.TopLevelVConcatSpec) - def __init__(self, data=Undefined, vconcat=(), **kwargs): - # TODO: move common data to top level? + def __init__( + self, + data: Optional[ChartDataType] = Undefined, + vconcat: Sequence[ConcatType] = (), + **kwargs: Any, + ) -> None: for spec in vconcat: _check_if_valid_subspec(spec, "VConcatChart") - super().__init__(data=data, vconcat=list(vconcat), **kwargs) + super().__init__(data=data, vconcat=list(vconcat), **kwargs) # type: ignore[arg-type] + self.vconcat: list[ChartType] + self.params: Optional[Sequence[_Parameter]] + self.data: Optional[ChartDataType] self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat) self.params, self.vconcat = _combine_subchart_params(self.params, self.vconcat) - def __iand__(self, other) -> Self: + def __iand__(self, other: ChartType) -> Self: _check_if_valid_subspec(other, "VConcatChart") self.vconcat.append(other) self.data, self.vconcat = _combine_subchart_data(self.data, self.vconcat) self.params, self.vconcat = _combine_subchart_params(self.params, self.vconcat) return self - def __and__(self, other) -> Self: + def __and__(self, other: ChartType) -> Self: copy = self.copy(deep=["vconcat"]) copy &= other return copy @@ -4325,12 +4359,12 @@ def add_params(self, 
*params: Parameter) -> Self: return copy @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) -def vconcat(*charts, **kwargs) -> VConcatChart: +def vconcat(*charts: ConcatType, **kwargs: Any) -> VConcatChart: """Concatenate charts vertically.""" return VConcatChart(vconcat=charts, **kwargs) # pyright: ignore @@ -4339,13 +4373,20 @@ class LayerChart(TopLevelMixin, _EncodingMixin, core.TopLevelLayerSpec): """A Chart with layers within a single panel.""" @utils.use_signature(core.TopLevelLayerSpec) - def __init__(self, data=Undefined, layer=(), **kwargs): - # TODO: move common data to top level? + def __init__( + self, + data: Optional[ChartDataType] = Undefined, + layer: Sequence[LayerType] = (), + **kwargs: Any, + ) -> None: # TODO: check for conflicting interaction for spec in layer: _check_if_valid_subspec(spec, "LayerChart") _check_if_can_be_layered(spec) - super().__init__(data=data, layer=list(layer), **kwargs) + super().__init__(data=data, layer=list(layer), **kwargs) # type: ignore[arg-type] + self.layer: list[ChartType] + self.params: Optional[Sequence[_Parameter]] + self.data: Optional[ChartDataType] self.data, self.layer = _combine_subchart_data(self.data, self.layer) # Currently (Vega-Lite 5.5) the same param can't occur on two layers self.layer = _remove_duplicate_params(self.layer) @@ -4385,7 +4426,7 @@ def transformed_data( return transformed_data(self, row_limit=row_limit, exclude=exclude) - def __iadd__(self, other: LayerChart | Chart) -> Self: + def __iadd__(self, other: ChartType) -> Self: _check_if_valid_subspec(other, "LayerChart") _check_if_can_be_layered(other) self.layer.append(other) @@ -4393,7 +4434,7 @@ def __iadd__(self, other: LayerChart | Chart) -> Self: self.params, self.layer = _combine_subchart_params(self.params, 
self.layer) return self - def __add__(self, other: LayerChart | Chart) -> Self: + def __add__(self, other: ChartType) -> Self: copy = self.copy(deep=["layer"]) copy += other return copy @@ -4444,12 +4485,12 @@ def add_params(self, *params: Parameter) -> Self: return copy.copy() @utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) -def layer(*charts, **kwargs) -> LayerChart: +def layer(*charts: LayerType, **kwargs: Any) -> LayerChart: """Layer multiple charts.""" return LayerChart(layer=charts, **kwargs) # pyright: ignore @@ -4460,17 +4501,23 @@ class FacetChart(TopLevelMixin, core.TopLevelFacetSpec): @utils.use_signature(core.TopLevelFacetSpec) def __init__( self, - data=Undefined, - spec=Undefined, - facet=Undefined, - params=Undefined, - **kwargs, - ): + data: Optional[ChartDataType] = Undefined, + spec: Optional[ChartType] = Undefined, + facet: Optional[dict | SchemaBase] = Undefined, + params: Optional[Sequence[_Parameter]] = Undefined, + **kwargs: Any, + ) -> None: + if utils.is_undefined(spec): + msg = f"{type(self).__name__!r} requires a `spec`, but got: {spec!r}" + raise TypeError(msg) _check_if_valid_subspec(spec, "FacetChart") _spec_as_list = [spec] params, _spec_as_list = _combine_subchart_params(params, _spec_as_list) spec = _spec_as_list[0] - super().__init__(data=data, spec=spec, facet=facet, params=params, **kwargs) + super().__init__(data=data, spec=spec, facet=facet, params=params, **kwargs) # type: ignore[arg-type] + self.data: Optional[ChartDataType] + self.spec: ChartType + self.params: Optional[Sequence[_Parameter]] def transformed_data( self, row_limit: int | None = None, exclude: Iterable[str] | None = None @@ -4532,12 +4579,12 @@ def add_params(self, *params: Parameter) -> Self: return copy.copy() 
@utils.deprecated(version="5.0.0", alternative="add_params") - def add_selection(self, *selections) -> Self: + def add_selection(self, *selections) -> Self: # noqa: ANN002 """'add_selection' is deprecated. Use 'add_params' instead.""" return self.add_params(*selections) -def topo_feature(url: str, feature: str, **kwargs) -> UrlData: +def topo_feature(url: str, feature: str, **kwargs: Any) -> UrlData: """ A convenience function for extracting features from a topojson url. @@ -4560,8 +4607,10 @@ def topo_feature(url: str, feature: str, **kwargs) -> UrlData: ) -def _combine_subchart_data(data, subcharts): - def remove_data(subchart): +def _combine_subchart_data( + data: Optional[ChartDataType], subcharts: list[ChartType] +) -> tuple[Optional[ChartDataType], list[ChartType]]: + def remove_data(subchart: _TSchemaBase) -> _TSchemaBase: if subchart.data is not Undefined: subchart = subchart.copy() subchart.data = Undefined @@ -4585,13 +4634,18 @@ def remove_data(subchart): return data, subcharts -def _viewless_dict(param: Parameter) -> dict: +_Parameter: TypeAlias = Union[ + core.VariableParameter, core.TopLevelSelectionParameter, core.SelectionParameter +] + + +def _viewless_dict(param: _Parameter) -> dict[str, Any]: d = param.to_dict() d.pop("views", None) return d -def _needs_name(subchart): +def _needs_name(subchart: ChartType) -> bool: # Only `Chart` objects need a name if (subchart.name is not Undefined) or (not isinstance(subchart, Chart)): return False @@ -4601,7 +4655,7 @@ def _needs_name(subchart): # Convert SelectionParameters to TopLevelSelectionParameters with a views property. 
-def _prepare_to_lift(param: Any) -> Any: +def _prepare_to_lift(param: _Parameter) -> _Parameter: param = param.copy() if isinstance(param, core.VariableParameter): @@ -4616,15 +4670,15 @@ def _prepare_to_lift(param: Any) -> Any: return param -def _remove_duplicate_params(layer): +def _remove_duplicate_params(layer: list[ChartType]) -> list[ChartType]: subcharts = [subchart.copy() for subchart in layer] found_params = [] for subchart in subcharts: - if (not hasattr(subchart, "params")) or (subchart.params is Undefined): + if (not hasattr(subchart, "params")) or (utils.is_undefined(subchart.params)): continue - params = [] + params: list[_Parameter] = [] # Ensure the same selection parameter doesn't appear twice for param in subchart.params: @@ -4647,12 +4701,14 @@ def _remove_duplicate_params(layer): return subcharts -def _combine_subchart_params(params, subcharts): - if params is Undefined: +def _combine_subchart_params( # noqa: C901 + params: Optional[Sequence[_Parameter]], subcharts: list[ChartType] +) -> tuple[Optional[Sequence[_Parameter]], list[ChartType]]: + if utils.is_undefined(params): params = [] # List of triples related to params, (param, dictionary minus views, views) - param_info = [] + param_info: list[tuple[_Parameter, dict[str, Any], list[str]]] = [] # Put parameters already found into `param_info` list. 
for param in params: @@ -4668,7 +4724,7 @@ def _combine_subchart_params(params, subcharts): subcharts = [subchart.copy() for subchart in subcharts] for subchart in subcharts: - if (not hasattr(subchart, "params")) or (subchart.params is Undefined): + if (not hasattr(subchart, "params")) or (utils.is_undefined(subchart.params)): continue if _needs_name(subchart): @@ -4707,7 +4763,7 @@ def _combine_subchart_params(params, subcharts): if len(v) > 0: p.views = v - subparams = [p for p, _, _ in param_info] + subparams: Any = [p for p, _, _ in param_info] if len(subparams) == 0: subparams = Undefined @@ -4715,7 +4771,9 @@ def _combine_subchart_params(params, subcharts): return subparams, subcharts -def _get_repeat_strings(repeat): +def _get_repeat_strings( + repeat: list[str] | LayerRepeatMapping | RepeatMapping, +) -> list[str]: if isinstance(repeat, list): return repeat elif isinstance(repeat, core.LayerRepeatMapping): @@ -4727,7 +4785,7 @@ def _get_repeat_strings(repeat): return ["".join(s) for s in itertools.product(*rcstrings)] -def _extend_view_name(v, r, spec): +def _extend_view_name(v: str, r: str, spec: Chart | LayerChart) -> str: # prevent the same extension from happening more than once if isinstance(spec, Chart): if v.endswith("child__" + r): @@ -4739,14 +4797,21 @@ def _extend_view_name(v, r, spec): return v else: return f"child__{r}_{v}" + else: + msg = f"Expected 'Chart | LayerChart', but got: {type(spec).__name__!r}" + raise TypeError(msg) -def _repeat_names(params, repeat, spec): - if params is Undefined: +def _repeat_names( + params: Optional[Sequence[_Parameter]], + repeat: list[str] | LayerRepeatMapping | RepeatMapping, + spec: Chart | LayerChart, +) -> Optional[Sequence[_Parameter]]: + if utils.is_undefined(params): return params repeat = _get_repeat_strings(repeat) - params_named = [] + params_named: list[_Parameter] = [] for param in params: if not isinstance(param, core.TopLevelSelectionParameter): @@ -4773,8 +4838,10 @@ def _repeat_names(params, 
repeat, spec): return params_named -def _remove_layer_props(chart, subcharts, layer_props): - def remove_prop(subchart, prop): +def _remove_layer_props( # noqa: C901 + chart: LayerChart, subcharts: list[ChartType], layer_props: Iterable[str] +) -> tuple[dict[str, Any], list[ChartType]]: + def remove_prop(subchart: ChartType, prop: str) -> ChartType: # If subchart is a UnitSpec, then subchart["height"] raises a KeyError try: if subchart[prop] is not Undefined: @@ -4784,7 +4851,7 @@ def remove_prop(subchart, prop): pass return subchart - output_dict = {} + output_dict: dict[str, Any] = {} if not subcharts: # No subcharts = nothing to do. @@ -4827,7 +4894,11 @@ def remove_prop(subchart, prop): @utils.use_signature(core.SequenceParams) def sequence( - start, stop=None, step=Undefined, as_=Undefined, **kwds + start: Optional[float], + stop: Optional[float | None] = None, + step: Optional[float] = Undefined, + as_: Optional[str] = Undefined, + **kwds: Any, ) -> SequenceGenerator: """Sequence generator.""" if stop is None: @@ -4837,7 +4908,7 @@ def sequence( @utils.use_signature(core.GraticuleParams) -def graticule(**kwds) -> GraticuleGenerator: +def graticule(**kwds: Any) -> GraticuleGenerator: """Graticule generator.""" # graticule: True indicates default parameters graticule: Any = core.GraticuleParams(**kwds) if kwds else True @@ -4849,9 +4920,24 @@ def sphere() -> SphereGenerator: return core.SphereGenerator(sphere=True) -ChartType = Union[ +ChartType: TypeAlias = Union[ Chart, RepeatChart, ConcatChart, HConcatChart, VConcatChart, FacetChart, LayerChart ] +ConcatType: TypeAlias = Union[ + ChartType, + core.FacetSpec, + core.LayerSpec, + core.RepeatSpec, + core.FacetedUnitSpec, + core.LayerRepeatSpec, + core.NonNormalizedSpec, + core.NonLayerRepeatSpec, + core.ConcatSpecGenericSpec, + core.ConcatSpecGenericSpec, + core.HConcatSpecGenericSpec, + core.VConcatSpecGenericSpec, +] +LayerType: TypeAlias = Union[ChartType, core.UnitSpec, core.LayerSpec] def 
is_chart_type(obj: Any) -> TypeIs[ChartType]: diff --git a/altair/vegalite/v5/compiler.py b/altair/vegalite/v5/compiler.py index b01ee4187..70ccbd3b7 100644 --- a/altair/vegalite/v5/compiler.py +++ b/altair/vegalite/v5/compiler.py @@ -1,8 +1,7 @@ -from ...utils._importers import import_vl_convert -from ...utils.compiler import VegaLiteCompilerRegistry - from typing import Final +from altair.utils._importers import import_vl_convert +from altair.utils.compiler import VegaLiteCompilerRegistry ENTRY_POINT_GROUP: Final = "altair.vegalite.v5.vegalite_compiler" vegalite_compilers = VegaLiteCompilerRegistry(entry_point_group=ENTRY_POINT_GROUP) diff --git a/altair/vegalite/v5/data.py b/altair/vegalite/v5/data.py index 3b69a92c5..6740c9d51 100644 --- a/altair/vegalite/v5/data.py +++ b/altair/vegalite/v5/data.py @@ -1,4 +1,8 @@ -from ..data import ( +from typing import Final + +from altair.utils._vegafusion_data import vegafusion_data_transformer +from altair.vegalite.data import ( + DataTransformerRegistry, MaxRowsError, default_data_transformer, limit_rows, @@ -6,14 +10,8 @@ to_csv, to_json, to_values, - DataTransformerRegistry, ) -from ...utils._vegafusion_data import vegafusion_data_transformer - -from typing import Final - - # ============================================================================== # VegaLite 5 data transformers # ============================================================================== diff --git a/altair/vegalite/v5/display.py b/altair/vegalite/v5/display.py index f6a85835c..aead52a4d 100644 --- a/altair/vegalite/v5/display.py +++ b/altair/vegalite/v5/display.py @@ -1,20 +1,21 @@ from __future__ import annotations + from pathlib import Path -from typing import Final, TYPE_CHECKING +from typing import TYPE_CHECKING, Final -from ...utils.mimebundle import spec_to_mimebundle -from ..display import ( +from altair.utils.mimebundle import spec_to_mimebundle +from altair.vegalite.display import ( Displayable, + HTMLRenderer, + RendererRegistry, 
default_renderer_base, json_renderer_base, - RendererRegistry, - HTMLRenderer, ) from .schema import SCHEMA_VERSION if TYPE_CHECKING: - from ..display import DefaultRendererReturnType + from altair.vegalite.display import DefaultRendererReturnType VEGALITE_VERSION: Final = SCHEMA_VERSION.lstrip("v") diff --git a/altair/vegalite/v5/schema/__init__.py b/altair/vegalite/v5/schema/__init__.py index 29c19160d..1f099eaca 100644 --- a/altair/vegalite/v5/schema/__init__.py +++ b/altair/vegalite/v5/schema/__init__.py @@ -3,6 +3,6 @@ from .core import * from .channels import * -SCHEMA_VERSION = "v5.19.0" +SCHEMA_VERSION = "v5.20.1" -SCHEMA_URL = "https://vega.github.io/schema/vega-lite/v5.19.0.json" +SCHEMA_URL = "https://vega.github.io/schema/vega-lite/v5.20.1.json" diff --git a/altair/vegalite/v5/schema/_typing.py b/altair/vegalite/v5/schema/_typing.py index 80467c45b..bcf6d5090 100644 --- a/altair/vegalite/v5/schema/_typing.py +++ b/altair/vegalite/v5/schema/_typing.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any, Literal, Mapping, Sequence, TypeVar, Union - from typing_extensions import TypeAlias, TypeAliasType __all__ = [ @@ -57,6 +56,7 @@ "TitleOrient_T", "TypeForShape_T", "Type_T", + "VegaThemes", "WindowOnlyOp_T", ] @@ -77,6 +77,22 @@ def func( ): ... 
""" +VegaThemes: TypeAlias = Literal[ + "carbong10", + "carbong100", + "carbong90", + "carbonwhite", + "dark", + "excel", + "fivethirtyeight", + "ggplot2", + "googlecharts", + "latimes", + "powerbi", + "quartz", + "urbaninstitute", + "vox", +] Map: TypeAlias = Mapping[str, Any] AggregateOp_T: TypeAlias = Literal[ "argmax", diff --git a/altair/vegalite/v5/schema/channels.py b/altair/vegalite/v5/schema/channels.py index dace3ef9f..2ff4190b3 100644 --- a/altair/vegalite/v5/schema/channels.py +++ b/altair/vegalite/v5/schema/channels.py @@ -11,24 +11,24 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Literal, Sequence, overload +from typing import TYPE_CHECKING, Any, Literal, Sequence, TypedDict, Union, overload +from typing_extensions import TypeAlias -from narwhals.dependencies import is_pandas_dataframe as _is_pandas_dataframe +import narwhals.stable.v1 as nw from altair.utils import infer_encoding_types as _infer_encoding_types from altair.utils import parse_shorthand from altair.utils.schemapi import Undefined, with_property_setters from . import core +from ._typing import * # noqa: F403 # ruff: noqa: F405 if TYPE_CHECKING: from typing_extensions import Self from altair import Parameter, SchemaBase - from altair.utils.schemapi import Optional - - from ._typing import * # noqa: F403 + from altair.typing import Optional __all__ = [ @@ -170,7 +170,8 @@ def to_dict( if shorthand is Undefined: parsed = {} elif isinstance(shorthand, (str, dict)): - parsed = parse_shorthand(shorthand, data=context.get("data", None)) + data: nw.DataFrame | Any = context.get("data", None) + parsed = parse_shorthand(shorthand, data=data) type_required = "type" in self._kwds # type: ignore[attr-defined] type_in_shorthand = "type" in parsed type_defined_explicitly = self._get("type") is not Undefined # type: ignore[attr-defined] @@ -179,7 +180,7 @@ def to_dict( # We still parse it out of the shorthand, but drop it here. 
parsed.pop("type", None) elif not (type_in_shorthand or type_defined_explicitly): - if _is_pandas_dataframe(context.get("data", None)): + if isinstance(data, nw.DataFrame): msg = ( f'Unable to determine data type for the field "{shorthand}";' " verify that the field name is not misspelled." @@ -30897,6 +30898,58 @@ def __init__(self, value, **kwds): super().__init__(value=value, **kwds) +ChannelAngle: TypeAlias = Union[str, Angle, Map, AngleDatum, AngleValue] +ChannelColor: TypeAlias = Union[str, Color, Map, ColorDatum, ColorValue] +ChannelColumn: TypeAlias = Union[str, Column, Map] +ChannelDescription: TypeAlias = Union[str, Description, Map, DescriptionValue] +ChannelDetail: TypeAlias = OneOrSeq[Union[str, Detail, Map]] +ChannelFacet: TypeAlias = Union[str, Facet, Map] +ChannelFill: TypeAlias = Union[str, Fill, Map, FillDatum, FillValue] +ChannelFillOpacity: TypeAlias = Union[ + str, FillOpacity, Map, FillOpacityDatum, FillOpacityValue +] +ChannelHref: TypeAlias = Union[str, Href, Map, HrefValue] +ChannelKey: TypeAlias = Union[str, Key, Map] +ChannelLatitude: TypeAlias = Union[str, Latitude, Map, LatitudeDatum] +ChannelLatitude2: TypeAlias = Union[str, Latitude2, Map, Latitude2Datum, Latitude2Value] +ChannelLongitude: TypeAlias = Union[str, Longitude, Map, LongitudeDatum] +ChannelLongitude2: TypeAlias = Union[ + str, Longitude2, Map, Longitude2Datum, Longitude2Value +] +ChannelOpacity: TypeAlias = Union[str, Opacity, Map, OpacityDatum, OpacityValue] +ChannelOrder: TypeAlias = OneOrSeq[Union[str, Order, Map, OrderValue]] +ChannelRadius: TypeAlias = Union[str, Radius, Map, RadiusDatum, RadiusValue] +ChannelRadius2: TypeAlias = Union[str, Radius2, Map, Radius2Datum, Radius2Value] +ChannelRow: TypeAlias = Union[str, Row, Map] +ChannelShape: TypeAlias = Union[str, Shape, Map, ShapeDatum, ShapeValue] +ChannelSize: TypeAlias = Union[str, Size, Map, SizeDatum, SizeValue] +ChannelStroke: TypeAlias = Union[str, Stroke, Map, StrokeDatum, StrokeValue] +ChannelStrokeDash: 
TypeAlias = Union[ + str, StrokeDash, Map, StrokeDashDatum, StrokeDashValue +] +ChannelStrokeOpacity: TypeAlias = Union[ + str, StrokeOpacity, Map, StrokeOpacityDatum, StrokeOpacityValue +] +ChannelStrokeWidth: TypeAlias = Union[ + str, StrokeWidth, Map, StrokeWidthDatum, StrokeWidthValue +] +ChannelText: TypeAlias = Union[str, Text, Map, TextDatum, TextValue] +ChannelTheta: TypeAlias = Union[str, Theta, Map, ThetaDatum, ThetaValue] +ChannelTheta2: TypeAlias = Union[str, Theta2, Map, Theta2Datum, Theta2Value] +ChannelTooltip: TypeAlias = OneOrSeq[Union[str, Tooltip, Map, TooltipValue]] +ChannelUrl: TypeAlias = Union[str, Url, Map, UrlValue] +ChannelX: TypeAlias = Union[str, X, Map, XDatum, XValue] +ChannelX2: TypeAlias = Union[str, X2, Map, X2Datum, X2Value] +ChannelXError: TypeAlias = Union[str, XError, Map, XErrorValue] +ChannelXError2: TypeAlias = Union[str, XError2, Map, XError2Value] +ChannelXOffset: TypeAlias = Union[str, XOffset, Map, XOffsetDatum, XOffsetValue] +ChannelY: TypeAlias = Union[str, Y, Map, YDatum, YValue] +ChannelY2: TypeAlias = Union[str, Y2, Map, Y2Datum, Y2Value] +ChannelYError: TypeAlias = Union[str, YError, Map, YErrorValue] +ChannelYError2: TypeAlias = Union[str, YError2, Map, YError2Value] +ChannelYOffset: TypeAlias = Union[str, YOffset, Map, YOffsetDatum, YOffsetValue] + + class _EncodingMixin: def encode( self, @@ -30905,7 +30958,7 @@ def encode( color: Optional[str | Color | Map | ColorDatum | ColorValue] = Undefined, column: Optional[str | Column | Map] = Undefined, description: Optional[str | Description | Map | DescriptionValue] = Undefined, - detail: Optional[str | Detail | Map | list] = Undefined, + detail: Optional[OneOrSeq[str | Detail | Map]] = Undefined, facet: Optional[str | Facet | Map] = Undefined, fill: Optional[str | Fill | Map | FillDatum | FillValue] = Undefined, fillOpacity: Optional[ @@ -30924,7 +30977,7 @@ def encode( opacity: Optional[ str | Opacity | Map | OpacityDatum | OpacityValue ] = Undefined, - order: 
Optional[str | Order | Map | list | OrderValue] = Undefined, + order: Optional[OneOrSeq[str | Order | Map | OrderValue]] = Undefined, radius: Optional[str | Radius | Map | RadiusDatum | RadiusValue] = Undefined, radius2: Optional[ str | Radius2 | Map | Radius2Datum | Radius2Value @@ -30945,7 +30998,7 @@ def encode( text: Optional[str | Text | Map | TextDatum | TextValue] = Undefined, theta: Optional[str | Theta | Map | ThetaDatum | ThetaValue] = Undefined, theta2: Optional[str | Theta2 | Map | Theta2Datum | Theta2Value] = Undefined, - tooltip: Optional[str | Tooltip | Map | list | TooltipValue] = Undefined, + tooltip: Optional[OneOrSeq[str | Tooltip | Map | TooltipValue]] = Undefined, url: Optional[str | Url | Map | UrlValue] = Undefined, x: Optional[str | X | Map | XDatum | XValue] = Undefined, x2: Optional[str | X2 | Map | X2Datum | X2Value] = Undefined, @@ -31189,3 +31242,245 @@ def encode( encoding.update(kwargs) copy.encoding = core.FacetedEncoding(**encoding) return copy + + +class EncodeKwds(TypedDict, total=False): + """ + Encoding channels map properties of the data to visual properties of the chart. + + Parameters + ---------- + angle + Rotation angle of point and text marks. + color + Color of the marks - either fill or stroke color based on the ``filled`` property + of mark definition. By default, ``color`` represents fill color for ``"area"``, + ``"bar"``, ``"tick"``, ``"text"``, ``"trail"``, ``"circle"``, and ``"square"`` / + stroke color for ``"line"`` and ``"point"``. + + **Default value:** If undefined, the default color depends on `mark config + `__'s ``color`` + property. + + *Note:* 1) For fine-grained control over both fill and stroke colors of the marks, + please use the ``fill`` and ``stroke`` channels. The ``fill`` or ``stroke`` + encodings have higher precedence than ``color``, thus may override the ``color`` + encoding if conflicting encodings are specified. 
2) See the scale documentation for + more information about customizing `color scheme + `__. + column + A field definition for the horizontal facet of trellis plots. + description + A text description of this mark for ARIA accessibility (SVG output only). For SVG + output the ``"aria-label"`` attribute will be set to this description. + detail + Additional levels of detail for grouping data in aggregate views and in line, trail, + and area marks without mapping data to a specific visual channel. + facet + A field definition for the (flexible) facet of trellis plots. + + If either ``row`` or ``column`` is specified, this channel will be ignored. + fill + Fill color of the marks. **Default value:** If undefined, the default color depends + on `mark config `__'s + ``color`` property. + + *Note:* The ``fill`` encoding has higher precedence than ``color``, thus may + override the ``color`` encoding if conflicting encodings are specified. + fillOpacity + Fill opacity of the marks. + + **Default value:** If undefined, the default opacity depends on `mark config + `__'s ``fillOpacity`` + property. + href + A URL to load upon mouse click. + key + A data field to use as a unique key for data binding. When a visualization's data is + updated, the key value will be used to match data elements to existing mark + instances. Use a key channel to enable object constancy for transitions over dynamic + data. + latitude + Latitude position of geographically projected marks. + latitude2 + Latitude-2 position for geographically projected ranged ``"area"``, ``"bar"``, + ``"rect"``, and ``"rule"``. + longitude + Longitude position of geographically projected marks. + longitude2 + Longitude-2 position for geographically projected ranged ``"area"``, ``"bar"``, + ``"rect"``, and ``"rule"``. + opacity + Opacity of the marks. + + **Default value:** If undefined, the default opacity depends on `mark config + `__'s ``opacity`` + property. + order + Order of the marks. 
+ + * For stacked marks, this ``order`` channel encodes `stack order + `__. + * For line and trail marks, this ``order`` channel encodes order of data points in + the lines. This can be useful for creating `a connected scatterplot + `__. Setting + ``order`` to ``{"value": null}`` makes the line marks use the original order in + the data sources. + * Otherwise, this ``order`` channel encodes layer order of the marks. + + **Note**: In aggregate plots, ``order`` field should be ``aggregate``d to avoid + creating additional aggregation grouping. + radius + The outer radius in pixels of arc marks. + radius2 + The inner radius in pixels of arc marks. + row + A field definition for the vertical facet of trellis plots. + shape + Shape of the mark. + + 1. For ``point`` marks the supported values include: - plotting shapes: + ``"circle"``, ``"square"``, ``"cross"``, ``"diamond"``, ``"triangle-up"``, + ``"triangle-down"``, ``"triangle-right"``, or ``"triangle-left"``. - the line + symbol ``"stroke"`` - centered directional shapes ``"arrow"``, ``"wedge"``, or + ``"triangle"`` - a custom `SVG path string + `__ (For correct + sizing, custom shape paths should be defined within a square bounding box with + coordinates ranging from -1 to 1 along both the x and y dimensions.) + + 2. For ``geoshape`` marks it should be a field definition of the geojson data + + **Default value:** If undefined, the default shape depends on `mark config + `__'s ``shape`` + property. (``"circle"`` if unset.) + size + Size of the mark. + + * For ``"point"``, ``"square"`` and ``"circle"``, - the symbol size, or pixel area + of the mark. + * For ``"bar"`` and ``"tick"`` - the bar and tick's size. + * For ``"text"`` - the text's font size. + * Size is unsupported for ``"line"``, ``"area"``, and ``"rect"``. (Use ``"trail"`` + instead of line with varying size) + stroke + Stroke color of the marks. **Default value:** If undefined, the default color + depends on `mark config + `__'s ``color`` + property. 
+ + *Note:* The ``stroke`` encoding has higher precedence than ``color``, thus may + override the ``color`` encoding if conflicting encodings are specified. + strokeDash + Stroke dash of the marks. + + **Default value:** ``[1,0]`` (No dash). + strokeOpacity + Stroke opacity of the marks. + + **Default value:** If undefined, the default opacity depends on `mark config + `__'s + ``strokeOpacity`` property. + strokeWidth + Stroke width of the marks. + + **Default value:** If undefined, the default stroke width depends on `mark config + `__'s ``strokeWidth`` + property. + text + Text of the ``text`` mark. + theta + * For arc marks, the arc length in radians if theta2 is not specified, otherwise the + start arc angle. (A value of 0 indicates up or “north”, increasing values proceed + clockwise.) + + * For text marks, polar coordinate angle in radians. + theta2 + The end angle of arc marks in radians. A value of 0 indicates up or “north”, + increasing values proceed clockwise. + tooltip + The tooltip text to show upon mouse hover. Specifying ``tooltip`` encoding overrides + `the tooltip property in the mark definition + `__. + + See the `tooltip `__ + documentation for a detailed discussion about tooltip in Vega-Lite. + url + The URL of an image mark. + x + X coordinates of the marks, or width of horizontal ``"bar"`` and ``"area"`` without + specified ``x2`` or ``width``. + + The ``value`` of this channel can be a number or a string ``"width"`` for the width + of the plot. + x2 + X2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``. + + The ``value`` of this channel can be a number or a string ``"width"`` for the width + of the plot. + xError + Error value of x coordinates for error specified ``"errorbar"`` and ``"errorband"``. + xError2 + Secondary error value of x coordinates for error specified ``"errorbar"`` and + ``"errorband"``. 
+ xOffset + Offset of x-position of the marks + y + Y coordinates of the marks, or height of vertical ``"bar"`` and ``"area"`` without + specified ``y2`` or ``height``. + + The ``value`` of this channel can be a number or a string ``"height"`` for the + height of the plot. + y2 + Y2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``. + + The ``value`` of this channel can be a number or a string ``"height"`` for the + height of the plot. + yError + Error value of y coordinates for error specified ``"errorbar"`` and ``"errorband"``. + yError2 + Secondary error value of y coordinates for error specified ``"errorbar"`` and + ``"errorband"``. + yOffset + Offset of y-position of the marks + """ + + angle: str | Angle | Map | AngleDatum | AngleValue + color: str | Color | Map | ColorDatum | ColorValue + column: str | Column | Map + description: str | Description | Map | DescriptionValue + detail: OneOrSeq[str | Detail | Map] + facet: str | Facet | Map + fill: str | Fill | Map | FillDatum | FillValue + fillOpacity: str | FillOpacity | Map | FillOpacityDatum | FillOpacityValue + href: str | Href | Map | HrefValue + key: str | Key | Map + latitude: str | Latitude | Map | LatitudeDatum + latitude2: str | Latitude2 | Map | Latitude2Datum | Latitude2Value + longitude: str | Longitude | Map | LongitudeDatum + longitude2: str | Longitude2 | Map | Longitude2Datum | Longitude2Value + opacity: str | Opacity | Map | OpacityDatum | OpacityValue + order: OneOrSeq[str | Order | Map | OrderValue] + radius: str | Radius | Map | RadiusDatum | RadiusValue + radius2: str | Radius2 | Map | Radius2Datum | Radius2Value + row: str | Row | Map + shape: str | Shape | Map | ShapeDatum | ShapeValue + size: str | Size | Map | SizeDatum | SizeValue + stroke: str | Stroke | Map | StrokeDatum | StrokeValue + strokeDash: str | StrokeDash | Map | StrokeDashDatum | StrokeDashValue + strokeOpacity: str | StrokeOpacity | Map | StrokeOpacityDatum | StrokeOpacityValue + strokeWidth: str | 
StrokeWidth | Map | StrokeWidthDatum | StrokeWidthValue + text: str | Text | Map | TextDatum | TextValue + theta: str | Theta | Map | ThetaDatum | ThetaValue + theta2: str | Theta2 | Map | Theta2Datum | Theta2Value + tooltip: OneOrSeq[str | Tooltip | Map | TooltipValue] + url: str | Url | Map | UrlValue + x: str | X | Map | XDatum | XValue + x2: str | X2 | Map | X2Datum | X2Value + xError: str | XError | Map | XErrorValue + xError2: str | XError2 | Map | XError2Value + xOffset: str | XOffset | Map | XOffsetDatum | XOffsetValue + y: str | Y | Map | YDatum | YValue + y2: str | Y2 | Map | Y2Datum | Y2Value + yError: str | YError | Map | YErrorValue + yError2: str | YError2 | Map | YError2Value + yOffset: str | YOffset | Map | YOffsetDatum | YOffsetValue diff --git a/altair/vegalite/v5/schema/core.py b/altair/vegalite/v5/schema/core.py index 1a1db541d..1de4c83de 100644 --- a/altair/vegalite/v5/schema/core.py +++ b/altair/vegalite/v5/schema/core.py @@ -17,7 +17,7 @@ # ruff: noqa: F405 if TYPE_CHECKING: from altair import Parameter - from altair.utils.schemapi import Optional + from altair.typing import Optional from ._typing import * # noqa: F403 @@ -18532,6 +18532,10 @@ class ScaleConfig(VegaLiteSchema): round : bool, dict, :class:`ExprRef` If true, rounds numeric output values to integers. This can be helpful for snapping to the pixel grid. (Only available for ``x``, ``y``, and ``size`` scales.) + tickBandPaddingInner : dict, float, :class:`ExprRef` + Default inner padding for ``x`` and ``y`` band-ordinal scales of ``"tick"`` marks. + + **Default value:** ``0.25`` useUnaggregatedDomain : bool Use the source data range before aggregation as scale domain instead of aggregated data for aggregate axis. 
@@ -18596,6 +18600,9 @@ def __init__( dict | float | Parameter | SchemaBase ] = Undefined, round: Optional[bool | dict | Parameter | SchemaBase] = Undefined, + tickBandPaddingInner: Optional[ + dict | float | Parameter | SchemaBase + ] = Undefined, useUnaggregatedDomain: Optional[bool] = Undefined, xReverse: Optional[bool | dict | Parameter | SchemaBase] = Undefined, zero: Optional[bool] = Undefined, @@ -18627,6 +18634,7 @@ def __init__( quantizeCount=quantizeCount, rectBandPaddingInner=rectBandPaddingInner, round=round, + tickBandPaddingInner=tickBandPaddingInner, useUnaggregatedDomain=useUnaggregatedDomain, xReverse=xReverse, zero=zero, @@ -26730,6 +26738,13 @@ class VariableParameter(TopLevelParameter): An expression for the value of the parameter. This expression may include other parameters, in which case the parameter will automatically update in response to upstream parameter changes. + react : bool + A boolean flag (default ``true``) indicating if the update expression should be + automatically re-evaluated when any upstream signal dependencies update. If + ``false``, the update expression will not register any dependencies on other + signals, even for initialization. + + **Default value:** ``true`` value : Any The `initial value `__ of the parameter. 
@@ -26744,10 +26759,13 @@ def __init__( name: Optional[str | SchemaBase] = Undefined, bind: Optional[dict | SchemaBase] = Undefined, expr: Optional[str | SchemaBase] = Undefined, + react: Optional[bool] = Undefined, value: Optional[Any] = Undefined, **kwds, ): - super().__init__(name=name, bind=bind, expr=expr, value=value, **kwds) + super().__init__( + name=name, bind=bind, expr=expr, react=react, value=value, **kwds + ) class Vector10string(VegaLiteSchema): diff --git a/altair/vegalite/v5/schema/mixins.py b/altair/vegalite/v5/schema/mixins.py index 6e11e590c..940164158 100644 --- a/altair/vegalite/v5/schema/mixins.py +++ b/altair/vegalite/v5/schema/mixins.py @@ -14,7 +14,6 @@ if TYPE_CHECKING: from altair import Parameter, SchemaBase - if sys.version_info >= (3, 11): from typing import Self else: @@ -23,7 +22,7 @@ # ruff: noqa: F405 if TYPE_CHECKING: - from altair.utils.schemapi import Optional + from altair.typing import Optional from ._typing import * # noqa: F403 diff --git a/altair/vegalite/v5/schema/vega-lite-schema.json b/altair/vegalite/v5/schema/vega-lite-schema.json index ec1d9ffc3..b6c6b5a1f 100644 --- a/altair/vegalite/v5/schema/vega-lite-schema.json +++ b/altair/vegalite/v5/schema/vega-lite-schema.json @@ -22237,6 +22237,19 @@ ], "description": "If true, rounds numeric output values to integers. This can be helpful for snapping to the pixel grid. 
(Only available for `x`, `y`, and `size` scales.)" }, + "tickBandPaddingInner": { + "anyOf": [ + { + "type": "number" + }, + { + "$ref": "#/definitions/ExprRef" + } + ], + "description": "Default inner padding for `x` and `y` band-ordinal scales of `\"tick\"` marks.\n\n__Default value:__ `0.25`", + "maximum": 1, + "minimum": 0 + }, "useUnaggregatedDomain": { "description": "Use the source data range before aggregation as scale domain instead of aggregated data for aggregate axis.\n\nThis is equivalent to setting `domain` to `\"unaggregate\"` for aggregated _quantitative_ fields by default.\n\nThis property only works with aggregate functions that produce values within the raw data domain (`\"mean\"`, `\"average\"`, `\"median\"`, `\"q1\"`, `\"q3\"`, `\"min\"`, `\"max\"`). For other aggregations that produce values outside of the raw data domain (e.g. `\"count\"`, `\"sum\"`), this property is ignored.\n\n__Default value:__ `false`", "type": "boolean" @@ -31631,6 +31644,10 @@ "$ref": "#/definitions/ParameterName", "description": "A unique name for the variable parameter. Parameter names should be valid JavaScript identifiers: they should contain only alphanumeric characters (or \"$\", or \"_\") and may not start with a digit. Reserved keywords that may not be used as parameter names are \"datum\", \"event\", \"item\", and \"parent\"." }, + "react": { + "description": "A boolean flag (default `true`) indicating if the update expression should be automatically re-evaluated when any upstream signal dependencies update. 
If `false`, the update expression will not register any dependencies on other signals, even for initialization.\n\n __Default value:__ `true`", + "type": "boolean" + }, "value": { "description": "The [initial value](http://vega.github.io/vega-lite/docs/value.html) of the parameter.\n\n__Default value:__ `undefined`" } diff --git a/altair/vegalite/v5/schema/vega-themes.json b/altair/vegalite/v5/schema/vega-themes.json new file mode 100644 index 000000000..22d8664b8 --- /dev/null +++ b/altair/vegalite/v5/schema/vega-themes.json @@ -0,0 +1,1301 @@ +{ + "carbong10": { + "arc": { + "fill": "#6929c4" + }, + "area": { + "fill": "#6929c4" + }, + "axis": { + "grid": true, + "gridColor": "#e0e0e0", + "labelAngle": 0, + "labelColor": "#525252", + "labelFont": "IBM Plex Sans Condensed, system-ui, -apple-system, BlinkMacSystemFont, \".SFNSText-Regular\", sans-serif", + "labelFontSize": 12, + "labelFontWeight": 400, + "titleColor": "#161616", + "titleFontSize": 12, + "titleFontWeight": 600 + }, + "axisX": { + "titlePadding": 10 + }, + "axisY": { + "titlePadding": 2.5 + }, + "background": "#f4f4f4", + "circle": { + "fill": "#6929c4" + }, + "group": { + "fill": "#ffffff" + }, + "path": { + "stroke": "#6929c4" + }, + "range": { + "category": [ + "#6929c4", + "#1192e8", + "#005d5d", + "#9f1853", + "#fa4d56", + "#570408", + "#198038", + "#002d9c", + "#ee538b", + "#b28600", + "#009d9a", + "#012749", + "#8a3800", + "#a56eff" + ], + "diverging": [ + "#750e13", + "#a2191f", + "#da1e28", + "#fa4d56", + "#ff8389", + "#ffb3b8", + "#ffd7d9", + "#fff1f1", + "#e5f6ff", + "#bae6ff", + "#82cfff", + "#33b1ff", + "#1192e8", + "#0072c3", + "#00539a", + "#003a6d" + ], + "heatmap": [ + "#f6f2ff", + "#e8daff", + "#d4bbff", + "#be95ff", + "#a56eff", + "#8a3ffc", + "#6929c4", + "#491d8b", + "#31135e", + "#1c0f30" + ] + }, + "rect": { + "fill": "#6929c4" + }, + "shape": { + "stroke": "#6929c4" + }, + "style": { + "guide-label": { + "fill": "#525252", + "font": "IBM Plex 
Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + }, + "guide-title": { + "fill": "#525252", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + } + }, + "symbol": { + "stroke": "#6929c4" + }, + "title": { + "anchor": "start", + "color": "#161616", + "dy": -15, + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontSize": 16, + "fontWeight": 600 + }, + "view": { + "fill": "#ffffff", + "stroke": "#ffffff" + } + }, + "carbong100": { + "arc": { + "fill": "#d4bbff" + }, + "area": { + "fill": "#d4bbff" + }, + "axis": { + "grid": true, + "gridColor": "#393939", + "labelAngle": 0, + "labelColor": "#c6c6c6", + "labelFont": "IBM Plex Sans Condensed, system-ui, -apple-system, BlinkMacSystemFont, \".SFNSText-Regular\", sans-serif", + "labelFontSize": 12, + "labelFontWeight": 400, + "titleColor": "#f4f4f4", + "titleFontSize": 12, + "titleFontWeight": 600 + }, + "axisX": { + "titlePadding": 10 + }, + "axisY": { + "titlePadding": 2.5 + }, + "background": "#161616", + "circle": { + "fill": "#d4bbff" + }, + "group": { + "fill": "#161616" + }, + "path": { + "stroke": "#d4bbff" + }, + "range": { + "category": [ + "#8a3ffc", + "#33b1ff", + "#007d79", + "#ff7eb6", + "#fa4d56", + "#fff1f1", + "#6fdc8c", + "#4589ff", + "#d12771", + "#d2a106", + "#08bdba", + "#bae6ff", + "#ba4e00", + "#d4bbff" + ], + "diverging": [ + "#750e13", + "#a2191f", + "#da1e28", + "#fa4d56", + "#ff8389", + "#ffb3b8", + "#ffd7d9", + "#fff1f1", + "#e5f6ff", + "#bae6ff", + "#82cfff", + "#33b1ff", + "#1192e8", + "#0072c3", + "#00539a", + "#003a6d" + ], + "heatmap": [ + "#f6f2ff", + "#e8daff", + "#d4bbff", + "#be95ff", + "#a56eff", + "#8a3ffc", + "#6929c4", + "#491d8b", + "#31135e", + "#1c0f30" + ] + }, + "rect": { + "fill": "#d4bbff" + }, + "shape": { + "stroke": "#d4bbff" + }, + "style": { + "guide-label": { + "fill": "#c6c6c6", + 
"font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + }, + "guide-title": { + "fill": "#c6c6c6", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + } + }, + "symbol": { + "stroke": "#d4bbff" + }, + "title": { + "anchor": "start", + "color": "#f4f4f4", + "dy": -15, + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontSize": 16, + "fontWeight": 600 + }, + "view": { + "fill": "#161616", + "stroke": "#161616" + } + }, + "carbong90": { + "arc": { + "fill": "#d4bbff" + }, + "area": { + "fill": "#d4bbff" + }, + "axis": { + "grid": true, + "gridColor": "#525252", + "labelAngle": 0, + "labelColor": "#c6c6c6", + "labelFont": "IBM Plex Sans Condensed, system-ui, -apple-system, BlinkMacSystemFont, \".SFNSText-Regular\", sans-serif", + "labelFontSize": 12, + "labelFontWeight": 400, + "titleColor": "#f4f4f4", + "titleFontSize": 12, + "titleFontWeight": 600 + }, + "axisX": { + "titlePadding": 10 + }, + "axisY": { + "titlePadding": 2.5 + }, + "background": "#262626", + "circle": { + "fill": "#d4bbff" + }, + "group": { + "fill": "#161616" + }, + "path": { + "stroke": "#d4bbff" + }, + "range": { + "category": [ + "#8a3ffc", + "#33b1ff", + "#007d79", + "#ff7eb6", + "#fa4d56", + "#fff1f1", + "#6fdc8c", + "#4589ff", + "#d12771", + "#d2a106", + "#08bdba", + "#bae6ff", + "#ba4e00", + "#d4bbff" + ], + "diverging": [ + "#750e13", + "#a2191f", + "#da1e28", + "#fa4d56", + "#ff8389", + "#ffb3b8", + "#ffd7d9", + "#fff1f1", + "#e5f6ff", + "#bae6ff", + "#82cfff", + "#33b1ff", + "#1192e8", + "#0072c3", + "#00539a", + "#003a6d" + ], + "heatmap": [ + "#f6f2ff", + "#e8daff", + "#d4bbff", + "#be95ff", + "#a56eff", + "#8a3ffc", + "#6929c4", + "#491d8b", + "#31135e", + "#1c0f30" + ] + }, + "rect": { + "fill": "#d4bbff" + }, + "shape": { + "stroke": "#d4bbff" + }, + "style": { + "guide-label": { + "fill": 
"#c6c6c6", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + }, + "guide-title": { + "fill": "#c6c6c6", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + } + }, + "symbol": { + "stroke": "#d4bbff" + }, + "title": { + "anchor": "start", + "color": "#f4f4f4", + "dy": -15, + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontSize": 16, + "fontWeight": 600 + }, + "view": { + "fill": "#161616", + "stroke": "#161616" + } + }, + "carbonwhite": { + "arc": { + "fill": "#6929c4" + }, + "area": { + "fill": "#6929c4" + }, + "axis": { + "grid": true, + "gridColor": "#e0e0e0", + "labelAngle": 0, + "labelColor": "#525252", + "labelFont": "IBM Plex Sans Condensed, system-ui, -apple-system, BlinkMacSystemFont, \".SFNSText-Regular\", sans-serif", + "labelFontSize": 12, + "labelFontWeight": 400, + "titleColor": "#161616", + "titleFontSize": 12, + "titleFontWeight": 600 + }, + "axisX": { + "titlePadding": 10 + }, + "axisY": { + "titlePadding": 2.5 + }, + "background": "#ffffff", + "circle": { + "fill": "#6929c4" + }, + "group": { + "fill": "#ffffff" + }, + "path": { + "stroke": "#6929c4" + }, + "range": { + "category": [ + "#6929c4", + "#1192e8", + "#005d5d", + "#9f1853", + "#fa4d56", + "#570408", + "#198038", + "#002d9c", + "#ee538b", + "#b28600", + "#009d9a", + "#012749", + "#8a3800", + "#a56eff" + ], + "diverging": [ + "#750e13", + "#a2191f", + "#da1e28", + "#fa4d56", + "#ff8389", + "#ffb3b8", + "#ffd7d9", + "#fff1f1", + "#e5f6ff", + "#bae6ff", + "#82cfff", + "#33b1ff", + "#1192e8", + "#0072c3", + "#00539a", + "#003a6d" + ], + "heatmap": [ + "#f6f2ff", + "#e8daff", + "#d4bbff", + "#be95ff", + "#a56eff", + "#8a3ffc", + "#6929c4", + "#491d8b", + "#31135e", + "#1c0f30" + ] + }, + "rect": { + "fill": "#6929c4" + }, + "shape": { + "stroke": "#6929c4" + }, + "style": { + 
"guide-label": { + "fill": "#525252", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + }, + "guide-title": { + "fill": "#525252", + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontWeight": 400 + } + }, + "symbol": { + "stroke": "#6929c4" + }, + "title": { + "anchor": "start", + "color": "#161616", + "dy": -15, + "font": "IBM Plex Sans,system-ui,-apple-system,BlinkMacSystemFont,\".sfnstext-regular\",sans-serif", + "fontSize": 16, + "fontWeight": 600 + }, + "view": { + "fill": "#ffffff", + "stroke": "#ffffff" + } + }, + "dark": { + "axis": { + "domainColor": "#fff", + "gridColor": "#888", + "tickColor": "#fff" + }, + "background": "#333", + "style": { + "guide-label": { + "fill": "#fff" + }, + "guide-title": { + "fill": "#fff" + } + }, + "title": { + "color": "#fff", + "subtitleColor": "#fff" + }, + "view": { + "stroke": "#888" + } + }, + "excel": { + "arc": { + "fill": "#4572a7" + }, + "area": { + "fill": "#4572a7" + }, + "axis": { + "bandPosition": 0.5, + "grid": true, + "gridColor": "#000000", + "gridOpacity": 1, + "gridWidth": 0.5, + "labelPadding": 10, + "tickSize": 5, + "tickWidth": 0.5 + }, + "axisBand": { + "grid": false, + "tickExtra": true + }, + "background": "#fff", + "legend": { + "labelBaseline": "middle", + "labelFontSize": 11, + "symbolSize": 50, + "symbolType": "square" + }, + "line": { + "stroke": "#4572a7", + "strokeWidth": 2 + }, + "path": { + "stroke": "#4572a7" + }, + "range": { + "category": [ + "#4572a7", + "#aa4643", + "#8aa453", + "#71598e", + "#4598ae", + "#d98445", + "#94aace", + "#d09393", + "#b9cc98", + "#a99cbc" + ] + }, + "rect": { + "fill": "#4572a7" + }, + "shape": { + "stroke": "#4572a7" + }, + "symbol": { + "fill": "#4572a7", + "size": 50, + "strokeWidth": 1.5 + } + }, + "fivethirtyeight": { + "arc": { + "fill": "#30a2da" + }, + "area": { + "fill": "#30a2da" + }, + "axis": { + "domainColor": 
"#cbcbcb", + "grid": true, + "gridColor": "#cbcbcb", + "gridWidth": 1, + "labelColor": "#999", + "labelFontSize": 10, + "labelPadding": 4, + "tickColor": "#cbcbcb", + "tickSize": 10, + "titleColor": "#333", + "titleFontSize": 14, + "titlePadding": 10 + }, + "axisBand": { + "grid": false + }, + "background": "#f0f0f0", + "bar": { + "binSpacing": 2, + "fill": "#30a2da", + "stroke": null + }, + "group": { + "fill": "#f0f0f0" + }, + "legend": { + "labelColor": "#333", + "labelFontSize": 11, + "padding": 1, + "symbolSize": 30, + "symbolType": "square", + "titleColor": "#333", + "titleFontSize": 14, + "titlePadding": 10 + }, + "line": { + "stroke": "#30a2da", + "strokeWidth": 2 + }, + "path": { + "stroke": "#30a2da", + "strokeWidth": 0.5 + }, + "point": { + "filled": true, + "shape": "circle" + }, + "range": { + "category": [ + "#30a2da", + "#fc4f30", + "#e5ae38", + "#6d904f", + "#8b8b8b", + "#b96db8", + "#ff9e27", + "#56cc60", + "#52d2ca", + "#52689e", + "#545454", + "#9fe4f8" + ], + "diverging": [ + "#cc0020", + "#e77866", + "#f6e7e1", + "#d6e8ed", + "#91bfd9", + "#1d78b5" + ], + "heatmap": [ + "#d6e8ed", + "#cee0e5", + "#91bfd9", + "#549cc6", + "#1d78b5" + ] + }, + "rect": { + "fill": "#30a2da" + }, + "shape": { + "stroke": "#30a2da" + }, + "title": { + "anchor": "start", + "fontSize": 24, + "fontWeight": 600, + "offset": 20 + } + }, + "ggplot2": { + "arc": { + "fill": "#000" + }, + "area": { + "fill": "#000" + }, + "axis": { + "domain": false, + "grid": true, + "gridColor": "#FFFFFF", + "gridOpacity": 1, + "labelColor": "#7F7F7F", + "labelPadding": 4, + "tickColor": "#7F7F7F", + "tickSize": 5.67, + "titleFontSize": 16, + "titleFontWeight": "normal" + }, + "group": { + "fill": "#e5e5e5" + }, + "legend": { + "labelBaseline": "middle", + "labelFontSize": 11, + "symbolSize": 40 + }, + "line": { + "stroke": "#000" + }, + "path": { + "stroke": "#000" + }, + "range": { + "category": [ + "#000000", + "#7F7F7F", + "#1A1A1A", + "#999999", + "#333333", + "#B0B0B0", + "#4D4D4D", 
+ "#C9C9C9", + "#666666", + "#DCDCDC" + ] + }, + "rect": { + "fill": "#000" + }, + "shape": { + "stroke": "#000" + }, + "symbol": { + "fill": "#000", + "size": 40 + } + }, + "googlecharts": { + "arc": { + "fill": "#3366CC" + }, + "area": { + "fill": "#3366CC" + }, + "axis": { + "domain": false, + "grid": true, + "gridColor": "#ccc", + "tickColor": "#ccc" + }, + "background": "#fff", + "circle": { + "fill": "#3366CC" + }, + "padding": { + "bottom": 10, + "left": 10, + "right": 10, + "top": 10 + }, + "path": { + "stroke": "#3366CC" + }, + "range": { + "category": [ + "#4285F4", + "#DB4437", + "#F4B400", + "#0F9D58", + "#AB47BC", + "#00ACC1", + "#FF7043", + "#9E9D24", + "#5C6BC0", + "#F06292", + "#00796B", + "#C2185B" + ], + "heatmap": [ + "#c6dafc", + "#5e97f6", + "#2a56c6" + ] + }, + "rect": { + "fill": "#3366CC" + }, + "shape": { + "stroke": "#3366CC" + }, + "style": { + "group-title": { + "font": "Arial, sans-serif", + "fontSize": 12 + }, + "guide-label": { + "font": "Arial, sans-serif", + "fontSize": 12 + }, + "guide-title": { + "font": "Arial, sans-serif", + "fontSize": 12 + } + }, + "symbol": { + "stroke": "#3366CC" + }, + "title": { + "anchor": "start", + "dy": -3, + "font": "Arial, sans-serif", + "fontSize": 14, + "fontWeight": "bold" + } + }, + "latimes": { + "arc": { + "fill": "#82c6df" + }, + "area": { + "fill": "#82c6df" + }, + "axis": { + "labelFont": "Benton Gothic, sans-serif", + "labelFontSize": 11.5, + "labelFontWeight": "normal", + "titleFont": "Benton Gothic Bold, sans-serif", + "titleFontSize": 13, + "titleFontWeight": "normal" + }, + "axisX": { + "labelAngle": 0, + "labelPadding": 4, + "tickSize": 3 + }, + "axisY": { + "labelBaseline": "middle", + "maxExtent": 45, + "minExtent": 45, + "tickSize": 2, + "titleAlign": "left", + "titleAngle": 0, + "titleX": -45, + "titleY": -11 + }, + "background": "#ffffff", + "legend": { + "labelFont": "Benton Gothic, sans-serif", + "labelFontSize": 11.5, + "symbolType": "square", + "titleFont": "Benton Gothic 
Bold, sans-serif", + "titleFontSize": 13, + "titleFontWeight": "normal" + }, + "line": { + "stroke": "#82c6df", + "strokeWidth": 2 + }, + "path": { + "stroke": "#82c6df" + }, + "range": { + "category": [ + "#ec8431", + "#829eb1", + "#c89d29", + "#3580b1", + "#adc839", + "#ab7fb4" + ], + "diverging": [ + "#e68a4f", + "#f4bb6a", + "#f9e39c", + "#dadfe2", + "#a6b7c6", + "#849eae" + ], + "heatmap": [ + "#fbf2c7", + "#f9e39c", + "#f8d36e", + "#f4bb6a", + "#e68a4f", + "#d15a40", + "#ab4232" + ], + "ordinal": [ + "#fbf2c7", + "#f9e39c", + "#f8d36e", + "#f4bb6a", + "#e68a4f", + "#d15a40", + "#ab4232" + ], + "ramp": [ + "#fbf2c7", + "#f9e39c", + "#f8d36e", + "#f4bb6a", + "#e68a4f", + "#d15a40", + "#ab4232" + ] + }, + "rect": { + "fill": "#82c6df" + }, + "shape": { + "stroke": "#82c6df" + }, + "symbol": { + "fill": "#82c6df", + "size": 30 + }, + "title": { + "anchor": "start", + "color": "#000000", + "font": "Benton Gothic Bold, sans-serif", + "fontSize": 22, + "fontWeight": "normal" + } + }, + "powerbi": { + "arc": { + "fill": "#118DFF" + }, + "area": { + "fill": "#118DFF", + "line": true, + "opacity": 0.6 + }, + "axis": { + "domain": false, + "grid": false, + "labelColor": "#605E5C", + "labelFontSize": 12, + "ticks": false, + "titleColor": "#252423", + "titleFont": "wf_standard-font, helvetica, arial, sans-serif", + "titleFontSize": 16, + "titleFontWeight": "normal" + }, + "axisBand": { + "tickExtra": true + }, + "axisQuantitative": { + "grid": true, + "gridColor": "#C8C6C4", + "gridDash": [ + 1, + 5 + ], + "labelFlush": false, + "tickCount": 3 + }, + "axisX": { + "labelPadding": 5 + }, + "axisY": { + "labelPadding": 10 + }, + "background": "transparent", + "bar": { + "fill": "#118DFF" + }, + "font": "Segoe UI", + "header": { + "labelColor": "#605E5C", + "labelFont": "Segoe UI", + "labelFontSize": 13.333333333333332, + "titleColor": "#252423", + "titleFont": "wf_standard-font, helvetica, arial, sans-serif", + "titleFontSize": 16 + }, + "legend": { + "labelColor": 
"#605E5C", + "labelFont": "Segoe UI", + "labelFontSize": 13.333333333333332, + "symbolSize": 75, + "symbolType": "circle", + "titleColor": "#605E5C", + "titleFont": "Segoe UI", + "titleFontWeight": "bold" + }, + "line": { + "stroke": "#118DFF", + "strokeCap": "round", + "strokeJoin": "round", + "strokeWidth": 3 + }, + "path": { + "stroke": "#118DFF" + }, + "point": { + "fill": "#118DFF", + "filled": true, + "size": 75 + }, + "range": { + "category": [ + "#118DFF", + "#12239E", + "#E66C37", + "#6B007B", + "#E044A7", + "#744EC2", + "#D9B300", + "#D64550" + ], + "diverging": [ + "#DEEFFF", + "#118DFF" + ], + "heatmap": [ + "#DEEFFF", + "#118DFF" + ], + "ordinal": [ + "#DEEFFF", + "#c7e4ff", + "#b0d9ff", + "#9aceff", + "#83c3ff", + "#6cb9ff", + "#55aeff", + "#3fa3ff", + "#2898ff", + "#118DFF" + ] + }, + "rect": { + "fill": "#118DFF" + }, + "shape": { + "stroke": "#118DFF" + }, + "symbol": { + "fill": "#118DFF", + "size": 50, + "strokeWidth": 1.5 + }, + "text": { + "fill": "#605E5C", + "font": "Segoe UI", + "fontSize": 12 + }, + "view": { + "stroke": "transparent" + } + }, + "quartz": { + "arc": { + "fill": "#ab5787" + }, + "area": { + "fill": "#ab5787" + }, + "axis": { + "domainColor": "#979797", + "domainWidth": 0.5, + "gridWidth": 0.2, + "labelColor": "#979797", + "tickColor": "#979797", + "tickWidth": 0.2, + "titleColor": "#979797" + }, + "axisBand": { + "grid": false + }, + "axisX": { + "grid": true, + "tickSize": 10 + }, + "axisY": { + "domain": false, + "grid": true, + "tickSize": 0 + }, + "background": "#f9f9f9", + "legend": { + "labelFontSize": 11, + "padding": 1, + "symbolSize": 30, + "symbolType": "square" + }, + "line": { + "stroke": "#ab5787" + }, + "path": { + "stroke": "#ab5787" + }, + "range": { + "category": [ + "#ab5787", + "#51b2e5", + "#703c5c", + "#168dd9", + "#d190b6", + "#00609f", + "#d365ba", + "#154866", + "#666666", + "#c4c4c4" + ] + }, + "rect": { + "fill": "#ab5787" + }, + "shape": { + "stroke": "#ab5787" + }, + "symbol": { + "fill": 
"#ab5787", + "size": 30 + } + }, + "urbaninstitute": { + "arc": { + "fill": "#1696d2" + }, + "area": { + "fill": "#1696d2" + }, + "axisX": { + "domain": true, + "domainColor": "#000000", + "domainWidth": 1, + "grid": false, + "labelAngle": 0, + "labelFont": "Lato", + "labelFontSize": 12, + "tickColor": "#000000", + "tickSize": 5, + "titleFont": "Lato", + "titleFontSize": 12, + "titlePadding": 10 + }, + "axisY": { + "domain": false, + "domainWidth": 1, + "grid": true, + "gridColor": "#DEDDDD", + "gridWidth": 1, + "labelFont": "Lato", + "labelFontSize": 12, + "labelPadding": 8, + "ticks": false, + "titleAngle": 0, + "titleFont": "Lato", + "titleFontSize": 12, + "titlePadding": 10, + "titleX": 18, + "titleY": -10 + }, + "background": "#FFFFFF", + "legend": { + "labelFont": "Lato", + "labelFontSize": 12, + "offset": 10, + "orient": "right", + "symbolSize": 100, + "titleFont": "Lato", + "titleFontSize": 12, + "titlePadding": 10 + }, + "line": { + "color": "#1696d2", + "stroke": "#1696d2", + "strokeWidth": 5 + }, + "path": { + "stroke": "#1696d2", + "strokeWidth": 0.5 + }, + "point": { + "filled": true + }, + "range": { + "category": [ + "#1696d2", + "#ec008b", + "#fdbf11", + "#000000", + "#d2d2d2", + "#55b748" + ], + "diverging": [ + "#ca5800", + "#fdbf11", + "#fdd870", + "#fff2cf", + "#cfe8f3", + "#73bfe2", + "#1696d2", + "#0a4c6a" + ], + "heatmap": [ + "#ca5800", + "#fdbf11", + "#fdd870", + "#fff2cf", + "#cfe8f3", + "#73bfe2", + "#1696d2", + "#0a4c6a" + ], + "ordinal": [ + "#cfe8f3", + "#a2d4ec", + "#73bfe2", + "#46abdb", + "#1696d2", + "#12719e" + ], + "ramp": [ + "#CFE8F3", + "#A2D4EC", + "#73BFE2", + "#46ABDB", + "#1696D2", + "#12719E", + "#0A4C6A", + "#062635" + ] + }, + "rect": { + "fill": "#1696d2" + }, + "shape": { + "stroke": "#1696d2" + }, + "style": { + "bar": { + "fill": "#1696d2", + "stroke": null + } + }, + "symbol": { + "fill": "#1696d2", + "size": 30 + }, + "text": { + "align": "center", + "color": "#1696d2", + "font": "Lato", + "fontSize": 11, + 
"fontWeight": 400, + "size": 11 + }, + "title": { + "anchor": "start", + "font": "Lato", + "fontSize": 18 + }, + "trail": { + "color": "#1696d2", + "size": 1, + "stroke": "#1696d2", + "strokeWidth": 0 + }, + "view": { + "stroke": "transparent" + } + }, + "vox": { + "arc": { + "fill": "#3e5c69" + }, + "area": { + "fill": "#3e5c69" + }, + "axis": { + "domainWidth": 0.5, + "grid": true, + "labelPadding": 2, + "tickSize": 5, + "tickWidth": 0.5, + "titleFontWeight": "normal" + }, + "axisBand": { + "grid": false + }, + "axisX": { + "gridWidth": 0.2 + }, + "axisY": { + "gridDash": [ + 3 + ], + "gridWidth": 0.4 + }, + "background": "#fff", + "legend": { + "labelFontSize": 11, + "padding": 1, + "symbolType": "square" + }, + "line": { + "stroke": "#3e5c69" + }, + "path": { + "stroke": "#3e5c69" + }, + "range": { + "category": [ + "#3e5c69", + "#6793a6", + "#182429", + "#0570b0", + "#3690c0", + "#74a9cf", + "#a6bddb", + "#e2ddf2" + ] + }, + "rect": { + "fill": "#3e5c69" + }, + "shape": { + "stroke": "#3e5c69" + }, + "symbol": { + "fill": "#3e5c69" + } + } +} \ No newline at end of file diff --git a/altair/vegalite/v5/theme.py b/altair/vegalite/v5/theme.py index 0b0273629..c98826826 100644 --- a/altair/vegalite/v5/theme.py +++ b/altair/vegalite/v5/theme.py @@ -1,22 +1,22 @@ """Tools for enabling and registering chart themes.""" from __future__ import annotations -from typing import Final -from ...utils.theme import ThemeRegistry +from typing import TYPE_CHECKING, Final, Literal, get_args -VEGA_THEMES = [ - "ggplot2", - "quartz", - "vox", - "fivethirtyeight", - "dark", - "latimes", - "urbaninstitute", - "excel", - "googlecharts", - "powerbi", -] +from altair.utils.theme import ThemeRegistry +from altair.vegalite.v5.schema._typing import VegaThemes + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 10): + from typing import TypeAlias + else: + from typing_extensions import TypeAlias + +AltairThemes: TypeAlias = Literal["default", "opaque"] +VEGA_THEMES: list[str] = 
list(get_args(VegaThemes)) class VegaTheme: @@ -37,7 +37,7 @@ def __repr__(self) -> str: # The entry point group that can be used by other packages to declare other # themes that will be auto-detected. Explicit registration is also -# allowed by the PluginRegistery API. +# allowed by the PluginRegistry API. ENTRY_POINT_GROUP: Final = "altair.vegalite.v5.theme" themes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP) diff --git a/doc/about/roadmap.rst b/doc/about/roadmap.rst index 50dd29feb..1916cd9a6 100644 --- a/doc/about/roadmap.rst +++ b/doc/about/roadmap.rst @@ -13,3 +13,4 @@ The roadmap for Vega-Altair and related projects can be found in code_of_conduct governance citing + versioning diff --git a/doc/about/versioning.rst b/doc/about/versioning.rst new file mode 100644 index 000000000..f4855ed78 --- /dev/null +++ b/doc/about/versioning.rst @@ -0,0 +1,46 @@ +Versioning +========== +Vega-Altair has historically released major versions that coincide with those of Vega-Lite_. + +As the projects have matured, and major versions become less frequent, there has been a growing need to introduce breaking changes between these major versions. +Such changes would allow Vega-Altair to address technical debt and improve upon API ergonomics. + +To ensure future releases clearly communicate changes, Vega-Altair will be working towards adopting SemVer_. + +Public API +---------- +Functionality documented in :ref:`api` defines the Vega-Altair public API. + +Version numbers +--------------- + +A Vega-Altair release number is composed of ``MAJOR.MINOR.PATCH``. + +* Backward incompatible API changes increment **MAJOR** version (``4.2.2`` - ``5.0.0``) +* New backward compatible functionality increments **MINOR** version (``5.2.0`` - ``5.3.0``) +* Backward compatible bug fixes increment **PATCH** version (``5.1.1`` - ``5.1.2``) + +**MAJOR** versions will *likely* continue to increase with a **MAJOR** increment to Vega-Lite_. 
+ +Deprecation +----------- +Deprecation warnings may be introduced in **MAJOR** and **MINOR** versions, +but the removal of deprecated functionality will not occur until *at least* the next **MAJOR** version. + +For upstream breaking changes that trigger a **MAJOR** version, +we *may* provide a deprecation warning if we consider the change especially disruptive. + +Starting in version ``5.4.0``, all deprecation warnings *must* specify: + +* the version number they were introduced + +Where possible, deprecation warnings *may* specify: + +* an alternative function/method/parameter/class to use instead +* an explanation for why this change had to be made + +Deprecated functionality *may* be removed from the Vega-Altair documentation, if there is a +suitable replacement and we believe inclusion of both could confuse new users. + +.. _Vega-Lite: https://github.com/vega/vega-lite +.. _SemVer: https://semver.org/ \ No newline at end of file diff --git a/doc/conf.py b/doc/conf.py index dd0699b3f..3e210a3fa 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -78,7 +78,7 @@ # built documents. # # The short X.Y version. -version = "5.4.0dev" +version = "5.5.0dev" # The full version, including alpha/beta/rc tags. release = f"{version}" diff --git a/doc/getting_started/resources.rst b/doc/getting_started/resources.rst index 2c5008fa7..bfba0c280 100644 --- a/doc/getting_started/resources.rst +++ b/doc/getting_started/resources.rst @@ -135,7 +135,7 @@ nx_altair is a library for drawing NetworkX_ graphs using Altair. It offers a si Altair Ally is a companion package to Altair, which provides a few shortcuts to create common plots for exploratory data analysis, particularly those involving visualization of an entire dataframe. .. List of links. -.. _`Altair Ally`: https://joelostblom.github.io/altair_ally +.. 
_`Altair Ally`: https://github.com/vega/altair_ally gif_ ~~~~ diff --git a/doc/user_guide/api.rst b/doc/user_guide/api.rst index f6dac987e..eaa9cb602 100644 --- a/doc/user_guide/api.rst +++ b/doc/user_guide/api.rst @@ -152,7 +152,6 @@ API Functions condition graticule hconcat - is_chart_type layer param repeat @@ -638,3 +637,57 @@ API Utility Classes When Then ChainedWhen + +Typing +------ +.. currentmodule:: altair.typing + +.. autosummary:: + :toctree: generated/typing/ + :nosignatures: + + ChannelAngle + ChannelColor + ChannelColumn + ChannelDescription + ChannelDetail + ChannelFacet + ChannelFill + ChannelFillOpacity + ChannelHref + ChannelKey + ChannelLatitude + ChannelLatitude2 + ChannelLongitude + ChannelLongitude2 + ChannelOpacity + ChannelOrder + ChannelRadius + ChannelRadius2 + ChannelRow + ChannelShape + ChannelSize + ChannelStroke + ChannelStrokeDash + ChannelStrokeOpacity + ChannelStrokeWidth + ChannelText + ChannelTheta + ChannelTheta2 + ChannelTooltip + ChannelUrl + ChannelX + ChannelX2 + ChannelXError + ChannelXError2 + ChannelXOffset + ChannelY + ChannelY2 + ChannelYError + ChannelYError2 + ChannelYOffset + ChartType + EncodeKwds + Optional + is_chart_type + diff --git a/doc/user_guide/customization.rst b/doc/user_guide/customization.rst index 9759669bd..1bd3ff658 100644 --- a/doc/user_guide/customization.rst +++ b/doc/user_guide/customization.rst @@ -763,6 +763,8 @@ If you would like to use any theme just for a single chart, you can use the Currently Altair does not offer many built-in themes, but we plan to add more options in the future. +See `Vega Theme Test`_ for an interactive demo of themes inherited from `Vega Themes`_. + Defining a Custom Theme ~~~~~~~~~~~~~~~~~~~~~~~ The theme registry also allows defining and registering custom themes. @@ -861,3 +863,4 @@ The configured localization settings persist upon saving. .. _Vega Themes: https://github.com/vega/vega-themes/ .. 
_`D3's localization support`: https://d3-wiki.readthedocs.io/zh-cn/master/Localization/ +.. _Vega Theme Test: https://vega.github.io/vega-themes/?renderer=canvas \ No newline at end of file diff --git a/doc/user_guide/encodings/index.rst b/doc/user_guide/encodings/index.rst index 264bacc02..b693b93f4 100644 --- a/doc/user_guide/encodings/index.rst +++ b/doc/user_guide/encodings/index.rst @@ -420,7 +420,7 @@ options available to change the sort order: - Passing the name of an encoding channel to ``sort``, such as ``"x"`` or ``"y"``, allows for sorting by that channel. An optional minus prefix can be used for a descending sort. For example ``sort='-x'`` would sort by the x channel in descending order. -- Passing a list to ``sort`` allows you to explicitly set the order in which +- Passing a `Sequence `_ to ``sort`` allows you to explicitly set the order in which you would like the encoding to appear - Using the ``field`` and ``op`` parameters to specify a field and aggregation operation to sort by. 
diff --git a/pyproject.toml b/pyproject.toml index 7af04f2d3..e2bdf7783 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ Source = "https://github.com/vega/altair" [project.optional-dependencies] all = [ "vega_datasets>=0.9.0", - "vl-convert-python>=1.5.0", + "vl-convert-python>=1.6.0", "pandas>=0.25.3", "numpy", "pyarrow>=11", @@ -66,7 +66,7 @@ all = [ ] dev = [ "hatch", - "ruff>=0.5.3", + "ruff>=0.5.7", "ibis-framework[polars]", "ipython[kernel]", "pandas>=0.25.3", @@ -82,7 +82,7 @@ dev = [ "polars>=0.20.3", ] doc = [ - "sphinx", + "sphinx>=8.0.0", "docutils", "sphinxext_altair", "jinja2", @@ -229,7 +229,9 @@ extend-safe-fixes=[ # escape-sequence-in-docstring "D301", # ends-in-period - "D400" + "D400", + # missing-return-type-special-method + "ANN204" ] # https://docs.astral.sh/ruff/preview/#using-rules-that-are-in-preview @@ -307,6 +309,12 @@ select = [ "D213", # numpy-specific-rules "NPY", + # flake8-annotations + "ANN", + # unsorted-imports + "I001", + # complex-structure + "C901", ] ignore = [ # Whitespace before ':' @@ -315,8 +323,6 @@ ignore = [ "E266", # Line too long "E501", - # Relative imports are banned - "TID252", # zip() without an explicit strict= parameter set. 
# python>=3.10 only "B905", @@ -351,15 +357,39 @@ ignore = [ "D413", # doc-line-too-long "W505", + # Any as annotation + "ANN401" ] # https://docs.astral.sh/ruff/settings/#lintpydocstyle pydocstyle={ convention="numpy" } -mccabe={ max-complexity=18 } +mccabe={ max-complexity=10 } +[tool.ruff.lint.isort] +classes = ["expr", "datum"] +extra-standard-library = ["typing_extensions"] +known-first-party=[ + "altair_tiles", + "sphinxext_altair", + "vega_datasets", + "vegafusion", + "vl_convert", +] +split-on-trailing-comma = false [tool.ruff.lint.flake8-tidy-imports.banned-api] # https://docs.astral.sh/ruff/settings/#lint_flake8-tidy-imports_banned-api -"typing.Optional".msg = "Use `Union[T, None]` instead.\n`typing.Optional` is likely to be confused with `altair.Optional`, which have a similar but different semantic meaning.\nSee https://github.com/vega/altair/pull/3449" +"typing.Optional".msg = """ +Use `Union[T, None]` instead. +`typing.Optional` is likely to be confused with `altair.typing.Optional`, \ +which have a similar but different semantic meaning. 
+See https://github.com/vega/altair/pull/3449 +""" + +[tool.ruff.lint.per-file-ignores] +# Only enforce type annotation rules on public api +"!altair/vegalite/v5/api.py" = ["ANN"] +# Allow complex if/elif branching during tests +"tests/**/*.py"= ["C901"] [tool.ruff.format] @@ -404,3 +434,4 @@ ignore_missing_imports = true extraPaths=["./tools"] pythonPlatform="All" pythonVersion="3.8" +reportUnusedExpression="none" diff --git a/sphinxext/altairgallery.py b/sphinxext/altairgallery.py index 4ce2ab629..062807d61 100644 --- a/sphinxext/altairgallery.py +++ b/sphinxext/altairgallery.py @@ -1,33 +1,32 @@ from __future__ import annotations +import collections import hashlib import json -from pathlib import Path import random -import collections -from operator import itemgetter -import warnings import shutil -from typing import Any, TYPE_CHECKING +import warnings +from operator import itemgetter +from pathlib import Path +from typing import TYPE_CHECKING, Any import jinja2 - from docutils import nodes -from docutils.statemachine import ViewList from docutils.parsers.rst import Directive from docutils.parsers.rst.directives import flag - +from docutils.statemachine import ViewList from sphinx.util.nodes import nested_parse_with_titles +from altair.utils.execeval import eval_block +from tests.examples_arguments_syntax import iter_examples_arguments_syntax +from tests.examples_methods_syntax import iter_examples_methods_syntax + from .utils import ( + create_generic_image, + create_thumbnail, get_docstring_and_rest, prev_this_next, - create_thumbnail, - create_generic_image, ) -from altair.utils.execeval import eval_block -from tests.examples_arguments_syntax import iter_examples_arguments_syntax -from tests.examples_methods_syntax import iter_examples_methods_syntax if TYPE_CHECKING: from docutils.nodes import Node diff --git a/sphinxext/schematable.py b/sphinxext/schematable.py index 04d69ef6f..f27622fb8 100644 --- a/sphinxext/schematable.py +++ b/sphinxext/schematable.py 
@@ -1,15 +1,17 @@ from __future__ import annotations + import importlib import re -from typing import Any, Iterator, Sequence import warnings -from docutils import nodes, utils, frontend +from typing import Any, Iterator, Sequence + +from docutils import frontend, nodes, utils from docutils.parsers.rst import Directive from docutils.parsers.rst.directives import flag from myst_parser.docutils_ import Parser from sphinx import addnodes -from tools.schemapi.utils import fix_docstring_issues, SchemaInfo +from tools.schemapi.utils import SchemaInfo, fix_docstring_issues def type_description(schema: dict[str, Any]) -> str: diff --git a/sphinxext/utils.py b/sphinxext/utils.py index 5851fe5e6..a27e4d340 100644 --- a/sphinxext/utils.py +++ b/sphinxext/utils.py @@ -4,8 +4,8 @@ import hashlib import itertools import json -from pathlib import Path import re +from pathlib import Path from typing import Any @@ -38,8 +38,8 @@ def create_generic_image( filename: Path, shape: tuple[float, float] = (200, 300), gradient: bool = True ) -> None: """Create a generic image.""" - from PIL import Image import numpy as np + from PIL import Image assert len(shape) == 2 diff --git a/tests/examples_arguments_syntax/bump_chart.py b/tests/examples_arguments_syntax/bump_chart.py index 1586b956e..0d1fb99b3 100644 --- a/tests/examples_arguments_syntax/bump_chart.py +++ b/tests/examples_arguments_syntax/bump_chart.py @@ -12,7 +12,7 @@ import pandas as pd stocks = data.stocks() -source = stocks.groupby([pd.Grouper(key="date", freq="6M"),"symbol"]).mean().reset_index() +source = stocks.groupby([pd.Grouper(key="date", freq="6MS"),"symbol"]).mean().reset_index() alt.Chart(source).mark_line(point = True).encode( x = alt.X("date:O", timeUnit="yearmonth", title="date"), diff --git a/tests/examples_methods_syntax/bump_chart.py b/tests/examples_methods_syntax/bump_chart.py index 03f88c8ac..8178668d8 100644 --- a/tests/examples_methods_syntax/bump_chart.py +++ b/tests/examples_methods_syntax/bump_chart.py @@ 
-12,7 +12,7 @@ import pandas as pd stocks = data.stocks() -source = stocks.groupby([pd.Grouper(key="date", freq="6M"),"symbol"]).mean().reset_index() +source = stocks.groupby([pd.Grouper(key="date", freq="6MS"),"symbol"]).mean().reset_index() alt.Chart(source).mark_line(point=True).encode( x=alt.X("date:O").timeUnit("yearmonth").title("date"), diff --git a/tests/expr/test_expr.py b/tests/expr/test_expr.py index bd3b0d9f9..8842e0ced 100644 --- a/tests/expr/test_expr.py +++ b/tests/expr/test_expr.py @@ -8,7 +8,7 @@ import pytest from jsonschema.exceptions import ValidationError -from altair import ExprRef, datum, expr +from altair import datum, expr, ExprRef from altair.expr import _ConstExpressionType # This maps vega expression function names to the Python name diff --git a/tests/test_examples.py b/tests/test_examples.py index be46a885c..02dd1e23c 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -21,15 +21,15 @@ import io import pkgutil +import re import sys from typing import Any, Iterable, Iterator -import re + import pytest import altair as alt from altair.utils.execeval import eval_block -from tests import examples_arguments_syntax -from tests import examples_methods_syntax +from tests import examples_arguments_syntax, examples_methods_syntax try: import vl_convert as vlc # noqa: F401, RUF100 @@ -82,7 +82,6 @@ def id_func(val) -> str: @pytest.mark.filterwarnings( - "ignore:'M' is deprecated.*:FutureWarning", "ignore:DataFrameGroupBy.apply.*:DeprecationWarning", ) @pytest.mark.parametrize(("source", "filename"), distributed_examples, ids=id_func) @@ -102,7 +101,6 @@ def test_render_examples_to_chart(source, filename) -> None: @pytest.mark.filterwarnings( - "ignore:'M' is deprecated.*:FutureWarning", "ignore:DataFrameGroupBy.apply.*:DeprecationWarning", ) @pytest.mark.parametrize(("source", "filename"), distributed_examples, ids=id_func) @@ -134,7 +132,6 @@ def test_from_and_to_json_roundtrip(source, filename) -> None: 
@pytest.mark.filterwarnings( - "ignore:'M' is deprecated.*:FutureWarning", "ignore:DataFrameGroupBy.apply.*:DeprecationWarning", ) @pytest.mark.parametrize(("source", "filename"), distributed_examples, ids=id_func) diff --git a/tests/test_jupyter_chart.py b/tests/test_jupyter_chart.py index 999b0e56d..cec67153f 100644 --- a/tests/test_jupyter_chart.py +++ b/tests/test_jupyter_chart.py @@ -1,8 +1,9 @@ -import altair as alt -from vega_datasets import data import pandas as pd import pytest +import altair as alt +from vega_datasets import data + # If anywidget is not installed, we will skip the tests in this file. try: import anywidget # noqa: F401 diff --git a/tests/test_magics.py b/tests/test_magics.py index 11b7f7d57..e1775aaf5 100644 --- a/tests/test_magics.py +++ b/tests/test_magics.py @@ -1,4 +1,5 @@ import json + import pytest try: @@ -10,7 +11,6 @@ from altair.vegalite.v5 import VegaLite - DATA_RECORDS = [ {"amount": 28, "category": "A"}, {"amount": 55, "category": "B"}, diff --git a/tests/test_toplevel.py b/tests/test_toplevel.py index 2fb583c91..186407472 100644 --- a/tests/test_toplevel.py +++ b/tests/test_toplevel.py @@ -1,5 +1,4 @@ import altair as alt - from tools import update_init_file diff --git a/tests/test_transformed_data.py b/tests/test_transformed_data.py index d44fb7153..392359264 100644 --- a/tests/test_transformed_data.py +++ b/tests/test_transformed_data.py @@ -17,7 +17,6 @@ """Use as an `xfail` condition, if running in parallel may cause the test to fail.""" @pytest.mark.filterwarnings( - "ignore:'M' is deprecated.*:FutureWarning", "ignore:DataFrameGroupBy.apply.*:DeprecationWarning" ) @pytest.mark.skipif(vf is None, reason="vegafusion not installed") @@ -29,7 +28,7 @@ ("bar_chart_faceted_compact.py", 27, ["p", "p_end"]), ("beckers_barley_facet.py", 120, ["year", "site"]), ("beckers_barley_wrapped_facet.py", 120, ["site", "median_yield"]), - ("bump_chart.py", 100, ["rank", "yearmonth_date"]), + ("bump_chart.py", 96, ["rank", 
"yearmonth_date"]), ("comet_chart.py", 120, ["variety", "delta"]), ("diverging_stacked_bar_chart.py", 40, ["value", "percentage_start"]), ("donut_chart.py", 6, ["value_start", "value_end"]), diff --git a/tests/utils/test_compiler.py b/tests/utils/test_compiler.py index 0840da4a8..7a989d2e9 100644 --- a/tests/utils/test_compiler.py +++ b/tests/utils/test_compiler.py @@ -1,6 +1,8 @@ import json + import pytest -from altair import vegalite_compilers, Chart + +from altair import Chart, vegalite_compilers try: import vl_convert as vlc diff --git a/tests/utils/test_core.py b/tests/utils/test_core.py index d71e4d822..2b74398b0 100644 --- a/tests/utils/test_core.py +++ b/tests/utils/test_core.py @@ -1,14 +1,14 @@ import types -from packaging.version import Version from importlib.metadata import version as importlib_version import numpy as np import pandas as pd import pytest +from packaging.version import Version +from pandas.api.types import infer_dtype import altair as alt -from altair.utils.core import parse_shorthand, update_nested, infer_encoding_types -from pandas.api.types import infer_dtype +from altair.utils.core import infer_encoding_types, parse_shorthand, update_nested json_schema_specification = alt.load_schema()["$schema"] json_schema_dict_str = f'{{"$schema": "{json_schema_specification}"}}' diff --git a/tests/utils/test_data.py b/tests/utils/test_data.py index 0bbba764b..673c2852c 100644 --- a/tests/utils/test_data.py +++ b/tests/utils/test_data.py @@ -1,17 +1,18 @@ from pathlib import Path - from typing import Any, Callable -import pytest + +import narwhals.stable.v1 as nw import pandas as pd import polars as pl -import narwhals.stable.v1 as nw +import pytest + from altair.utils.data import ( - limit_rows, MaxRowsError, + limit_rows, sample, - to_values, - to_json, to_csv, + to_json, + to_values, ) diff --git a/tests/utils/test_deprecation.py b/tests/utils/test_deprecation.py index 205652292..3970f4794 100644 --- a/tests/utils/test_deprecation.py +++ 
b/tests/utils/test_deprecation.py @@ -1,5 +1,7 @@ -import pytest import re + +import pytest + from altair.utils.deprecation import ( AltairDeprecationWarning, deprecated, diff --git a/tests/utils/test_plugin_registry.py b/tests/utils/test_plugin_registry.py index 632cad027..bd741d2eb 100644 --- a/tests/utils/test_plugin_registry.py +++ b/tests/utils/test_plugin_registry.py @@ -1,6 +1,7 @@ -from altair.utils.plugin_registry import PluginRegistry from typing import Callable +from altair.utils.plugin_registry import PluginRegistry + class TypedCallableRegistry(PluginRegistry[Callable[[int], int], int]): pass diff --git a/tests/utils/test_schemapi.py b/tests/utils/test_schemapi.py index 4a12a2479..a6107601a 100644 --- a/tests/utils/test_schemapi.py +++ b/tests/utils/test_schemapi.py @@ -1,28 +1,40 @@ # ruff: noqa: W291 +from __future__ import annotations + import copy -import io import inspect +import io import json -import jsonschema -import jsonschema.exceptions import pickle +import types import warnings +from collections import deque +from functools import partial +from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence +import jsonschema +import jsonschema.exceptions import numpy as np import pandas as pd +import polars as pl import pytest -from vega_datasets import data import altair as alt from altair import load_schema from altair.utils.schemapi import ( - UndefinedType, + _DEFAULT_JSON_SCHEMA_DRAFT_URL, SchemaBase, + SchemaValidationError, Undefined, + UndefinedType, _FromDict, - SchemaValidationError, - _DEFAULT_JSON_SCHEMA_DRAFT_URL, ) +from altair.vegalite.v5.schema.channels import X +from altair.vegalite.v5.schema.core import FieldOneOfPredicate, Legend +from vega_datasets import data + +if TYPE_CHECKING: + from narwhals.typing import IntoDataFrame _JSON_SCHEMA_DRAFT_URL = load_schema()["$schema"] # Make tests inherit from _TestSchema, so that when we test from_dict it won't @@ -531,9 +543,7 @@ def 
chart_error_example__wrong_tooltip_type_in_faceted_chart(): def chart_error_example__wrong_tooltip_type_in_layered_chart(): # Error: Wrong data type to pass to tooltip - return alt.layer( - alt.Chart().mark_point().encode(tooltip=[{"wrong"}]), - ) + return alt.layer(alt.Chart().mark_point().encode(tooltip=[{"wrong"}])) def chart_error_example__two_errors_in_layered_chart(): @@ -629,13 +639,24 @@ def chart_error_example__four_errors(): ) -@pytest.mark.parametrize( - ("chart_func", "expected_error_message"), - [ - ( - chart_error_example__invalid_y_option_value_unknown_x_option, - inspect.cleandoc( - r"""Multiple errors were found. +def id_func(val) -> str: + """ + Ensures the generated test-id name uses only `chart_func` and not `expected_error_message`. + + Without this, the name is ``test_chart_validation_errors[chart_func-expected_error_message]`` + """ + if isinstance(val, types.FunctionType): + return val.__name__ + else: + return "" + + +# NOTE: Avoids all cases appearing in a failure traceback +# At the time of writing, this is over 300 lines +chart_funcs_error_message: list[tuple[Callable[..., Any], str]] = [ + ( + chart_error_example__invalid_y_option_value_unknown_x_option, + r"""Multiple errors were found. Error 1: `X` has no parameter named 'unknown' @@ -650,27 +671,21 @@ def chart_error_example__four_errors(): Error 2: 'asdf' is an invalid value for `stack`. Valid values are: - One of \['zero', 'center', 'normalize'\] - - Of type 'null' or 'boolean'$""" - ), - ), - ( - chart_error_example__wrong_tooltip_type_in_faceted_chart, - inspect.cleandoc( - r"""'{'wrong'}' is an invalid value for `field`. Valid values are of type 'string' or 'object'.$""" - ), - ), - ( - chart_error_example__wrong_tooltip_type_in_layered_chart, - inspect.cleandoc( - r"""'{'wrong'}' is an invalid value for `field`. 
Valid values are of type 'string' or 'object'.$""" - ), - ), - ( - chart_error_example__two_errors_in_layered_chart, - inspect.cleandoc( - r"""Multiple errors were found. - - Error 1: '{'wrong'}' is an invalid value for `field`. Valid values are of type 'string' or 'object'. + - Of type 'null' or 'boolean'$""", + ), + ( + chart_error_example__wrong_tooltip_type_in_faceted_chart, + r"""'\['wrong'\]' is an invalid value for `field`. Valid values are of type 'string' or 'object'.$""", + ), + ( + chart_error_example__wrong_tooltip_type_in_layered_chart, + r"""'\['wrong'\]' is an invalid value for `field`. Valid values are of type 'string' or 'object'.$""", + ), + ( + chart_error_example__two_errors_in_layered_chart, + r"""Multiple errors were found. + + Error 1: '\['wrong'\]' is an invalid value for `field`. Valid values are of type 'string' or 'object'. Error 2: `Color` has no parameter named 'invalidArgument' @@ -679,25 +694,21 @@ def chart_error_example__four_errors(): aggregate condition scale title bandPosition field sort type - See the help for `Color` to read the full description of these parameters$""" - ), - ), - ( - chart_error_example__two_errors_in_complex_concat_layered_chart, - inspect.cleandoc( - r"""Multiple errors were found. + See the help for `Color` to read the full description of these parameters$""", + ), + ( + chart_error_example__two_errors_in_complex_concat_layered_chart, + r"""Multiple errors were found. - Error 1: '{'wrong'}' is an invalid value for `field`. Valid values are of type 'string' or 'object'. + Error 1: '\['wrong'\]' is an invalid value for `field`. Valid values are of type 'string' or 'object'. - Error 2: '4' is an invalid value for `bandPosition`. Valid values are of type 'number'.$""" - ), - ), - ( - chart_error_example__three_errors_in_complex_concat_layered_chart, - inspect.cleandoc( - r"""Multiple errors were found. + Error 2: '4' is an invalid value for `bandPosition`. 
Valid values are of type 'number'.$""", + ), + ( + chart_error_example__three_errors_in_complex_concat_layered_chart, + r"""Multiple errors were found. - Error 1: '{'wrong'}' is an invalid value for `field`. Valid values are of type 'string' or 'object'. + Error 1: '\['wrong'\]' is an invalid value for `field`. Valid values are of type 'string' or 'object'. Error 2: `Color` has no parameter named 'invalidArgument' @@ -708,13 +719,11 @@ def chart_error_example__four_errors(): See the help for `Color` to read the full description of these parameters - Error 3: '4' is an invalid value for `bandPosition`. Valid values are of type 'number'.$""" - ), - ), - ( - chart_error_example__two_errors_with_one_in_nested_layered_chart, - inspect.cleandoc( - r"""Multiple errors were found. + Error 3: '4' is an invalid value for `bandPosition`. Valid values are of type 'number'.$""", + ), + ( + chart_error_example__two_errors_with_one_in_nested_layered_chart, + r"""Multiple errors were found. Error 1: `Scale` has no parameter named 'invalidOption' @@ -734,13 +743,11 @@ def chart_error_example__four_errors(): aggregate condition scale title bandPosition field sort type - See the help for `Color` to read the full description of these parameters$""" - ), - ), - ( - chart_error_example__layer, - inspect.cleandoc( - r"""`VConcatChart` has no parameter named 'width' + See the help for `Color` to read the full description of these parameters$""", + ), + ( + chart_error_example__layer, + r"""`VConcatChart` has no parameter named 'width' Existing parameter names are: vconcat center description params title @@ -748,37 +755,29 @@ def chart_error_example__four_errors(): background data padding spacing usermeta bounds datasets - See the help for `VConcatChart` to read the full description of these parameters$""" - ), - ), - ( - chart_error_example__invalid_y_option_value, - inspect.cleandoc( - r"""'asdf' is an invalid value for `stack`. 
Valid values are: + See the help for `VConcatChart` to read the full description of these parameters$""", + ), + ( + chart_error_example__invalid_y_option_value, + r"""'asdf' is an invalid value for `stack`. Valid values are: - One of \['zero', 'center', 'normalize'\] - - Of type 'null' or 'boolean'$""" - ), - ), - ( - chart_error_example__invalid_y_option_value_with_condition, - inspect.cleandoc( - r"""'asdf' is an invalid value for `stack`. Valid values are: + - Of type 'null' or 'boolean'$""", + ), + ( + chart_error_example__invalid_y_option_value_with_condition, + r"""'asdf' is an invalid value for `stack`. Valid values are: - One of \['zero', 'center', 'normalize'\] - - Of type 'null' or 'boolean'$""" - ), - ), - ( - chart_error_example__hconcat, - inspect.cleandoc( - r"""'{'text': 'Horsepower', 'align': 'right'}' is an invalid value for `title`. Valid values are of type 'string', 'array', or 'null'.$""" - ), - ), - ( - chart_error_example__invalid_timeunit_value, - inspect.cleandoc( - r"""'invalid_value' is an invalid value for `timeUnit`. Valid values are: + - Of type 'null' or 'boolean'$""", + ), + ( + chart_error_example__hconcat, + r"""'{'text': 'Horsepower', 'align': 'right'}' is an invalid value for `title`. Valid values are of type 'string', 'array', or 'null'.$""", + ), + ( + chart_error_example__invalid_timeunit_value, + r"""'invalid_value' is an invalid value for `timeUnit`. 
Valid values are: - One of \['year', 'quarter', 'month', 'week', 'day', 'dayofyear', 'date', 'hours', 'minutes', 'seconds', 'milliseconds'\] - One of \['utcyear', 'utcquarter', 'utcmonth', 'utcweek', 'utcday', 'utcdayofyear', 'utcdate', 'utchours', 'utcminutes', 'utcseconds', 'utcmilliseconds'\] @@ -786,36 +785,28 @@ def chart_error_example__four_errors(): - One of \['utcyearquarter', 'utcyearquartermonth', 'utcyearmonth', 'utcyearmonthdate', 'utcyearmonthdatehours', 'utcyearmonthdatehoursminutes', 'utcyearmonthdatehoursminutesseconds', 'utcyearweek', 'utcyearweekday', 'utcyearweekdayhours', 'utcyearweekdayhoursminutes', 'utcyearweekdayhoursminutesseconds', 'utcyeardayofyear', 'utcquartermonth', 'utcmonthdate', 'utcmonthdatehours', 'utcmonthdatehoursminutes', 'utcmonthdatehoursminutesseconds', 'utcweekday', 'utcweekdayhours', 'utcweekdayhoursminutes', 'utcweekdayhoursminutesseconds', 'utcdayhours', 'utcdayhoursminutes', 'utcdayhoursminutesseconds', 'utchoursminutes', 'utchoursminutesseconds', 'utcminutesseconds', 'utcsecondsmilliseconds'\] - One of \['binnedyear', 'binnedyearquarter', 'binnedyearquartermonth', 'binnedyearmonth', 'binnedyearmonthdate', 'binnedyearmonthdatehours', 'binnedyearmonthdatehoursminutes', 'binnedyearmonthdatehoursminutesseconds', 'binnedyearweek', 'binnedyearweekday', 'binnedyearweekdayhours', 'binnedyearweekdayhoursminutes', 'binnedyearweekdayhoursminutesseconds', 'binnedyeardayofyear'\] - One of \['binnedutcyear', 'binnedutcyearquarter', 'binnedutcyearquartermonth', 'binnedutcyearmonth', 'binnedutcyearmonthdate', 'binnedutcyearmonthdatehours', 'binnedutcyearmonthdatehoursminutes', 'binnedutcyearmonthdatehoursminutesseconds', 'binnedutcyearweek', 'binnedutcyearweekday', 'binnedutcyearweekdayhours', 'binnedutcyearweekdayhoursminutes', 'binnedutcyearweekdayhoursminutesseconds', 'binnedutcyeardayofyear'\] - - Of type 'object'$""" - ), - ), - ( - chart_error_example__invalid_sort_value, - inspect.cleandoc( - r"""'invalid_value' is an invalid 
value for `sort`. Valid values are: + - Of type 'object'$""", + ), + ( + chart_error_example__invalid_sort_value, + r"""'invalid_value' is an invalid value for `sort`. Valid values are: - One of \['ascending', 'descending'\] - One of \['x', 'y', 'color', 'fill', 'stroke', 'strokeWidth', 'size', 'shape', 'fillOpacity', 'strokeOpacity', 'opacity', 'text'\] - One of \['-x', '-y', '-color', '-fill', '-stroke', '-strokeWidth', '-size', '-shape', '-fillOpacity', '-strokeOpacity', '-opacity', '-text'\] - - Of type 'array', 'object', or 'null'$""" - ), - ), - ( - chart_error_example__invalid_bandposition_value, - inspect.cleandoc( - r"""'4' is an invalid value for `bandPosition`. Valid values are of type 'number'.$""" - ), - ), - ( - chart_error_example__invalid_type, - inspect.cleandoc( - r"""'unknown' is an invalid value for `type`. Valid values are one of \['quantitative', 'ordinal', 'temporal', 'nominal', 'geojson'\].$""" - ), - ), - ( - chart_error_example__additional_datum_argument, - inspect.cleandoc( - r"""`X` has no parameter named 'wrong_argument' + - Of type 'array', 'object', or 'null'$""", + ), + ( + chart_error_example__invalid_bandposition_value, + r"""'4' is an invalid value for `bandPosition`. Valid values are of type 'number'.$""", + ), + ( + chart_error_example__invalid_type, + r"""'unknown' is an invalid value for `type`. Valid values are one of \['quantitative', 'ordinal', 'temporal', 'nominal', 'geojson'\].$""", + ), + ( + chart_error_example__additional_datum_argument, + r"""`X` has no parameter named 'wrong_argument' Existing parameter names are: shorthand bin scale timeUnit @@ -823,19 +814,15 @@ def chart_error_example__four_errors(): axis impute stack type bandPosition - See the help for `X` to read the full description of these parameters$""" - ), - ), - ( - chart_error_example__invalid_value_type, - inspect.cleandoc( - r"""'1' is an invalid value for `value`. 
Valid values are of type 'object', 'string', or 'null'.$""" - ), - ), - ( - chart_error_example__four_errors, - inspect.cleandoc( - r"""Multiple errors were found. + See the help for `X` to read the full description of these parameters$""", + ), + ( + chart_error_example__invalid_value_type, + r"""'1' is an invalid value for `value`. Valid values are of type 'object', 'string', or 'null'.$""", + ), + ( + chart_error_example__four_errors, + r"""Multiple errors were found. Error 1: `Color` has no parameter named 'another_unknown' @@ -863,10 +850,13 @@ def chart_error_example__four_errors(): axis impute stack type bandPosition - See the help for `X` to read the full description of these parameters$""" - ), - ), - ], + See the help for `X` to read the full description of these parameters$""", + ), +] + + +@pytest.mark.parametrize( + ("chart_func", "expected_error_message"), chart_funcs_error_message, ids=id_func ) def test_chart_validation_errors(chart_func, expected_error_message): # For some wrong chart specifications such as an unknown encoding channel, @@ -876,6 +866,7 @@ def test_chart_validation_errors(chart_func, expected_error_message): with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) chart = chart_func() + expected_error_message = inspect.cleandoc(expected_error_message) with pytest.raises(SchemaValidationError, match=expected_error_message): chart.to_dict() @@ -884,14 +875,25 @@ def test_multiple_field_strings_in_condition(): selection = alt.selection_point() expected_error_message = "A field cannot be used for both the `if_true` and `if_false` values of a condition. One of them has to specify a `value` or `datum` definition." 
with pytest.raises(ValueError, match=expected_error_message): - ( + chart = ( # noqa: F841 alt.Chart(data.cars()) .mark_circle() .add_params(selection) - .encode( - color=alt.condition(selection, "Origin", "Origin"), - ) - ).to_dict() + .encode(color=alt.condition(selection, "Origin", "Origin")) + .to_dict() + ) + + +@pytest.mark.parametrize("tp", [pd.DataFrame, pl.DataFrame]) +def test_non_existent_column_name(tp: Callable[..., IntoDataFrame]) -> None: + df = tp({"a": [1, 2], "b": [4, 5]}) + msg = ( + 'Unable to determine data type for the field "c"; verify that the field name ' + "is not misspelled. If you are referencing a field from a transform, also " + "confirm that the data type is specified correctly." + ) + with pytest.raises(ValueError, match=msg): + alt.Chart(df).mark_line().encode(x="a", y="c").to_json() def test_serialize_numpy_types(): @@ -948,3 +950,71 @@ def test_to_dict_expand_mark_spec(): chart = alt.Chart().mark_bar() assert chart.to_dict()["mark"] == {"type": "bar"} assert chart.mark == "bar" + + +@pytest.mark.parametrize( + "expected", + [list("cdfabe"), [0, 3, 4, 5, 8]], +) +@pytest.mark.parametrize( + "tp", + [ + tuple, + list, + deque, + pl.Series, + pd.Series, + pd.Index, + pd.Categorical, + pd.CategoricalIndex, + np.array, + ], +) +@pytest.mark.parametrize( + "schema_param", + [ + (partial(X, "x:N"), "sort"), + (partial(FieldOneOfPredicate, "name"), "oneOf"), + (Legend, "values"), + ], +) +def test_to_dict_iterables( + tp: Callable[..., Iterable[Any]], + expected: Sequence[Any], + schema_param: tuple[Callable[..., SchemaBase], str], +) -> None: + """ + Confirm `SchemaBase` can convert common `(Sequence|Iterable)` types to `list`. + + Parameters + ---------- + tp + Constructor for test `Iterable`. + expected + Values wrapped by `tp`. + schema_param + Constructor for `SchemaBase` subclass, and target parameter name. + + Notes + ----- + `partial` can be used to reshape the `SchemaBase` constructor. 
+ + References + ---------- + - https://github.com/vega/altair/issues/2808 + - https://github.com/vega/altair/issues/2877 + """ + tp_schema, param = schema_param + validated = tp_schema(**{param: tp(expected)}).to_dict() + actual = validated[param] + assert actual == expected + + +@pytest.mark.parametrize( + "tp", [range, np.arange, partial(pl.int_range, eager=True), pd.RangeIndex] +) +def test_to_dict_range(tp) -> None: + expected = [0, 1, 2, 3, 4] + x_dict = alt.X("x:O", sort=tp(0, 5)).to_dict() + actual = x_dict["sort"] # type: ignore + assert actual == expected diff --git a/tests/utils/test_server.py b/tests/utils/test_server.py index 6f72cff29..cf5a0f364 100644 --- a/tests/utils/test_server.py +++ b/tests/utils/test_server.py @@ -1,6 +1,6 @@ """Test http server.""" -from altair.utils.server import serve, MockServer +from altair.utils.server import MockServer, serve def test_serve(): diff --git a/tests/utils/test_to_values_narwhals.py b/tests/utils/test_to_values_narwhals.py index c636b7aea..1a96c6775 100644 --- a/tests/utils/test_to_values_narwhals.py +++ b/tests/utils/test_to_values_narwhals.py @@ -1,9 +1,10 @@ +import sys from datetime import datetime from pathlib import Path + +import narwhals.stable.v1 as nw import pandas as pd import pytest -import sys -import narwhals.stable.v1 as nw try: import pyarrow as pa diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index e1e5eaeb3..0e22ec7e9 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -10,8 +10,8 @@ from altair.utils import ( infer_vegalite_type_for_pandas, - sanitize_pandas_dataframe, sanitize_narwhals_dataframe, + sanitize_pandas_dataframe, ) try: @@ -45,7 +45,6 @@ def _check(arr, typ): _check([], "nominal") -@pytest.mark.filterwarnings("ignore:'H' is deprecated.*:FutureWarning") def test_sanitize_dataframe(): # create a dataframe with various types df = pd.DataFrame( @@ -129,7 +128,6 @@ def test_sanitize_dataframe_arrow_columns(): json.dumps(records) 
-@pytest.mark.filterwarnings("ignore:'H' is deprecated.*:FutureWarning") @pytest.mark.skipif(pa is None, reason="pyarrow not installed") @pytest.mark.xfail( sys.platform == "win32", reason="Timezone database is not installed on Windows" diff --git a/tests/vegalite/test_common.py b/tests/vegalite/test_common.py index 997231b74..1e3e83daa 100644 --- a/tests/vegalite/test_common.py +++ b/tests/vegalite/test_common.py @@ -1,8 +1,7 @@ """Tests of functionality that should work in all vegalite versions.""" -import pytest - import pandas as pd +import pytest from altair.vegalite import v5 diff --git a/tests/vegalite/v5/test__api_rfc.py b/tests/vegalite/v5/test__api_rfc.py index 8042fd98d..aa2fe6297 100644 --- a/tests/vegalite/v5/test__api_rfc.py +++ b/tests/vegalite/v5/test__api_rfc.py @@ -1,15 +1,14 @@ from __future__ import annotations -from typing import TYPE_CHECKING - # ruff: noqa: F401 import re +from typing import TYPE_CHECKING + import pytest import altair as alt - -from altair.vegalite.v5._api_rfc import agg, field, EncodeType -from altair.utils.core import TYPECODE_MAP, INV_TYPECODE_MAP +from altair.utils.core import INV_TYPECODE_MAP, TYPECODE_MAP +from altair.vegalite.v5._api_rfc import EncodeType, agg, field if TYPE_CHECKING: from altair.vegalite.v5.schema._typing import AggregateOp_T diff --git a/tests/vegalite/v5/test_api.py b/tests/vegalite/v5/test_api.py index ff652d51e..29d68d1ea 100644 --- a/tests/vegalite/v5/test_api.py +++ b/tests/vegalite/v5/test_api.py @@ -2,28 +2,27 @@ from __future__ import annotations - -from datetime import date import io -import ibis -import sys import json import operator import os import pathlib import re +import sys import tempfile +from datetime import date from importlib.metadata import version as importlib_version -from packaging.version import Version +import ibis import jsonschema import narwhals.stable.v1 as nw -import pytest import pandas as pd import polars as pl +import pytest +from packaging.version import Version 
import altair as alt -from altair.utils.schemapi import Undefined +from altair.utils.schemapi import Optional, Undefined try: import vl_convert as vlc @@ -329,11 +328,10 @@ def test_multiple_encodings(args, kwargs): assert dct["encoding"]["tooltip"] == encoding_dct -@pytest.mark.filterwarnings("ignore:'Y' is deprecated.*:FutureWarning") def test_chart_operations(): data = pd.DataFrame( { - "x": pd.date_range("2012", periods=10, freq="Y"), + "x": pd.date_range("2012", periods=10, freq="YS"), "y": range(10), "c": list("abcabcabca"), } @@ -528,6 +526,7 @@ def test_when_labels_position_based_on_condition() -> None: """ import numpy as np import pandas as pd + from altair.utils.schemapi import SchemaValidationError rand = np.random.RandomState(42) @@ -667,9 +666,9 @@ def test_when_multiple_fields(): alt.selection_point(fields=["Horsepower"]), ], ) -@pytest.mark.parametrize("empty", [alt.Undefined, True, False]) +@pytest.mark.parametrize("empty", [Undefined, True, False]) def test_when_condition_parity( - cars, channel: str, when, empty: alt.Optional[bool], then, otherwise + cars, channel: str, when, empty: Optional[bool], then, otherwise ): params = [when] if isinstance(when, alt.Parameter) else () kwds = {"x": "Cylinders:N", "y": "Origin:N"} @@ -812,19 +811,15 @@ def test_to_url(basic_chart): pytest.skip("vl_convert is not installed") share_url = basic_chart.to_url() - expected_vegalite_encoding = "N4Igxg9gdgZglgcxALlANzgUwO4tJKAFzigFcJSBnAdTgBNCALFAZgAY2AacaYsiygAlMiRoVYcAvpO50AhoTl4QUOQFtMKEPMUBaAOwA2ABwAWFi1NyTcgEb7TtuabAswc-XTZhMczLdNDAEYQGRA1OQAnAGtlQgBPAAdNZBAnSNDuTChIOhIkVBAAD2V4TAAbOi0lbgTkrSgINRI5csyQeNKsSq1bEFqklJAAR1I5IjhFYjRNaW4AEkowRkwIrTFCRMpkAHodmYQ5ADoEScZSWyO4CB2llYj9zEPdcsnMfYBWI6CATiO2I4AK0o0H62gUckomEIlGUOjkBhM5ks1mMdgcThcbg8Xh8fgCwRQAG1QEpUgBBMF9ZAAJmMMlJWgAQlSUB8PgyQGSQABhVnIcyc7kAEX5PyCQq0AFF+cYJZxGakAGL8j4sSWpADi-N+GpAgll+j1AElVTTJABdaRAA" - assert ( - share_url - == f"https://vega.github.io/editor/#/url/vega-lite/{expected_vegalite_encoding}" - 
) + assert share_url.startswith("https://vega.github.io/editor/#/url/vega-lite/") # Check fullscreen fullscreen_share_url = basic_chart.to_url(fullscreen=True) - assert ( - fullscreen_share_url - == f"https://vega.github.io/editor/#/url/vega-lite/{expected_vegalite_encoding}/view" + assert fullscreen_share_url.startswith( + "https://vega.github.io/editor/#/url/vega-lite/" ) + assert fullscreen_share_url.endswith("/view") def test_facet_basic(): @@ -1404,39 +1399,20 @@ def test_layer_errors(): simple_chart = alt.Chart("data.txt").mark_point() - with pytest.raises(ValueError) as err: # noqa: PT011 + with pytest.raises(TypeError, match=r".config. attribute cannot.+LayerChart"): toplevel_chart + simple_chart - assert str(err.value).startswith( - 'Objects with "config" attribute cannot be used within LayerChart.' - ) - with pytest.raises(ValueError) as err: # noqa: PT011 + with pytest.raises(TypeError, match=r"Concat.+cannot.+layered.+before concat"): alt.hconcat(simple_chart) + simple_chart - assert ( - str(err.value) - == "Concatenated charts cannot be layered. Instead, layer the charts before concatenating." - ) - with pytest.raises(ValueError) as err: # noqa: PT011 + with pytest.raises(TypeError, match=r"Repeat.+cannot.+layered.+before repeat"): repeat_chart + simple_chart - assert ( - str(err.value) - == "Repeat charts cannot be layered. Instead, layer the charts before repeating." - ) - with pytest.raises(ValueError) as err: # noqa: PT011 + with pytest.raises(TypeError, match=r"Facet.+.+cannot.+layered.+before facet"): facet_chart1 + simple_chart - assert ( - str(err.value) - == "Faceted charts cannot be layered. Instead, layer the charts before faceting." - ) - with pytest.raises(ValueError) as err: # noqa: PT011 + with pytest.raises(TypeError, match=r"Facet.+.+cannot.+layered.+before facet"): alt.layer(simple_chart) + facet_chart2 - assert ( - str(err.value) - == "Faceted charts cannot be layered. Instead, layer the charts before faceting." 
- ) @pytest.mark.parametrize( diff --git a/tests/vegalite/v5/test_geo_interface.py b/tests/vegalite/v5/test_geo_interface.py index bc3dc6444..6a689a5d5 100644 --- a/tests/vegalite/v5/test_geo_interface.py +++ b/tests/vegalite/v5/test_geo_interface.py @@ -1,4 +1,5 @@ import pytest + import altair.vegalite.v5 as alt diff --git a/tests/vegalite/v5/test_params.py b/tests/vegalite/v5/test_params.py index c741adcf4..a5ea3c001 100644 --- a/tests/vegalite/v5/test_params.py +++ b/tests/vegalite/v5/test_params.py @@ -1,11 +1,10 @@ """Tests for variable parameters and selection parameters.""" -import pandas as pd - +import re import warnings -import pytest -import re +import pandas as pd +import pytest import altair.vegalite.v5 as alt from altair.utils.deprecation import AltairDeprecationWarning diff --git a/tests/vegalite/v5/test_renderers.py b/tests/vegalite/v5/test_renderers.py index 0b02ef214..c0c1333a7 100644 --- a/tests/vegalite/v5/test_renderers.py +++ b/tests/vegalite/v5/test_renderers.py @@ -6,7 +6,6 @@ import altair.vegalite.v5 as alt - try: import vl_convert as vlc except ImportError: diff --git a/tools/__init__.py b/tools/__init__.py index 09251b2a1..052b8e9c0 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -1,4 +1,4 @@ -from tools import schemapi, generate_api_docs, generate_schema_wrapper, update_init_file +from tools import generate_api_docs, generate_schema_wrapper, schemapi, update_init_file __all__ = [ "generate_api_docs", diff --git a/tools/generate_api_docs.py b/tools/generate_api_docs.py index 54facf960..d3771d6b7 100644 --- a/tools/generate_api_docs.py +++ b/tools/generate_api_docs.py @@ -1,10 +1,11 @@ """Fills the contents of doc/user_guide/api.rst based on the updated Altair schema.""" from __future__ import annotations -from pathlib import Path + import types -from typing import Final, Iterator +from pathlib import Path from types import ModuleType +from typing import Final, Iterator import altair as alt @@ -71,6 +72,17 @@ :nosignatures: 
{api_classes} + +Typing +------ +.. currentmodule:: altair.typing + +.. autosummary:: + :toctree: generated/typing/ + :nosignatures: + + {typing_objects} + """ @@ -108,7 +120,8 @@ def api_functions() -> list[str]: altair_api_functions = [ obj_name for obj_name in iter_objects(alt.api, restrict_to_type=types.FunctionType) # type: ignore[attr-defined] - if obj_name not in {"cast", "overload", "NamedTuple", "TypedDict"} + if obj_name + not in {"cast", "overload", "NamedTuple", "TypedDict", "is_chart_type"} ] return sorted(altair_api_functions) @@ -118,6 +131,10 @@ def api_classes() -> list[str]: return ["expr", "When", "Then", "ChainedWhen"] +def type_hints() -> list[str]: + return [s for s in sorted(iter_objects(alt.typing)) if s != "annotations"] + + def lowlevel_wrappers() -> list[str]: objects = sorted(iter_objects(alt.schema.core, restrict_to_subclass=alt.SchemaBase)) # type: ignore[attr-defined] # The names of these two classes are also used for classes in alt.channels. Due to @@ -139,6 +156,7 @@ def write_api_file() -> None: encoding_wrappers=sep.join(encoding_wrappers()), lowlevel_wrappers=sep.join(lowlevel_wrappers()), api_classes=sep.join(api_classes()), + typing_objects=sep.join(type_hints()), ), encoding="utf-8", ) diff --git a/tools/generate_schema_wrapper.py b/tools/generate_schema_wrapper.py index e9de9117f..20730ab22 100644 --- a/tools/generate_schema_wrapper.py +++ b/tools/generate_schema_wrapper.py @@ -1,32 +1,36 @@ """Generate a schema wrapper from a schema.""" from __future__ import annotations + import argparse import copy import json -from pathlib import Path import re import sys import textwrap from dataclasses import dataclass -from typing import Final, Iterable, Literal, Iterator from itertools import chain +from pathlib import Path +from typing import Final, Iterable, Iterator, Literal from urllib import request +import vl_convert as vlc + sys.path.insert(0, str(Path.cwd())) -from tools.schemapi import codegen, CodeSnippet, SchemaInfo +from 
tools.schemapi import CodeSnippet, SchemaInfo, codegen from tools.schemapi.utils import ( + TypeAliasTracer, get_valid_identifier, + indent_docstring, resolve_references, - ruff_format_py, + rst_parse, rst_syntax_for_class, - indent_docstring, + ruff_format_py, ruff_write_lint_format_str, - rst_parse, + spell_literal, ) - -SCHEMA_VERSION: Final = "v5.19.0" +SCHEMA_VERSION: Final = "v5.20.1" reLink = re.compile(r"(?<=\[)([^\]]+)(?=\]\([^\)]+\))", re.MULTILINE) reSpecial = re.compile(r"[*_]{2,3}|`", re.MULTILINE) @@ -100,7 +104,8 @@ def to_dict( if shorthand is Undefined: parsed = {} elif isinstance(shorthand, (str, dict)): - parsed = parse_shorthand(shorthand, data=context.get("data", None)) + data: nw.DataFrame | Any = context.get("data", None) + parsed = parse_shorthand(shorthand, data=data) type_required = "type" in self._kwds # type: ignore[attr-defined] type_in_shorthand = "type" in parsed type_defined_explicitly = self._get("type") is not Undefined # type: ignore[attr-defined] @@ -109,7 +114,7 @@ def to_dict( # We still parse it out of the shorthand, but drop it here. parsed.pop("type", None) elif not (type_in_shorthand or type_defined_explicitly): - if _is_pandas_dataframe(context.get("data", None)): + if isinstance(data, nw.DataFrame): msg = ( f'Unable to determine data type for the field "{shorthand}";' " verify that the field name is not misspelled." @@ -209,7 +214,7 @@ def configure_{prop}(self, *args, **kwargs) -> Self: ENCODE_METHOD: Final = ''' class _EncodingMixin: - def encode({encode_method_args}) -> Self: + def encode({method_args}) -> Self: """Map properties of the data to visual properties of the chart (see :class:`FacetedEncoding`) {docstring}""" # Compat prep for `infer_encoding_types` signature @@ -233,6 +238,14 @@ def encode({encode_method_args}) -> Self: return copy ''' +ENCODE_TYPED_DICT: Final = ''' +class EncodeKwds(TypedDict, total=False): + """Encoding channels map properties of the data to visual properties of the chart. 
+ {docstring}""" + {channels} + +''' + # NOTE: Not yet reasonable to generalize `TypeAliasType`, `TypeVar` # Revisit if this starts to become more common TYPING_EXTRA: Final = ''' @@ -359,6 +372,15 @@ def download_schemafile( return fp +def update_vega_themes(fp: Path, /, indent: str | int | None = 2) -> None: + themes = vlc.get_themes() + data = json.dumps(themes, indent=indent, sort_keys=True) + fp.write_text(data, encoding="utf8") + + theme_names = sorted(iter(themes)) + TypeAliasTracer.update_aliases(("VegaThemes", spell_literal(theme_names))) + + def load_schema_with_shorthand_properties(schemapath: Path) -> dict: with schemapath.open(encoding="utf8") as f: schema = json.load(f) @@ -529,7 +551,7 @@ def generate_vegalite_schema_wrapper(schema_file: Path) -> str: "from altair.utils.schemapi import SchemaBase, Undefined, UndefinedType, _subclasses # noqa: F401\n", _type_checking_only_imports( "from altair import Parameter", - "from altair.utils.schemapi import Optional", + "from altair.typing import Optional", "from ._typing import * # noqa: F403", ), "\n" f"__all__ = {all_}\n", @@ -562,18 +584,29 @@ def _type_checking_only_imports(*imports: str) -> str: class ChannelInfo: supports_arrays: bool deep_description: str - field_class_name: str | None = None + field_class_name: str datum_class_name: str | None = None value_class_name: str | None = None + @property + def is_field_only(self) -> bool: + return not (self.datum_class_name or self.value_class_name) + @property def all_names(self) -> Iterator[str]: - if self.field_class_name: - yield self.field_class_name - if self.datum_class_name: - yield self.datum_class_name - if self.value_class_name: - yield self.value_class_name + """All channels are expected to have a field class.""" + yield self.field_class_name + yield from self.non_field_names + + @property + def non_field_names(self) -> Iterator[str]: + if self.is_field_only: + yield from () + else: + if self.datum_class_name: + yield self.datum_class_name + if 
self.value_class_name: + yield self.value_class_name def generate_vegalite_channel_wrappers( @@ -595,50 +628,37 @@ def generate_vegalite_channel_wrappers( supports_arrays = any( schema_info.is_array() for schema_info in propschema.anyOf ) + classname: str = prop[0].upper() + prop[1:] channel_info = ChannelInfo( supports_arrays=supports_arrays, deep_description=propschema.deep_description, + field_class_name=classname, ) for encoding_spec, definition in def_dict.items(): - classname = prop[0].upper() + prop[1:] basename = definition.rsplit("/", maxsplit=1)[-1] basename = get_valid_identifier(basename) + gen: SchemaGenerator defschema = {"$ref": definition} - - Generator: ( - type[FieldSchemaGenerator] - | type[DatumSchemaGenerator] - | type[ValueSchemaGenerator] - ) + kwds = { + "basename": basename, + "schema": defschema, + "rootschema": schema, + "encodingname": prop, + "haspropsetters": True, + } if encoding_spec == "field": - Generator = FieldSchemaGenerator - nodefault = [] - channel_info.field_class_name = classname - + gen = FieldSchemaGenerator(classname, nodefault=[], **kwds) elif encoding_spec == "datum": - Generator = DatumSchemaGenerator - classname += "Datum" - nodefault = ["datum"] - channel_info.datum_class_name = classname - + temp_name = f"{classname}Datum" + channel_info.datum_class_name = temp_name + gen = DatumSchemaGenerator(temp_name, nodefault=["datum"], **kwds) elif encoding_spec == "value": - Generator = ValueSchemaGenerator - classname += "Value" - nodefault = ["value"] - channel_info.value_class_name = classname - - gen = Generator( - classname=classname, - basename=basename, - schema=defschema, - rootschema=schema, - encodingname=prop, - nodefault=nodefault, - haspropsetters=True, - altair_classes_prefix="core", - ) + temp_name = f"{classname}Value" + channel_info.value_class_name = temp_name + gen = ValueSchemaGenerator(temp_name, nodefault=["value"], **kwds) + class_defs.append(gen.schema_class()) channel_infos[prop] = channel_info @@ 
-656,12 +676,14 @@ def generate_vegalite_channel_wrappers( imports = imports or [ "from __future__ import annotations\n", - "from typing import Any, overload, Sequence, List, Literal, Union, TYPE_CHECKING", - "from narwhals.dependencies import is_pandas_dataframe as _is_pandas_dataframe", + "from typing import Any, overload, Sequence, List, Literal, Union, TYPE_CHECKING, TypedDict", + "from typing_extensions import TypeAlias", + "import narwhals.stable.v1 as nw", "from altair.utils.schemapi import Undefined, with_property_setters", "from altair.utils import infer_encoding_types as _infer_encoding_types", "from altair.utils import parse_shorthand", "from . import core", + "from ._typing import * # noqa: F403", ] contents = [ HEADER, @@ -669,18 +691,14 @@ def generate_vegalite_channel_wrappers( *imports, _type_checking_only_imports( "from altair import Parameter, SchemaBase", - "from altair.utils.schemapi import Optional", - "from ._typing import * # noqa: F403", + "from altair.typing import Optional", "from typing_extensions import Self", ), "\n" f"__all__ = {sorted(all_)}\n", CHANNEL_MIXINS, *class_defs, + *generate_encoding_artifacts(channel_infos, ENCODE_METHOD, ENCODE_TYPED_DICT), ] - - # Generate the type signature for the encode method - encode_signature = _create_encode_signature(channel_infos) - contents.append(encode_signature) return "\n".join(contents) @@ -786,6 +804,10 @@ def vegalite_main(skip_download: bool = False) -> None: skip_download=skip_download, ) + fp_themes = schemapath / "vega-themes.json" + print(f"Updating themes\n {schemafile!s}\n ->{fp_themes!s}") + update_vega_themes(fp_themes) + # Generate __init__.py file outfile = schemapath / "__init__.py" print(f"Writing {outfile!s}") @@ -832,7 +854,7 @@ def vegalite_main(skip_download: bool = False) -> None: "\n\n", _type_checking_only_imports( "from altair import Parameter, SchemaBase", - "from altair.utils.schemapi import Optional", + "from altair.typing import Optional", "from ._typing import * 
# noqa: F403", ), "\n\n\n", @@ -843,8 +865,6 @@ def vegalite_main(skip_download: bool = False) -> None: files[fp_mixins] = content_mixins # Write `_typing.py` TypeAlias, for import in generated modules - from tools.schemapi.utils import TypeAliasTracer - fp_typing = schemapath / "_typing.py" msg = ( f"Generating\n {schemafile!s}\n ->{fp_typing!s}\n" @@ -861,59 +881,68 @@ def vegalite_main(skip_download: bool = False) -> None: ruff_write_lint_format_str(fp, contents) -def _create_encode_signature( - channel_infos: dict[str, ChannelInfo], -) -> str: +def generate_encoding_artifacts( + channel_infos: dict[str, ChannelInfo], fmt_method: str, fmt_typed_dict: str +) -> Iterator[str]: + """ + Generate ``Chart.encode()`` and related typing structures. + + - `TypeAlias`(s) for each parameter to ``Chart.encode()`` + - Mixin class that provides the ``Chart.encode()`` method + - `TypedDict`, utilising/describing these structures as part of https://github.com/pola-rs/polars/pull/17995. + + Notes + ----- + - `Map`/`Dict` stands for the return types of `alt.(datum|value)`, and any encoding channel class. 
+ - See discussions in https://github.com/vega/altair/pull/3208 + - We could be more specific about what types are accepted in the `List` + - but this translates poorly to an IDE + - `info.supports_arrays` + """ signature_args: list[str] = ["self", "*args: Any"] - docstring_parameters: list[str] = ["", "Parameters", "----------"] + type_aliases: list[str] = [] + typed_dict_args: list[str] = [] + signature_doc_params: list[str] = ["", "Parameters", "----------"] + typed_dict_doc_params: list[str] = ["", "Parameters", "----------"] + for channel, info in channel_infos.items(): - field_class_name = info.field_class_name - assert ( - field_class_name is not None - ), "All channels are expected to have a field class" - datum_and_value_class_names = [] - if info.datum_class_name is not None: - datum_and_value_class_names.append(info.datum_class_name) - - if info.value_class_name is not None: - datum_and_value_class_names.append(info.value_class_name) - - # dict stands for the return types of alt.datum, alt.value as well as - # the dictionary representation of an encoding channel class. See - # discussions in https://github.com/vega/altair/pull/3208 - # for more background. - union_types = ["str", field_class_name, "Map"] - docstring_union_types = ["str", rst_syntax_for_class(field_class_name), "Dict"] + alias_name: str = f"Channel{channel[0].upper()}{channel[1:]}" + + it: Iterator[str] = info.all_names + it_rst_names: Iterator[str] = (rst_syntax_for_class(c) for c in info.all_names) + + docstring_types: list[str] = ["str", next(it_rst_names), "Dict"] + tp_inner: str = ", ".join(chain(("str", next(it), "Map"), it)) + tp_inner = f"Union[{tp_inner}]" + if info.supports_arrays: - # We could be more specific about what types are accepted in the list - # but then the signatures would get rather long and less useful - # to a user when it shows up in their IDE. 
- union_types.append("list") - docstring_union_types.append("List") - - union_types = union_types + datum_and_value_class_names - docstring_union_types = docstring_union_types + [ - rst_syntax_for_class(c) for c in datum_and_value_class_names - ] + docstring_types.append("List") + tp_inner = f"OneOrSeq[{tp_inner}]" - signature_args.append( - f"{channel}: Optional[Union[{', '.join(union_types)}]] = Undefined" - ) + doc_types_flat: str = ", ".join(chain(docstring_types, it_rst_names)) - docstring_parameters.extend( - ( - f"{channel} : {', '.join(docstring_union_types)}", - f" {process_description(info.deep_description)}", - ) - ) - if len(docstring_parameters) > 1: - docstring_parameters += [""] - docstring = indent_docstring( - docstring_parameters, indent_level=8, width=100, lstrip=False + type_aliases.append(f"{alias_name}: TypeAlias = {tp_inner}") + # We use the full type hints instead of the alias in the signatures below + # as IDEs such as VS Code would else show the name of the alias instead + # of the expanded full type hints. The later are more useful to users. 
+ typed_dict_args.append(f"{channel}: {tp_inner}") + signature_args.append(f"{channel}: Optional[{tp_inner}] = Undefined") + + description: str = f" {process_description(info.deep_description)}" + + signature_doc_params.extend((f"{channel} : {doc_types_flat}", description)) + typed_dict_doc_params.extend((f"{channel}", description)) + + method: str = fmt_method.format( + method_args=", ".join(signature_args), + docstring=indent_docstring(signature_doc_params, indent_level=8, lstrip=False), ) - return ENCODE_METHOD.format( - encode_method_args=", ".join(signature_args), docstring=docstring + typed_dict: str = fmt_typed_dict.format( + channels="\n ".join(typed_dict_args), + docstring=indent_docstring(typed_dict_doc_params, indent_level=4, lstrip=False), ) + artifacts: Iterable[str] = *type_aliases, method, typed_dict + yield from artifacts def main() -> None: diff --git a/tools/schemapi/__init__.py b/tools/schemapi/__init__.py index 394d5f9b6..023a9a2af 100644 --- a/tools/schemapi/__init__.py +++ b/tools/schemapi/__init__.py @@ -1,8 +1,8 @@ """schemapi: tools for generating Python APIs from JSON schemas.""" -from tools.schemapi.schemapi import SchemaBase, Undefined -from tools.schemapi.utils import SchemaInfo from tools.schemapi import codegen, utils from tools.schemapi.codegen import CodeSnippet +from tools.schemapi.schemapi import SchemaBase, Undefined +from tools.schemapi.utils import SchemaInfo __all__ = ["CodeSnippet", "SchemaBase", "SchemaInfo", "Undefined", "codegen", "utils"] diff --git a/tools/schemapi/codegen.py b/tools/schemapi/codegen.py index a0e632f56..0533964b4 100644 --- a/tools/schemapi/codegen.py +++ b/tools/schemapi/codegen.py @@ -1,17 +1,18 @@ """Code generation utilities.""" from __future__ import annotations + import re import textwrap -from typing import Final from dataclasses import dataclass +from typing import Final from .utils import ( SchemaInfo, - is_valid_identifier, + flatten, indent_docstring, + is_valid_identifier, 
jsonschema_to_python_types, - flatten, ) @@ -104,7 +105,6 @@ class SchemaGenerator: rootschemarepr : CodeSnippet or object, optional An object whose repr will be used in the place of the explicit root schema. - altair_classes_prefix : string, optional **kwargs : dict Additional keywords for derived classes. """ @@ -140,7 +140,6 @@ def __init__( rootschemarepr: object | None = None, nodefault: list[str] | None = None, haspropsetters: bool = False, - altair_classes_prefix: str | None = None, **kwargs, ) -> None: self.classname = classname @@ -152,7 +151,6 @@ def __init__( self.nodefault = nodefault or () self.haspropsetters = haspropsetters self.kwargs = kwargs - self.altair_classes_prefix = altair_classes_prefix def subclasses(self) -> list[str]: """Return a list of subclass names, if any.""" @@ -225,16 +223,9 @@ def docstring(self, indent: int = 0) -> str: ): propinfo = info.properties[prop] doc += [ - "{} : {}".format( - prop, - propinfo.get_python_type_representation( - altair_classes_prefix=self.altair_classes_prefix, - ), - ), + f"{prop} : {propinfo.get_python_type_representation()}", f" {self._process_description(propinfo.deep_description)}", ] - if len(doc) > 1: - doc += [""] return indent_docstring(doc, indent_level=indent, width=100, lstrip=True) def init_code(self, indent: int = 0) -> str: @@ -278,9 +269,7 @@ def init_args( [ *additional_types, *info.properties[p].get_python_type_representation( - for_type_hints=True, - altair_classes_prefix=self.altair_classes_prefix, - return_as_str=False, + for_type_hints=True, return_as_str=False ), ] ) @@ -314,9 +303,7 @@ def get_args(self, si: SchemaInfo) -> list[str]: [ f"{p}: " + info.get_python_type_representation( - for_type_hints=True, - altair_classes_prefix=self.altair_classes_prefix, - additional_type_hints=["UndefinedType"], + for_type_hints=True, additional_type_hints=["UndefinedType"] ) + " = Undefined" for p, info in prop_infos.items() diff --git a/tools/schemapi/schemapi.py b/tools/schemapi/schemapi.py 
index 43ecdc73f..1c756c2a2 100644 --- a/tools/schemapi/schemapi.py +++ b/tools/schemapi/schemapi.py @@ -4,31 +4,33 @@ import copy import inspect import json +import sys import textwrap -from math import ceil from collections import defaultdict +from functools import partial from importlib.metadata import version as importlib_version from itertools import chain, zip_longest -import sys +from math import ceil from typing import ( TYPE_CHECKING, Any, + Dict, Final, Iterable, Iterator, + List, Literal, Sequence, TypeVar, Union, overload, - List, - Dict, ) from typing_extensions import TypeAlias -from functools import partial + import jsonschema import jsonschema.exceptions import jsonschema.validators +import narwhals.stable.v1 as nw from packaging.version import Version # This leads to circular imports with the vegalite module. Currently, this works @@ -37,10 +39,11 @@ from altair import vegalite if TYPE_CHECKING: + from typing import ClassVar + from referencing import Registry - from altair import ChartType - from typing import ClassVar + from altair.typing import ChartType if sys.version_info >= (3, 13): from typing import TypeIs @@ -48,9 +51,9 @@ from typing_extensions import TypeIs if sys.version_info >= (3, 11): - from typing import Self, Never + from typing import Never, Self else: - from typing_extensions import Self, Never + from typing_extensions import Never, Self ValidationErrorList: TypeAlias = List[jsonschema.exceptions.ValidationError] GroupedValidationErrors: TypeAlias = Dict[str, ValidationErrorList] @@ -484,7 +487,15 @@ def _subclasses(cls: type[Any]) -> Iterator[type[Any]]: yield cls -def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any: +def _from_array_like(obj: Iterable[Any], /) -> list[Any]: + try: + ser = nw.from_native(obj, strict=True, series_only=True) + return ser.to_list() + except TypeError: + return list(obj) + + +def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) -> Any: # 
noqa: C901 """Convert an object to a dict representation.""" if np_opt is not None: np = np_opt @@ -508,10 +519,16 @@ def _todict(obj: Any, context: dict[str, Any] | None, np_opt: Any, pd_opt: Any) for k, v in obj.items() if v is not Undefined } - elif hasattr(obj, "to_dict"): + elif ( + hasattr(obj, "to_dict") + and (module_name := obj.__module__) + and module_name.startswith("altair") + ): return obj.to_dict() elif pd_opt is not None and isinstance(obj, pd_opt.Timestamp): return pd_opt.Timestamp(obj).isoformat() + elif _is_iterable(obj, exclude=(str, bytes)): + return _todict(_from_array_like(obj), context, np_opt, pd_opt) else: return obj @@ -742,10 +759,12 @@ def _get_default_error_message( # Add unformatted messages of any remaining errors which were not # considered so far. This is not expected to be used but more exists # as a fallback for cases which were not known during development. - for validator, errors in errors_by_validator.items(): - if validator not in {"enum", "type"}: - message += "\n".join([e.message for e in errors]) - + it = ( + "\n".join(e.message for e in errors) + for validator, errors in errors_by_validator.items() + if validator not in {"enum", "type"} + ) + message += "".join(it) return message @@ -773,7 +792,7 @@ def __repr__(self) -> str: The parameters ``short``, ``long`` accept the same range of types:: # ruff: noqa: UP006, UP007 - from altair import Optional + from altair.typing import Optional def func_1( short: Optional[str | bool | float | dict[str, Any] | SchemaBase] = Undefined, @@ -782,10 +801,12 @@ def func_1( ] = Undefined, ): ... -This is distinct from `typing.Optional `__ as ``altair.Optional`` treats ``None`` like any other type:: +This is distinct from `typing.Optional `__. 
+ +``altair.typing.Optional`` treats ``None`` like any other type:: # ruff: noqa: UP006, UP007 - from altair import Optional + from altair.typing import Optional def func_2( short: Optional[str | float | dict[str, Any] | None | SchemaBase] = Undefined, @@ -847,7 +868,7 @@ def __init__(self, *args: Any, **kwds: Any) -> None: if DEBUG_MODE and self._class_is_valid_at_instantiation: self.to_dict(validate=True) - def copy( + def copy( # noqa: C901 self, deep: bool | Iterable[Any] = True, ignore: list[str] | None = None ) -> Self: """ @@ -1226,6 +1247,12 @@ def _is_list(obj: Any | list[Any]) -> TypeIs[list[Any]]: return isinstance(obj, list) +def _is_iterable( + obj: Any, *, exclude: type | tuple[type, ...] = (str, bytes) +) -> TypeIs[Iterable[Any]]: + return not isinstance(obj, exclude) and isinstance(obj, Iterable) + + def _passthrough(*args: Any, **kwds: Any) -> Any | dict[str, Any]: return args[0] if args else kwds diff --git a/tools/schemapi/utils.py b/tools/schemapi/utils.py index eead00038..21617538d 100644 --- a/tools/schemapi/utils.py +++ b/tools/schemapi/utils.py @@ -1,15 +1,25 @@ """Utilities for working with schemas.""" from __future__ import annotations -from itertools import chain + import keyword import re import subprocess import textwrap import urllib -from typing import Any, Final, Iterable, TYPE_CHECKING, Iterator, Sequence -from operator import itemgetter from html import unescape +from itertools import chain +from operator import itemgetter +from typing import ( + TYPE_CHECKING, + Any, + Final, + Iterable, + Iterator, + Literal, + Sequence, + overload, +) import mistune from mistune.renderers.rst import RSTRenderer as _RSTRenderer @@ -17,9 +27,10 @@ from tools.schemapi.schemapi import _resolve_references as resolve_references if TYPE_CHECKING: - from mistune import BlockState - from typing_extensions import LiteralString from pathlib import Path + from typing_extensions import LiteralString + + from mistune import BlockState EXCLUDE_KEYS: Final = 
("definitions", "title", "description", "$schema", "id") @@ -361,21 +372,30 @@ def title(self) -> str: else: return "" + @overload def get_python_type_representation( + self, + for_type_hints: bool = ..., + return_as_str: Literal[True] = ..., + additional_type_hints: list[str] | None = ..., + ) -> str: ... + @overload + def get_python_type_representation( + self, + for_type_hints: bool = ..., + return_as_str: Literal[False] = ..., + additional_type_hints: list[str] | None = ..., + ) -> list[str]: ... + def get_python_type_representation( # noqa: C901 self, for_type_hints: bool = False, - altair_classes_prefix: str | None = None, return_as_str: bool = True, additional_type_hints: list[str] | None = None, ) -> str | list[str]: - # This is a list of all types which can be used for the current SchemaInfo. - # This includes Altair classes, standard Python types, etc. type_representations: list[str] = [] - TP_CHECK_ONLY = {"Parameter", "SchemaBase"} - """Most common annotations are include in `TYPE_CHECKING` block. - They do not require `core.` prefix, and this saves many lines of code. - - Eventually a more robust solution would apply to more types from `core`. + """ + All types which can be used for the current `SchemaInfo`. + Including `altair` classes, standard `python` types, etc. """ if self.title: @@ -383,7 +403,8 @@ def get_python_type_representation( # To keep type hints simple, we only use the SchemaBase class # as the type hint for all classes which inherit from it. class_names = ["SchemaBase"] - if self.title == "ExprRef": + if self.title in {"ExprRef", "ParameterExtent"}: + class_names.append("Parameter") # In these cases, a value parameter is also always accepted. 
# It would be quite complex to further differentiate # between a value and a selection parameter based on @@ -391,23 +412,7 @@ def get_python_type_representation( # try to check for the type of the Parameter.param attribute # but then we would need to write some overload signatures for # api.param). - class_names.append("Parameter") - if self.title == "ParameterExtent": - class_names.append("Parameter") - prefix = ( - "" if not altair_classes_prefix else altair_classes_prefix + "." - ) - # If there is no prefix, it might be that the class is defined - # in the same script and potentially after this line -> We use - # deferred type annotations using quotation marks. - if not prefix: - class_names = [f'"{n}"' for n in class_names] - else: - class_names = ( - n if n in TP_CHECK_ONLY else f"{prefix}{n}" for n in class_names - ) - # class_names = [f"{prefix}{n}" for n in class_names] type_representations.extend(class_names) else: # use RST syntax for generated sphinx docs @@ -425,26 +430,22 @@ def get_python_type_representation( tp_str = TypeAliasTracer.add_literal(self, spell_literal(it), replace=True) type_representations.append(tp_str) elif self.is_anyOf(): - type_representations.extend( - [ - s.get_python_type_representation( - for_type_hints=for_type_hints, - altair_classes_prefix=altair_classes_prefix, - return_as_str=False, - ) - for s in self.anyOf - ] + it = ( + s.get_python_type_representation( + for_type_hints=for_type_hints, return_as_str=False + ) + for s in self.anyOf ) + type_representations.extend(it) elif isinstance(self.type, list): options = [] subschema = SchemaInfo(dict(**self.schema)) for typ_ in self.type: subschema.schema["type"] = typ_ + # We always use title if possible for nested objects options.append( subschema.get_python_type_representation( - # We always use title if possible for nested objects - for_type_hints=for_type_hints, - altair_classes_prefix=altair_classes_prefix, + for_type_hints=for_type_hints ) ) 
type_representations.extend(options) @@ -467,14 +468,10 @@ def get_python_type_representation( # method. However, it is not entirely accurate as some sequences # such as e.g. a range are not supported by SchemaBase.to_dict but # this tradeoff seems worth it. - type_representations.append( - "Sequence[{}]".format( - self.child(self.items).get_python_type_representation( - for_type_hints=for_type_hints, - altair_classes_prefix=altair_classes_prefix, - ) - ) + s = self.child(self.items).get_python_type_representation( + for_type_hints=for_type_hints ) + type_representations.append(f"Sequence[{s}]") elif self.type in jsonschema_to_python_types: type_representations.append(jsonschema_to_python_types[self.type]) else: @@ -685,11 +682,13 @@ def __call__(self, s: str) -> str: rst_parse: RSTParse = RSTParse(RSTRenderer()) -def indent_docstring( +def indent_docstring( # noqa: C901 lines: list[str], indent_level: int, width: int = 100, lstrip=True ) -> str: """Indent a docstring for use in generated code.""" final_lines = [] + if len(lines) > 1: + lines += [""] for i, line in enumerate(lines): stripped = line.lstrip() diff --git a/tools/update_init_file.py b/tools/update_init_file.py index a1e368571..c1831093a 100644 --- a/tools/update_init_file.py +++ b/tools/update_init_file.py @@ -2,11 +2,11 @@ from __future__ import annotations -from inspect import ismodule, getattr_static -from pathlib import Path -from typing import TYPE_CHECKING import typing as t import typing_extensions as te +from inspect import getattr_static, ismodule +from pathlib import Path +from typing import TYPE_CHECKING from tools.schemapi.utils import ruff_write_lint_format_str