Commit e85ed82: update dependencies
1 parent: 8cd7404

File tree: 11 files changed, +45 -51 lines changed

.github/workflows/test.yml (+1, -1)

@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.10", "3.11", "3.12"]
       fail-fast: false
     steps:
       - uses: actions/checkout@v3

.readthedocs.yaml (+1, -1)

@@ -5,7 +5,7 @@ version: 2
 build:
   os: ubuntu-22.04
   tools:
-    python: "3.10"
+    python: "3.11"

 sphinx:
   fail_on_warning: false

external_tests/helpers.py (+11, -13)

@@ -5,7 +5,7 @@
 import logging
 import os
 import sys
-from typing import Any, Optional
+from typing import Any

 import cloudpickle
 import numpy as np

@@ -135,18 +135,18 @@ def pystan_noncentered_schools(data, draws, chains):
     schools_code = """
         data {
             int<lower=0> J;
-            real y[J];
-            real<lower=0> sigma[J];
+            array[J] real y;
+            array[J] real<lower=0> sigma;
         }

         parameters {
             real mu;
             real<lower=0> tau;
-            real eta[J];
+            array[J] real eta;
         }

         transformed parameters {
-            real theta[J];
+            array[J] real theta;
             for (j in 1:J)
                 theta[j] = mu + tau * eta[j];
         }

@@ -159,8 +159,8 @@ def pystan_noncentered_schools(data, draws, chains):
         }

         generated quantities {
-            vector[J] log_lik;
-            vector[J] y_hat;
+            array[J] real log_lik;
+            array[J] real y_hat;
             for (j in 1:J) {
                 log_lik[j] = normal_lpdf(y[j] | theta[j], sigma[j]);
                 y_hat[j] = normal_rng(theta[j], sigma[j]);

@@ -185,10 +185,10 @@ def load_cached_models(eight_schools_data, draws, chains, libs=None):
     """Load pystan, emcee, and pyro models from pickle."""
     here = os.path.dirname(os.path.abspath(__file__))
     supported = (
-        ("pystan", pystan_noncentered_schools),
+        # ("pystan", pystan_noncentered_schools),
         ("emcee", emcee_schools_model),
-        ("pyro", pyro_noncentered_schools),
-        ("numpyro", numpyro_schools_model),
+        # ("pyro", pyro_noncentered_schools),
+        # ("numpyro", numpyro_schools_model),
     )
     data_directory = os.path.join(here, "saved_models")
     if not os.path.isdir(data_directory):

@@ -241,9 +241,7 @@ def running_on_ci() -> bool:
     return os.environ.get("ARVIZ_CI_MACHINE") is not None


-def importorskip(
-    modname: str, minversion: Optional[str] = None, reason: Optional[str] = None
-) -> Any:
+def importorskip(modname: str, minversion: str | None = None, reason: str | None = None) -> Any:
     """Import and return the requested module ``modname``.

     Doesn't allow skips on CI machine.
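
Note on the Stan changes above: the bracketed declaration style (real y[J];) was deprecated in favor of the array[J] real y; form in newer Stan releases, which is why the test model string is rewritten. A minimal side-by-side sketch (illustrative only, not part of the commit), kept as Python strings as in the helper:

    # Old vs. new Stan array declarations for the same data block (sketch only).
    OLD_DATA_BLOCK = """
    data {
      int<lower=0> J;
      real y[J];               // old syntax, deprecated in newer Stan versions
      real<lower=0> sigma[J];
    }
    """

    NEW_DATA_BLOCK = """
    data {
      int<lower=0> J;
      array[J] real y;              // current syntax
      array[J] real<lower=0> sigma;
    }
    """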

pyproject.toml (+4, -4)

@@ -5,7 +5,7 @@ build-backend = "flit_core.buildapi"
 [project]
 name = "arviz-base"
 readme = "README.md"
-requires-python = ">=3.9"
+requires-python = ">=3.10"
 license = {file = "LICENSE"}
 authors = [
     {name = "ArviZ team", email = "[email protected]"}

@@ -19,14 +19,14 @@ classifiers = [
     "Operating System :: OS Independent",
     "Programming Language :: Python",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
 ]
 dynamic = ["version", "description"]
 dependencies = [
-    "numpy>=1.20",
-    "xarray>=0.18.0",
+    "numpy>=1.23",
+    "xarray>=2022.6.0",
     "xarray-datatree",
     "typing-extensions>=3.10",
 ]
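
A quick sketch (not from the commit) of how the raised floors can be sanity-checked in an environment where arviz-base is installed; the distribution name is taken from the [project] table above:

    import sys
    from importlib.metadata import requires, version

    # The new requires-python floor.
    assert sys.version_info >= (3, 10), "arviz-base now needs Python 3.10+"

    # Installed versions should satisfy the new minimums declared above.
    print(version("numpy"))        # expected >= 1.23
    print(version("xarray"))       # expected >= 2022.6.0
    print(requires("arviz-base"))  # lists the declared dependency specifiers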

src/arviz_base/base.py (+15, -15)

@@ -3,9 +3,9 @@
 import importlib
 import re
 import warnings
-from collections.abc import Hashable, Iterable, Mapping
+from collections.abc import Callable, Hashable, Iterable, Mapping
 from copy import deepcopy
-from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union
+from typing import TYPE_CHECKING, Any, TypeVar

 import numpy as np
 import xarray as xr

@@ -24,9 +24,9 @@
 def generate_dims_coords(
     shape: Iterable[int],
     var_name: Hashable,
-    dims: Optional[Iterable[Hashable]] = None,
-    coords: Optional[CoordSpec] = None,
-    index_origin: Optional[int] = None,
+    dims: Iterable[Hashable] | None = None,
+    coords: CoordSpec | None = None,
+    index_origin: int | None = None,
     skip_event_dims: bool = False,
     check_conventions: bool = True,
 ) -> tuple[list[Hashable], CoordSpec]:

@@ -216,12 +216,12 @@ def ndarray_to_dataarray(
 def dict_to_dataset(
     data: DictData,
     *,
-    attrs: Optional[Mapping[Any, Any]] = None,
-    inference_library: Optional[str] = None,
-    coords: Optional[CoordSpec] = None,
-    dims: Optional[DimSpec] = None,
-    sample_dims: Optional[Iterable[Hashable]] = None,
-    index_origin: Optional[int] = None,
+    attrs: Mapping[Any, Any] | None = None,
+    inference_library: str | None = None,
+    coords: CoordSpec | None = None,
+    dims: DimSpec | None = None,
+    sample_dims: Iterable[Hashable] | None = None,
+    index_origin: int | None = None,
     skip_event_dims: bool = False,
     check_conventions: bool = True,
 ):

@@ -377,15 +377,15 @@ class requires:  # pylint: disable=invalid-name
     See https://github.com/arviz-devs/arviz/pull/1504 for more discussion.
     """

-    def __init__(self, *props: Union[str, list[str]]) -> None:
-        self.props: tuple[Union[str, list[str]], ...] = props
+    def __init__(self, *props: str | list[str]) -> None:
+        self.props: tuple[str | list[str], ...] = props

     def __call__(
         self, func: Callable[[RequiresArgTypeT], RequiresReturnTypeT]
-    ) -> Callable[[RequiresArgTypeT], Optional[RequiresReturnTypeT]]:  # noqa: D202
+    ) -> Callable[[RequiresArgTypeT], RequiresReturnTypeT | None]:  # noqa: D202
         """Wrap the decorated function."""

-        def wrapped(cls: RequiresArgTypeT) -> Optional[RequiresReturnTypeT]:
+        def wrapped(cls: RequiresArgTypeT) -> RequiresReturnTypeT | None:
             """Return None if not all props are available."""
             for prop in self.props:
                 prop_list = [prop] if isinstance(prop, str) else prop
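
The typing changes in this file follow two rules throughout: PEP 604 unions (X | None) replace Optional/Union, and Callable is imported from collections.abc rather than typing. Both fit the Python 3.10 floor set in pyproject.toml. A small self-contained sketch of the style (illustrative names, not from the commit):

    from collections.abc import Callable  # preferred over typing.Callable

    def apply_default(value: int | None, fallback: Callable[[], int]) -> int:
        """Return value, or the result of fallback() when value is None."""
        return fallback() if value is None else value

    print(apply_default(None, lambda: 42))  # -> 42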

src/arviz_base/io_dict.py (+1, -2)

@@ -1,6 +1,5 @@
 """Dictionary specific conversion code."""
 import warnings
-from typing import Optional

 from datatree import DataTree

@@ -14,7 +13,7 @@ def from_dict(
     name=None,
     sample_dims=None,
     save_warmup=None,
-    index_origin: Optional[int] = None,
+    index_origin: int | None = None,
     coords=None,
     dims=None,
     pred_dims=None,

src/arviz_base/labels.py (+2, -3)

@@ -1,6 +1,5 @@
 # pylint: disable=unused-argument
 """Utilities to generate labels from xarray objects."""
-from typing import Union

 __all__ = [
     "mix_labellers",

@@ -92,7 +91,7 @@ def sel_to_str(self, sel: dict, isel: dict):
         )
         return ""

-    def var_name_to_str(self, var_name: Union[str, None]):  # pylint: disable=no-self-use
+    def var_name_to_str(self, var_name: str | None):  # pylint: disable=no-self-use
         """WIP."""
         return var_name

@@ -106,7 +105,7 @@ def model_name_to_str(self, model_name):  # pylint: disable=no-self-use
         """WIP."""
         return model_name

-    def make_label_vert(self, var_name: Union[str, None], sel: dict, isel: dict):
+    def make_label_vert(self, var_name: str | None, sel: dict, isel: dict):
         """WIP."""
         var_name_str = self.var_name_to_str(var_name)
         sel_str = self.sel_to_str(sel, isel)

src/arviz_base/rcparams.py (+2, -3)

@@ -7,10 +7,9 @@
 import sys
 from collections.abc import MutableMapping
 from pathlib import Path
-from typing import Any, Literal
+from typing import Any, Literal, get_args

 import numpy as np
-from typing_extensions import get_args

 _log = logging.getLogger("arviz")

@@ -203,7 +202,7 @@ def validate_iterable(value):
         if allow_auto and value.lower() == "auto":
             return "auto"
         value = tuple(v.strip("([ ])") for v in value.split(",") if v.strip())
-        if np.iterable(value) and not isinstance(value, (set, frozenset)):
+        if np.iterable(value) and not isinstance(value, set | frozenset):
             val = tuple(scalar_validator(v) for v in value)
             if length is not None and len(val) != length:
                 raise ValueError(f"Iterable must be of length: {length}")
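
typing.get_args has been part of the standard typing module since Python 3.8, so the typing_extensions fallback import dropped above is no longer needed. A brief sketch of the call (illustrative, not taken from the module):

    from typing import Literal, get_args

    ScaleKind = Literal["log", "linear"]
    print(get_args(ScaleKind))  # -> ('log', 'linear')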

src/arviz_base/testing.py (+2, -3)

@@ -1,13 +1,12 @@
 """ArviZ testing utilities."""

-from typing import Union

 from datatree import DataTree


 def check_multiple_attrs(
     test_dict: dict[str, list[str]], parent: DataTree
-) -> list[Union[str, tuple[str, str]]]:
+) -> list[str | tuple[str, str]]:
     """Perform multiple hasattr checks on InferenceData objects.

     It is thought to first check if the parent object contains a given dataset,

@@ -50,7 +49,7 @@ def check_multiple_attrs(
     in ``sample_stats``, also against what was expected.

     """
-    failed_attrs: list[Union[str, tuple[str, str]]] = []
+    failed_attrs: list[str | tuple[str, str]] = []
     for dataset_name, attributes in test_dict.items():
         if dataset_name.startswith("~"):
             if hasattr(parent, dataset_name[1:]):

src/arviz_base/utils.py (+3, -3)

@@ -32,7 +32,7 @@ def _var_names(var_names, data, filter_vars=None):
     )

     if var_names is not None:
-        if isinstance(data, (list, tuple)):
+        if isinstance(data, list | tuple):
             all_vars = []
             for dataset in data:
                 dataset_vars = list(dataset.data_vars)

@@ -156,7 +156,7 @@ def _get_coords(data, coords):
     data : Dataset or DataArray
         Return type is of the same type as the input
     """
-    if not isinstance(data, (list, tuple)):
+    if not isinstance(data, list | tuple):
         try:
             return data.sel(**coords)

@@ -170,7 +170,7 @@ def _get_coords(data, coords):
             "Check that coords structure is correct and"
             f" dimensions are valid. {err}"
         ) from err
-    if not isinstance(coords, (list, tuple)):
+    if not isinstance(coords, list | tuple):
         coords = [coords] * len(data)
     data_subset = []
     for idx, (datum, coords_dict) in enumerate(zip(data, coords)):
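
The isinstance(..., list | tuple) checks above rely on PEP 604: from Python 3.10, isinstance accepts a union type as its second argument. A minimal illustration (not from the commit):

    def as_list(data):
        """Wrap a single object in a list; pass lists and tuples through unchanged."""
        # A PEP 604 union as the classinfo argument requires Python 3.10+.
        return list(data) if isinstance(data, list | tuple) else [data]

    print(as_list((1, 2)))  # [1, 2]
    print(as_list("abc"))   # ['abc']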

tox.ini (+3, -3)

@@ -2,22 +2,22 @@
 envlist =
     check
     docs
-    {py39,py310,py311}{,-coverage}
+    {py310,py311,py312}{,-coverage}
 # See https://tox.readthedocs.io/en/latest/example/package.html#flit
 isolated_build = True
 isolated_build_env = build

 [gh-actions]
 python =
-    3.9: py39
     3.10: py310
     3.11: py311
+    3.12: py312

 [testenv]
 basepython =
-    py39: python3.9
     py310: python3.10
     py311: python3.11
+    py312: python3.12
 # See https://github.com/tox-dev/tox/issues/1548
     {check,docs,cleandocs,viewdocs,build}: python3
 setenv =
