Commit 1ae9a22

Merge branch 'main' into coders

kmuehlbauer authored Dec 30, 2024
2 parents 6cd81e5 + 33bf5e8

Showing 12 changed files with 335 additions and 227 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/ci-additional.yaml
@@ -123,7 +123,7 @@ jobs:
           python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
       - name: Upload mypy coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         with:
           file: mypy_report/cobertura.xml
           flags: mypy
@@ -174,7 +174,7 @@ jobs:
           python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
       - name: Upload mypy coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         with:
           file: mypy_report/cobertura.xml
           flags: mypy-min
@@ -230,7 +230,7 @@ jobs:
           python -m pyright xarray/
       - name: Upload pyright coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         with:
           file: pyright_report/cobertura.xml
           flags: pyright
@@ -286,7 +286,7 @@ jobs:
           python -m pyright xarray/
       - name: Upload pyright coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         with:
           file: pyright_report/cobertura.xml
           flags: pyright39
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -159,7 +159,7 @@ jobs:
           path: pytest.xml

       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
2 changes: 1 addition & 1 deletion .github/workflows/upstream-dev-ci.yaml
@@ -140,7 +140,7 @@ jobs:
         run: |
           python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
       - name: Upload mypy coverage to Codecov
-        uses: codecov/codecov-action@v5.1.1
+        uses: codecov/codecov-action@v5.1.2
         with:
           file: mypy_report/cobertura.xml
           flags: mypy
5 changes: 4 additions & 1 deletion asv_bench/benchmarks/rolling.py
@@ -3,7 +3,7 @@

 import xarray as xr

-from . import parameterized, randn, requires_dask
+from . import _skip_slow, parameterized, randn, requires_dask

 nx = 3000
 long_nx = 30000
@@ -80,6 +80,9 @@ def time_rolling_construct(self, center, stride, use_bottleneck):
 class RollingDask(Rolling):
     def setup(self, *args, **kwargs):
         requires_dask()
+        # TODO: Lazily skipped in CI as it is very demanding and slow.
+        # Improve times and remove errors.
+        _skip_slow()
         super().setup(**kwargs)
         self.ds = self.ds.chunk({"x": 100, "y": 50, "t": 50})
         self.da_long = self.da_long.chunk({"x": 10000})
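For readers unfamiliar with the helper: ASV treats a NotImplementedError raised during setup as "skip this benchmark", which is what makes the skip lazy. A minimal sketch of what _skip_slow plausibly looks like (the environment-variable name and message are assumptions, not taken from this diff):

import os


def _skip_slow() -> None:
    # ASV interprets NotImplementedError raised in setup() as a skip,
    # so CI can opt out of demanding benchmarks via an environment
    # variable while local runs still execute them.
    if os.environ.get("ASV_SKIP_SLOW", "0") == "1":  # assumed variable name
        raise NotImplementedError("Skipping this test...")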
2 changes: 2 additions & 0 deletions doc/whats-new.rst
@@ -21,6 +21,8 @@ v.2024.12.0 (unreleased)

 New Features
 ~~~~~~~~~~~~
+- Improve the error message raised when using chunked-array methods if no chunk manager is available or if the requested chunk manager is missing (:pull:`9676`).
+  By `Justus Magin <https://github.com/keewis>`_.
 - Better support wrapping additional array types (e.g. ``cupy`` or ``jax``) by calling generalized
   duck array operations throughout more xarray methods. (:issue:`7848`, :pull:`9798`).
   By `Sam Levang <https://github.com/slevang>`_.
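As context for that entry, the clearer message appears whenever a chunked-array operation cannot resolve a chunk manager. A hypothetical session (assuming the requested backend, here cubed, is not installed; the exact wording comes from xarray, not this sketch):

import xarray as xr

ds = xr.Dataset({"a": ("x", [1.0, 2.0, 3.0])})

# Requesting a chunk manager whose library is not installed now fails
# with an explicit, actionable error instead of an opaque lookup failure.
ds.chunk({"x": 2}, chunked_array_type="cubed")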
4 changes: 2 additions & 2 deletions xarray/backends/api.py
@@ -156,13 +156,13 @@ def check_name(name: Hashable):
             raise ValueError(
                 f"Invalid name {name!r} for DataArray or Dataset key: "
                 "string must be length 1 or greater for "
-                "serialization to netCDF files"
+                "serialization to netCDF or zarr files"
             )
         elif name is not None:
             raise TypeError(
                 f"Invalid name {name!r} for DataArray or Dataset key: "
                 "must be either a string or None for serialization to netCDF "
-                "files"
+                "or zarr files"
             )

     for k in dataset.variables:
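To see both branches in action (a hedged illustration; the checks run when a Dataset is serialized, and the messages are the ones updated above):

import xarray as xr

# An empty-string key fails the length check at save time:
xr.Dataset({"": ("x", [1, 2, 3])}).to_netcdf("out.nc")
# -> ValueError: ... "string must be length 1 or greater for
#    serialization to netCDF or zarr files"

# A non-string, non-None key fails the type check:
xr.Dataset({0: ("x", [1, 2, 3])}).to_zarr("out.zarr")
# -> TypeError: ... "must be either a string or None for serialization
#    to netCDF or zarr files"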
2 changes: 1 addition & 1 deletion xarray/backends/zarr.py
@@ -1471,7 +1471,7 @@ def open_zarr(
         )  # attempt to import that parallel backend

         chunks = {}
-    except ValueError:
+    except (ValueError, ImportError):
         chunks = None

     if kwargs:
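The widened except is the consumer side of the chunk-manager change above: guessing a chunk manager can now raise ImportError (missing library) in addition to ValueError (unknown manager), and open_zarr responds by falling back to eager loading. A simplified sketch of the pattern, with the helper name and import path stated as assumptions about xarray internals:

def _resolve_default_chunks(chunked_array_type):
    # Hypothetical helper mirroring the fallback in open_zarr above.
    try:
        # Assumed internal API: raises ValueError for an unknown manager
        # and, after this change, ImportError when the manager's library
        # (e.g. dask) is not installed.
        from xarray.namedarray.parallelcompat import guess_chunkmanager

        guess_chunkmanager(chunked_array_type)
        return {}  # lazy: let the chunk manager choose chunk sizes
    except (ValueError, ImportError):
        return None  # eager: load arrays directly into memory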
55 changes: 18 additions & 37 deletions xarray/core/dataset.py
@@ -2921,19 +2921,11 @@ def _validate_interp_indexers(
         """Variant of _validate_indexers to be used for interpolation"""
         for k, v in self._validate_indexers(indexers):
             if isinstance(v, Variable):
-                if v.ndim == 1:
-                    yield k, v.to_index_variable()
-                else:
-                    yield k, v
-            elif isinstance(v, int):
+                yield k, v
+            elif is_scalar(v):
                 yield k, Variable((), v, attrs=self.coords[k].attrs)
             elif isinstance(v, np.ndarray):
-                if v.ndim == 0:
-                    yield k, Variable((), v, attrs=self.coords[k].attrs)
-                elif v.ndim == 1:
-                    yield k, IndexVariable((k,), v, attrs=self.coords[k].attrs)
-                else:
-                    raise AssertionError()  # Already tested by _validate_indexers
+                yield k, Variable(dims=(k,), data=v, attrs=self.coords[k].attrs)
             else:
                 raise TypeError(type(v))
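The collapsed branches normalize every interpolation target to a plain Variable: scalars become 0-d Variables and arrays become 1-d Variables named after their dimension, with no IndexVariable special case. A small grounded example:

import numpy as np
import xarray as xr

ds = xr.Dataset(
    {"a": ("x", np.arange(4.0))},
    coords={"x": [0.0, 1.0, 2.0, 3.0]},
)

# A scalar target is wrapped as a 0-d Variable (carrying x's attrs):
ds.interp(x=1.5)

# An array target is wrapped as Variable(dims=("x",), data=...):
ds.interp(x=np.array([0.5, 2.5]))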
@@ -4127,18 +4119,6 @@ def interp(

         coords = either_dict_or_kwargs(coords, coords_kwargs, "interp")
         indexers = dict(self._validate_interp_indexers(coords))
-
-        if coords:
-            # This avoids broadcasting over coordinates that are both in
-            # the original array AND in the indexing array. It essentially
-            # forces interpolation along the shared coordinates.
-            sdims = (
-                set(self.dims)
-                .intersection(*[set(nx.dims) for nx in indexers.values()])
-                .difference(coords.keys())
-            )
-            indexers.update({d: self.variables[d] for d in sdims})
-
         obj = self if assume_sorted else self.sortby(list(coords))

         def maybe_variable(obj, k):
@@ -4169,16 +4149,18 @@ def _validate_interp_indexer(x, new_x):
             for k, v in indexers.items()
         }

-        # optimization: subset to coordinate range of the target index
-        if method in ["linear", "nearest"]:
-            for k, v in validated_indexers.items():
-                obj, newidx = missing._localize(obj, {k: v})
-                validated_indexers[k] = newidx[k]
-
-        # optimization: create dask coordinate arrays once per Dataset
-        # rather than once per Variable when dask.array.unify_chunks is called later
-        # GH4739
-        if obj.__dask_graph__():
+        has_chunked_array = bool(
+            any(is_chunked_array(v._data) for v in obj._variables.values())
+        )
+        if has_chunked_array:
+            # optimization: subset to coordinate range of the target index
+            if method in ["linear", "nearest"]:
+                for k, v in validated_indexers.items():
+                    obj, newidx = missing._localize(obj, {k: v})
+                    validated_indexers[k] = newidx[k]
+            # optimization: create dask coordinate arrays once per Dataset
+            # rather than once per Variable when dask.array.unify_chunks is called later
+            # GH4739
             dask_indexers = {
                 k: (index.to_base_variable().chunk(), dest.to_base_variable().chunk())
                 for k, (index, dest) in validated_indexers.items()
@@ -4190,10 +4172,9 @@ def _validate_interp_indexer(x, new_x):
             if name in indexers:
                 continue

-            if is_duck_dask_array(var.data):
-                use_indexers = dask_indexers
-            else:
-                use_indexers = validated_indexers
+            use_indexers = (
+                dask_indexers if is_duck_dask_array(var.data) else validated_indexers
+            )

             dtype_kind = var.dtype.kind
             if dtype_kind in "uifc":
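Taken together, the interp changes gate the dask-specific work (index localization and shared chunked coordinate arrays) behind an explicit chunked-array check, so purely in-memory datasets skip it. A hedged usage sketch of the two paths (the chunked case requires dask):

import numpy as np
import xarray as xr

ds = xr.Dataset(
    {"a": (("x", "y"), np.arange(64.0).reshape(8, 8))},
    coords={"x": np.arange(8.0), "y": np.arange(8.0)},
)

# Eager path: no chunked variables, so the localization and
# per-Dataset chunked-coordinate optimizations are skipped.
ds.interp(x=[0.5, 3.5])

# Chunked path: is_chunked_array() detects dask-backed data and the
# optimizations above (localize + shared dask indexers) apply.
ds.chunk({"x": 4}).interp(x=[0.5, 3.5])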
(diffs for the remaining 4 changed files were not loaded)