issues: 1181704575

  • id: 1181704575
  • node_id: PR_kwDOAMm_X841EMNH
  • number: 6414
  • title: use the `DaskIndexingAdapter` for `duck dask` arrays
  • user: 14808389
  • state: closed
  • locked: 0
  • comments: 2
  • created_at: 2022-03-26T12:00:34Z
  • updated_at: 2022-03-27T20:38:43Z
  • closed_at: 2022-03-27T20:38:40Z
  • author_association: MEMBER
  • draft: 0
  • pull_request: pydata/xarray/pulls/6414

body:

(detected while trying to implement a PintMetaIndex in xarray-contrib/pint-xarray#163)

This fixes position-based indexing of duck dask arrays:

```python
In [1]: import xarray as xr
   ...: import dask.array as da
   ...: import pint
   ...:
   ...: ureg = pint.UnitRegistry(force_ndarray_like=True)
   ...:
   ...: a = da.zeros((20, 20), chunks=(10, 10))
   ...: q = ureg.Quantity(a, "m")
   ...:
   ...: arr1 = xr.DataArray(a, dims=("x", "y"))
   ...: arr2 = xr.DataArray(q, dims=("x", "y"))

In [2]: arr1.isel(x=[0, 2, 4], y=[1, 3, 5])
Out[2]:
<xarray.DataArray 'zeros_like-d81259c3a77e6dff3e60975e2afe4ff9' (x: 3, y: 3)>
dask.array<getitem, shape=(3, 3), dtype=float64, chunksize=(3, 3), chunktype=numpy.ndarray>
Dimensions without coordinates: x, y

In [3]: arr2.isel(x=[0, 2, 4], y=[1, 3, 5])

NotImplementedError Traceback (most recent call last)
Input In [3], in <module>
----> 1 arr2.isel(x=[0, 2, 4], y=[1, 3, 5])

File .../xarray/core/dataarray.py:1220, in DataArray.isel(self, indexers, drop, missing_dims, **indexers_kwargs)
   1215 return self._from_temp_dataset(ds)
   1217 # Much faster algorithm for when all indexers are ints, slices, one-dimensional
   1218 # lists, or zero or one-dimensional np.ndarray's
-> 1220 variable = self._variable.isel(indexers, missing_dims=missing_dims)
   1221 indexes, index_variables = isel_indexes(self.xindexes, indexers)
   1223 coords = {}

File .../xarray/core/variable.py:1172, in Variable.isel(self, indexers, missing_dims, **indexers_kwargs)
   1169 indexers = drop_dims_from_indexers(indexers, self.dims, missing_dims)
   1171 key = tuple(indexers.get(dim, slice(None)) for dim in self.dims)
-> 1172 return self[key]

File .../xarray/core/variable.py:765, in Variable.__getitem__(self, key)
    752 """Return a new Variable object whose contents are consistent with
    753 getting the provided key from the underlying data.
    754 (...)
    762 array x.values directly.
    763 """
    764 dims, indexer, new_order = self._broadcast_indexes(key)
--> 765 data = as_indexable(self._data)[indexer]
    766 if new_order:
    767     data = np.moveaxis(data, range(len(new_order)), new_order)

File .../xarray/core/indexing.py:1269, in NumpyIndexingAdapter.__getitem__(self, key)
   1267 def __getitem__(self, key):
   1268     array, key = self._indexing_array_and_key(key)
-> 1269     return array[key]

File .../lib/python3.9/site-packages/pint/quantity.py:1899, in Quantity.__getitem__(self, key)
   1897 def __getitem__(self, key):
   1898     try:
-> 1899         return type(self)(self._magnitude[key], self._units)
   1900     except PintTypeError:
   1901         raise

File .../lib/python3.9/site-packages/dask/array/core.py:1892, in Array.__getitem__(self, index)
   1889 return self
   1891 out = "getitem-" + tokenize(self, index2)
-> 1892 dsk, chunks = slice_array(out, self.name, self.chunks, index2, self.itemsize)
   1894 graph = HighLevelGraph.from_collections(out, dsk, dependencies=[self])
   1896 meta = meta_from_array(self._meta, ndim=len(chunks))

File .../lib/python3.9/site-packages/dask/array/slicing.py:174, in slice_array(out_name, in_name, blockdims, index, itemsize)
    171 index += (slice(None, None, None),) * missing
    173 # Pass down to next function
--> 174 dsk_out, bd_out = slice_with_newaxes(out_name, in_name, blockdims, index, itemsize)
    176 bd_out = tuple(map(tuple, bd_out))
    177 return dsk_out, bd_out

File .../lib/python3.9/site-packages/dask/array/slicing.py:196, in slice_with_newaxes(out_name, in_name, blockdims, index, itemsize)
    193 where_none[i] -= n
    195 # Pass down and do work
--> 196 dsk, blockdims2 = slice_wrap_lists(out_name, in_name, blockdims, index2, itemsize)
    198 if where_none:
    199     expand = expander(where_none)

File .../lib/python3.9/site-packages/dask/array/slicing.py:242, in slice_wrap_lists(out_name, in_name, blockdims, index, itemsize)
    238 where_list = [
    239     i for i, ind in enumerate(index) if is_arraylike(ind) and ind.ndim > 0
    240 ]
    241 if len(where_list) > 1:
--> 242     raise NotImplementedError("Don't yet support nd fancy indexing")
    243 # Is the single list an empty list? In this case just treat it as a zero
    244 # length slice
    245 if where_list and not index[where_list[0]].size:

NotImplementedError: Don't yet support nd fancy indexing
```
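
For context (this note and sketch are editorial, not part of the PR): the traceback bottoms out in dask's `slice_wrap_lists`, which rejects `__getitem__` keys containing more than one array index. Outer (orthogonal) indexing can still be expressed against a dask array by applying one array index per axis, which is the kind of decomposition an adapter like `DaskIndexingAdapter` can perform once duck dask arrays are routed to it. A minimal sketch of that idea follows; the helper `outer_index_dask` is hypothetical, not xarray code.

```python
# Editorial sketch: the dask limitation hit above, and a per-axis workaround.
# `outer_index_dask` is a hypothetical helper for illustration only.
import dask.array as da
import numpy as np

a = da.zeros((20, 20), chunks=(10, 10))

# Outer indexing expressed as multiple (broadcastable) array indices is what
# dask's __getitem__ refuses, matching the traceback above.
try:
    a[np.ix_([0, 2, 4], [1, 3, 5])]
except NotImplementedError as err:
    print(err)  # Don't yet support nd fancy indexing


def outer_index_dask(arr, key):
    """Apply an outer (orthogonal) index tuple one array index per axis."""
    for axis, indexer in enumerate(key):
        # Index only `axis`; 1-D integer indexers keep the axis count stable.
        arr = arr[(slice(None),) * axis + (indexer,)]
    return arr


indexed = outer_index_dask(a, ([0, 2, 4], [1, 3, 5]))
print(indexed.shape)  # (3, 3), the same selection arr1.isel(...) produces above
```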

  • [x] Tests added (an illustrative test sketch follows this list)
  • [x] User visible changes (including notable bug fixes) are documented in whats-new.rst
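
The following is an editorial illustration tied to the "Tests added" item, not the PR's actual test code: a regression test for the reproduction above might look roughly like this, assuming a pytest-style test and the same pint/dask setup (the test name is hypothetical).

```python
# Hypothetical regression test for the reproduction above; the test name and
# structure are illustrative, not the PR's actual test suite code.
import dask.array as da
import pint
import xarray as xr


def test_isel_duck_dask_array():
    ureg = pint.UnitRegistry(force_ndarray_like=True)
    q = ureg.Quantity(da.zeros((20, 20), chunks=(10, 10)), "m")
    arr = xr.DataArray(q, dims=("x", "y"))

    # Before this fix, positional indexing raised NotImplementedError
    # ("Don't yet support nd fancy indexing"); afterwards it selects normally.
    result = arr.isel(x=[0, 2, 4], y=[1, 3, 5])
    assert result.shape == (3, 3)
```
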
reactions:
{
    "url": "https://api.github.com/repos/pydata/xarray/issues/6414/reactions",
    "total_count": 0,
    "+1": 0,
    "-1": 0,
    "laugh": 0,
    "hooray": 0,
    "confused": 0,
    "heart": 0,
    "rocket": 0,
    "eyes": 0
}
  • repo: 13221727
  • type: pull
