issue_comments: 607066537
html_url: https://github.com/pydata/xarray/pull/3922#issuecomment-607066537
issue_url: https://api.github.com/repos/pydata/xarray/issues/3922
id: 607066537
node_id: MDEyOklzc3VlQ29tbWVudDYwNzA2NjUzNw==
user: 5821660
created_at: 2020-04-01T06:45:35Z
updated_at: 2020-04-01T06:45:35Z
author_association: MEMBER

body:

@dcherian This seemed to work until computation:
Full Traceback:

```python
/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataarray.py in compute(self, **kwargs)
    839         """
    840         new = self.copy(deep=False)
--> 841         return new.load(**kwargs)
    842
    843     def persist(self, **kwargs) -> "DataArray":

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataarray.py in load(self, **kwargs)
    813         dask.array.compute
    814         """
--> 815         ds = self._to_temp_dataset().load(**kwargs)
    816         new = self._from_temp_dataset(ds)
    817         self._variable = new._variable

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataset.py in load(self, **kwargs)
    654
    655         # evaluate all the dask arrays simultaneously
--> 656         evaluated_data = da.compute(*lazy_data.values(), **kwargs)
    657
    658         for k, data in zip(lazy_data, evaluated_data):

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/base.py in compute(*args, **kwargs)
    435     keys = [x.__dask_keys__() for x in collections]
    436     postcomputes = [x.__dask_postcompute__() for x in collections]
--> 437     results = schedule(dsk, keys, **kwargs)
    438     return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
    439

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs)
     74             pools[thread][num_workers] = pool
     75
---> 76     results = get_async(
     77         pool.apply_async,
     78         len(pool._pool),

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
    484                         _execute_task(task, data)  # Re-execute locally
    485                     else:
--> 486                         raise_exception(exc, tb)
    487                 res, worker_id = loads(res_info)
    488                 state["cache"][key] = res

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/local.py in reraise(exc, tb)
    314     if exc.__traceback__ is not tb:
    315         raise exc.with_traceback(tb)
--> 316     raise exc
    317
    318

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
    220     try:
    221         task, data = loads(task_info)
--> 222         result = _execute_task(task, data)
    223         id = get_id()
    224         result = dumps((result, id))

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
    119         # temporaries by their reference count and can execute certain
    120         # operations in-place.
--> 121         return func(*(_execute_task(a, cache) for a in args))
    122     elif not ishashable(arg):
    123         return arg

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/optimization.py in __call__(self, *args)
    980         if not len(args) == len(self.inkeys):
    981             raise ValueError("Expected %d args, got %d" % (len(self.inkeys), len(args)))
--> 982         return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))
    983
    984     def __reduce__(self):

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/core.py in get(dsk, out, cache)
    149     for key in toposort(dsk):
    150         task = dsk[key]
--> 151         result = _execute_task(task, cache)
    152         cache[key] = result
    153     result = _execute_task(out, cache)

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
    119         # temporaries by their reference count and can execute certain
    120         # operations in-place.
--> 121         return func(*(_execute_task(a, cache) for a in args))
    122     elif not ishashable(arg):
    123         return arg

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/computation.py in <lambda>(ind, coord)
   1387         res = indx.copy(
   1388             data=indx.data.map_blocks(
-> 1389                 lambda ind, coord: coord[(ind,)], coordarray, dtype=coordarray.dtype
   1390             )
   1391         )

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataarray.py in __getitem__(self, key)
    642         else:
    643             # xarray-style array indexing
--> 644             return self.isel(indexers=self._item_key_to_dict(key))
    645
    646     def __setitem__(self, key: Any, value: Any) -> None:

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataarray.py in isel(self, indexers, drop, **indexers_kwargs)
   1020         indexers = either_dict_or_kwargs(indexers, indexers_kwargs, "isel")
   1021         if any(is_fancy_indexer(idx) for idx in indexers.values()):
-> 1022             ds = self._to_temp_dataset()._isel_fancy(indexers, drop=drop)
   1023             return self._from_temp_dataset(ds)
   1024

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataset.py in _isel_fancy(self, indexers, drop)
   1962         # Note: we need to preserve the original indexers variable in order to merge the
   1963         # coords below
-> 1964         indexers_list = list(self._validate_indexers(indexers))
   1965
   1966         variables: Dict[Hashable, Variable] = {}

/home/kai/miniconda/envs/wradlib_38_01/lib/python3.8/site-packages/xarray/core/dataset.py in _validate_indexers(self, indexers)
   1805
   1806                 if v.ndim > 1:
-> 1807                     raise IndexError(
   1808                         "Unlabeled multi-dimensional array cannot be "
   1809                         "used for indexing: {}".format(k)

IndexError: Unlabeled multi-dimensional array cannot be used for indexing: array_bin
```
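For context, a minimal sketch of how this `IndexError` arises and how labeling the indexer avoids it. The names `da`, `idx`, and `labeled` are illustrative, not from the original report; the behavior shown is xarray's rule that a plain (unlabeled) array indexer must be 1-D, while a `DataArray` indexer with named dimensions is accepted for vectorized indexing:

```python
import numpy as np
import xarray as xr

da = xr.DataArray(np.arange(12).reshape(3, 4), dims=("x", "y"))

# A plain (unlabeled) 2-D array indexer trips the `v.ndim > 1` check in
# Dataset._validate_indexers -- the same code path as the traceback above.
idx = np.array([[0, 1], [1, 2]])
try:
    da.isel(x=idx)
except IndexError as e:
    print(e)  # Unlabeled multi-dimensional array cannot be used for indexing: x

# Wrapping the same values in a DataArray with named dimensions makes it
# a valid vectorized indexer; the result takes the indexer's dims.
labeled = xr.DataArray(idx, dims=("a", "b"))
print(da.isel(x=labeled).dims)  # ('a', 'b', 'y')
```

(In the report above the offending indexer is `array_bin`, which reaches the same check as a multi-dimensional block passed through the `map_blocks` lambda `coord[(ind,)]`.)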
{ "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
issue: 591101988