html_url,issue_url,id,node_id,user,created_at,updated_at,author_association,body,reactions,performed_via_github_app,issue
https://github.com/pydata/xarray/issues/2480#issuecomment-429309135,https://api.github.com/repos/pydata/xarray/issues/2480,429309135,MDEyOklzc3VlQ29tbWVudDQyOTMwOTEzNQ==,306380,2018-10-12T12:29:47Z,2018-10-12T12:29:47Z,MEMBER,This should be fixed with https://github.com/dask/dask/pull/4081,"{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,369310993
https://github.com/pydata/xarray/issues/2480#issuecomment-429156168,https://api.github.com/repos/pydata/xarray/issues/2480,429156168,MDEyOklzc3VlQ29tbWVudDQyOTE1NjE2OA==,306380,2018-10-11T23:34:31Z,2018-10-11T23:34:31Z,MEMBER,"No need to bother with the reproducible example.
As a warning, there might be some increased churn like this if we move forward with some of the proposed dask array changes.
","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,369310993
https://github.com/pydata/xarray/issues/2480#issuecomment-429155894,https://api.github.com/repos/pydata/xarray/issues/2480,429155894,MDEyOklzc3VlQ29tbWVudDQyOTE1NTg5NA==,306380,2018-10-11T23:32:59Z,2018-10-11T23:32:59Z,MEMBER,"Yeah, I noticed this too. I have a fix already in a PR.
On Thu, Oct 11, 2018, 5:24 PM Stephan Hoyer wrote:
> Example build failure: https://travis-ci.org/pydata/xarray/jobs/439949937
>
> =================================== FAILURES ===================================
> _____________________ test_apply_dask_new_output_dimension _____________________
> @requires_dask
> def test_apply_dask_new_output_dimension():
>     import dask.array as da
> 
>     array = da.ones((2, 2), chunks=(1, 1))
>     data_array = xr.DataArray(array, dims=('x', 'y'))
> 
>     def stack_negative(obj):
>         def func(x):
>             return np.stack([x, -x], axis=-1)
>         return apply_ufunc(func, obj, output_core_dims=[['sign']],
>                            dask='parallelized', output_dtypes=[obj.dtype],
>                            output_sizes={'sign': 2})
> 
>     expected = stack_negative(data_array.compute())
> 
>     actual = stack_negative(data_array)
>     assert actual.dims == ('x', 'y', 'sign')
>     assert actual.shape == (2, 2, 2)
>     assert isinstance(actual.data, da.Array)
> >   assert_identical(expected, actual)
> xarray/tests/test_computation.py:737:
> _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> xarray/tests/test_computation.py:24: in assert_identical
>     assert a.identical(b), msg
> xarray/core/dataarray.py:1923: in identical
>     self._all_compat(other, 'identical'))
> xarray/core/dataarray.py:1875: in _all_compat
>     compat(self, other))
> xarray/core/dataarray.py:1872: in compat
>     return getattr(x.variable, compat_str)(y.variable)
> xarray/core/variable.py:1461: in identical
>     self.equals(other))
> xarray/core/variable.py:1439: in equals
>     equiv(self.data, other.data)))
> xarray/core/duck_array_ops.py:144: in array_equiv
>     arr1, arr2 = as_like_arrays(arr1, arr2)
> xarray/core/duck_array_ops.py:128: in as_like_arrays
>     return tuple(np.asarray(d) for d in data)
> xarray/core/duck_array_ops.py:128: in <genexpr>
>     return tuple(np.asarray(d) for d in data)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/numpy/core/numeric.py:501: in asarray
>     return array(a, dtype, copy=False, order=order)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/core.py:1118: in __array__
>     x = self.compute()
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/base.py:156: in compute
>     (result,) = compute(self, traverse=False, **kwargs)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/base.py:390: in compute
>     dsk = collections_to_dsk(collections, optimize_graph, **kwargs)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/base.py:194: in collections_to_dsk
>     for opt, (dsk, keys) in groups.items()]))
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/base.py:194: in <listcomp>
>     for opt, (dsk, keys) in groups.items()]))
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/optimization.py:41: in optimize
>     dsk = ensure_dict(dsk)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/utils.py:830: in ensure_dict
>     result.update(dd)
> ../../../miniconda/envs/test_env/lib/python3.6/_collections_abc.py:720: in __iter__
>     yield from self._mapping
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/top.py:168: in __iter__
>     return iter(self._dict)
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/top.py:160: in _dict
>     concatenate=self.concatenate
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/top.py:305: in top
>     keytups = list(itertools.product(*[range(dims[i]) for i in out_indices]))
> _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> .0 =
> >   keytups = list(itertools.product(*[range(dims[i]) for i in out_indices]))
> E   KeyError: '.0'
> ../../../miniconda/envs/test_env/lib/python3.6/site-packages/dask/array/top.py:305: KeyError
>
> My guess is that this is somehow related to @mrocklin's recent refactor of dask.array.atop: dask/dask#3998
> 
> If the cause isn't obvious, I'll try to come up with a simple dask-only example that reproduces it.
","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,369310993