id,node_id,number,title,user,state,locked,assignee,milestone,comments,created_at,updated_at,closed_at,author_association,active_lock_reason,draft,pull_request,body,reactions,performed_via_github_app,state_reason,repo,type
1044363666,PR_kwDOAMm_X84uE2qZ,5936,Fix a missing @requires_zarr in tests,490531,closed,0,,,3,2021-11-04T04:49:39Z,2021-11-04T10:12:58Z,2021-11-04T10:12:46Z,CONTRIBUTOR,,0,pydata/xarray/pulls/5936,"When zarr is not available, this test fails with `NameError: name 'zarr' is not defined`.
Full trace
```
__________________________ test_zarr_storage_options ___________________________
@requires_fsspec
def test_zarr_storage_options():
pytest.importorskip(""aiobotocore"")
ds = create_test_data()
store_target = ""memory://test.zarr""
> ds.to_zarr(store_target, storage_options={""test"": ""zarr_write""})
/build/python-xarray/src/xarray-0.20.0/xarray/tests/test_backends.py:2406:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/build/python-xarray/src/xarray-0.20.0/xarray/core/dataset.py:2037: in to_zarr
return to_zarr(
/build/python-xarray/src/xarray-0.20.0/xarray/backends/api.py:1391: in to_zarr
zstore = backends.ZarrStore.open_group(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
cls =
store = , mode = 'w-'
synchronizer = None, group = None, consolidated = False
consolidate_on_close = True, chunk_store = None, storage_options = None
append_dim = None, write_region = None, safe_chunks = True, stacklevel = 4
@classmethod
def open_group(
cls,
store,
mode=""r"",
synchronizer=None,
group=None,
consolidated=False,
consolidate_on_close=False,
chunk_store=None,
storage_options=None,
append_dim=None,
write_region=None,
safe_chunks=True,
stacklevel=2,
):
# zarr doesn't support pathlib.Path objects yet. zarr-python#601
if isinstance(store, os.PathLike):
store = os.fspath(store)
open_kwargs = dict(
mode=mode,
synchronizer=synchronizer,
path=group,
)
open_kwargs[""storage_options""] = storage_options
if chunk_store:
open_kwargs[""chunk_store""] = chunk_store
if consolidated is None:
consolidated = False
if consolidated is None:
try:
zarr_group = zarr.open_consolidated(store, **open_kwargs)
except KeyError:
warnings.warn(
""Failed to open Zarr store with consolidated metadata, ""
""falling back to try reading non-consolidated metadata. ""
""This is typically much slower for opening a dataset. ""
""To silence this warning, consider:\n""
""1. Consolidating metadata in this existing store with ""
""zarr.consolidate_metadata().\n""
""2. Explicitly setting consolidated=False, to avoid trying ""
""to read consolidate metadata, or\n""
""3. Explicitly setting consolidated=True, to raise an ""
""error in this case instead of falling back to try ""
""reading non-consolidated metadata."",
RuntimeWarning,
stacklevel=stacklevel,
)
zarr_group = zarr.open_group(store, **open_kwargs)
elif consolidated:
# TODO: an option to pass the metadata_key keyword
zarr_group = zarr.open_consolidated(store, **open_kwargs)
else:
> zarr_group = zarr.open_group(store, **open_kwargs)
E NameError: name 'zarr' is not defined
/build/python-xarray/src/xarray-0.20.0/xarray/backends/zarr.py:386: NameError
```
","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/5936/reactions"", ""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,,13221727,pull
832404698,MDU6SXNzdWU4MzI0MDQ2OTg=,5038,[tests] ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed). ,490531,closed,0,,,11,2021-03-16T04:44:23Z,2021-03-16T20:15:38Z,2021-03-16T20:03:33Z,CONTRIBUTOR,,,,"**What happened**: I’m running tests of xarray while building it for packaging (Arch Linux), and there are 6 test failures, all with this error message.
**What you expected to happen**: Tests should work. I’m not sure why a version 0.0.0 is reported while running tests in the build environment.
Full error log
```
__________________ TestDataArray.test_from_series_multiindex ___________________
self =
def test_from_series_multiindex(self):
# GH:3951
df = pd.DataFrame({""B"": [1, 2, 3], ""A"": [4, 5, 6]})
df = df.rename_axis(""num"").rename_axis(""alpha"", axis=1)
> actual = df.stack(""alpha"").to_xarray()
/build/python-xarray/src/xarray-0.17.0/xarray/tests/test_dataarray.py:3697:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
_______________________ TestDataset.test_sel_categorical _______________________
self =
def test_sel_categorical(self):
ind = pd.Series([""foo"", ""bar""], dtype=""category"")
df = pd.DataFrame({""ind"": ind, ""values"": [1, 2]})
> ds = df.set_index(""ind"").to_xarray()
/build/python-xarray/src/xarray-0.17.0/xarray/tests/test_dataset.py:1432:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
____________________ TestDataset.test_sel_categorical_error ____________________
self =
def test_sel_categorical_error(self):
ind = pd.Series([""foo"", ""bar""], dtype=""category"")
df = pd.DataFrame({""ind"": ind, ""values"": [1, 2]})
> ds = df.set_index(""ind"").to_xarray()
/build/python-xarray/src/xarray-0.17.0/xarray/tests/test_dataset.py:1440:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
___________________ TestDataset.test_categorical_multiindex ____________________
self =
def test_categorical_multiindex(self):
i1 = pd.Series([0, 0])
cat = pd.CategoricalDtype(categories=[""foo"", ""baz"", ""bar""])
i2 = pd.Series([""baz"", ""bar""], dtype=cat)
df = pd.DataFrame({""i1"": i1, ""i2"": i2, ""values"": [1, 2]}).set_index(
[""i1"", ""i2""]
)
> actual = df.to_xarray()
/build/python-xarray/src/xarray-0.17.0/xarray/tests/test_dataset.py:1487:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
_________________ TestDataset.test_from_dataframe_categorical __________________
self =
def test_from_dataframe_categorical(self):
cat = pd.CategoricalDtype(
categories=[""foo"", ""bar"", ""baz"", ""qux"", ""quux"", ""corge""]
)
i1 = pd.Series([""foo"", ""bar"", ""foo""], dtype=cat)
i2 = pd.Series([""bar"", ""bar"", ""baz""], dtype=cat)
df = pd.DataFrame({""i1"": i1, ""i2"": i2, ""values"": [1, 2, 3]})
> ds = df.set_index(""i1"").to_xarray()
/build/python-xarray/src/xarray-0.17.0/xarray/tests/test_dataset.py:4131:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
_________________________ test_roundtrip_pandas_series _________________________
@given(numeric_series, st.text())
> def test_roundtrip_pandas_series(ser, ix_name):
/build/python-xarray/src/xarray-0.17.0/properties/test_pandas_roundtrip.py:73:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/build/python-xarray/src/xarray-0.17.0/properties/test_pandas_roundtrip.py:79: in test_roundtrip_pandas_series
xr.testing.assert_identical(arr, roundtripped.to_xarray())
/usr/lib/python3.9/site-packages/pandas/core/generic.py:3011: in to_xarray
xarray = import_optional_dependency(""xarray"")
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
name = 'xarray', extra = '', raise_on_missing = True, on_version = 'raise'
def import_optional_dependency(
name: str, extra: str = """", raise_on_missing: bool = True, on_version: str = ""raise""
):
""""""
Import an optional dependency.
By default, if a dependency is missing an ImportError with a nice
message will be raised. If a dependency is present, but too old,
we raise.
Parameters
----------
name : str
The module name. This should be top-level only, so that the
version may be checked.
extra : str
Additional text to include in the ImportError message.
raise_on_missing : bool, default True
Whether to raise if the optional dependency is not found.
When False and the module is not present, None is returned.
on_version : str {'raise', 'warn'}
What to do when a dependency's version is too old.
* raise : Raise an ImportError
* warn : Warn that the version is too old. Returns None
* ignore: Return the module, even if the version is too old.
It's expected that users validate the version locally when
using ``on_version=""ignore""`` (see. ``io/html.py``)
Returns
-------
maybe_module : Optional[ModuleType]
The imported module, when found and the version is correct.
None is returned when the package is not found and `raise_on_missing`
is False, or when the package's version is too old and `on_version`
is ``'warn'``.
""""""
package_name = INSTALL_MAPPING.get(name)
install_name = package_name if package_name is not None else name
msg = (
f""Missing optional dependency '{install_name}'. {extra} ""
f""Use pip or conda to install {install_name}.""
)
try:
module = importlib.import_module(name)
except ImportError:
if raise_on_missing:
raise ImportError(msg) from None
else:
return None
minimum_version = VERSIONS.get(name)
if minimum_version:
version = get_version(module)
if distutils.version.LooseVersion(version) < minimum_version:
assert on_version in {""warn"", ""raise"", ""ignore""}
msg = (
f""Pandas requires version '{minimum_version}' or newer of '{name}' ""
f""(version '{version}' currently installed).""
)
if on_version == ""warn"":
warnings.warn(msg, UserWarning)
return None
elif on_version == ""raise"":
> raise ImportError(msg)
E ImportError: Pandas requires version '0.12.3' or newer of 'xarray' (version '0.0.0' currently installed).
/usr/lib/python3.9/site-packages/pandas/compat/_optional.py:126: ImportError
```
**Environment**:
Output of xr.show_versions()
commit: None
python: 3.9.2 (default, Feb 20 2021, 18:40:11)
[GCC 10.2.0]
python-bits: 64
OS: Linux
OS-release: 5.11.5-arch1-1
machine: x86_64
processor:
byteorder: little
LC_ALL: None
LANG: en_US.UTF-8
LOCALE: en_US.UTF-8
libhdf5: 1.12.0
libnetcdf: 4.7.4
xarray: 0.0.0
pandas: 1.2.3
numpy: 1.20.1
scipy: 1.6.1
netCDF4: 1.5.5.1
pydap: None
h5netcdf: None
h5py: 3.1.0
Nio: None
zarr: None
cftime: 1.4.1
nc_time_axis: None
PseudoNetCDF: None
rasterio: None
cfgrib: None
iris: None
bottleneck: 1.3.2
dask: 2021.03.0
distributed: 2021.03.0
matplotlib: 3.3.4
cartopy: None
seaborn: 0.11.1
numbagg: None
pint: 0.16.1
setuptools: 54.1.1
pip: None
conda: None
pytest: 6.2.2
IPython: None
sphinx: None
","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/5038/reactions"", ""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,completed,13221727,issue
567058587,MDU6SXNzdWU1NjcwNTg1ODc=,3778,Two strange errors in tests TestVariable using .copy(),490531,closed,0,,,7,2020-02-18T17:51:22Z,2020-02-28T15:16:14Z,2020-02-28T15:16:14Z,CONTRIBUTOR,,,,"Setup is as described in #3777.
The two failures are the following ones:
```
____________________ TestVariable.test_index_0d_not_a_time _____________________
self =
def test_index_0d_not_a_time(self):
d = np.datetime64(""NaT"", ""ns"")
x = self.cls([""x""], [d])
> self._assertIndexedLikeNDArray(x, d)
xarray/tests/test_variable.py:206:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
variable =
expected_value0 = numpy.datetime64('NaT'), expected_dtype = None
def _assertIndexedLikeNDArray(self, variable, expected_value0, expected_dtype=None):
""""""Given a 1-dimensional variable, verify that the variable is indexed
like a numpy.ndarray.
""""""
assert variable[0].shape == ()
assert variable[0].ndim == 0
assert variable[0].size == 1
# test identity
> assert variable.equals(variable.copy())
E AssertionError: assert False
E + where False = \n>(\n)
E + where \n> = \n.equals
E + and \n = \n>()
E + where \n> = \n.copy
xarray/tests/test_variable.py:151: AssertionError
_____________________ TestVariable.test_equals_all_dtypes ______________________
self =
def test_equals_all_dtypes(self):
for v, _ in self.example_1d_objects():
v2 = v.copy()
> assert v.equals(v2)
E AssertionError: assert False
E + where False = \n>(\n)
E + where \n> = \n.equals
xarray/tests/test_variable.py:386: AssertionError
```
I have no idea what’s wrong here.","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/3778/reactions"", ""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,completed,13221727,issue
567062305,MDU6SXNzdWU1NjcwNjIzMDU=,3779,Three test_aggregation[int-method_median] tests failing,490531,closed,0,,,2,2020-02-18T17:58:29Z,2020-02-23T19:34:35Z,2020-02-23T19:34:35Z,CONTRIBUTOR,,,,"Follow-up of #3777.
The three failing tests seem to be failing because `dask_array=None`, which is likely the same kind of issue as #3777: dask-dependent tests ran while dask is not available. The other printed error is strange to me, because `numpy` is at version 1.18.1 on this system.
```
_______________ TestVariable.test_aggregation[int-method_median] _______________
values = array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]), axis = None, skipna = None
kwargs = {}, func = .f at 0x7f3927bdbe50>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
> return func(values, axis=axis, **kwargs)
xarray/core/duck_array_ops.py:307:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
args = (array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),), kwargs = {'axis': None}
dispatch_args = (array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),)
def f(*args, **kwargs):
if list_of_args:
dispatch_args = args[0]
else:
dispatch_args = args[array_args]
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
xarray/core/duck_array_ops.py:40:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.0 =
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
E AttributeError: 'NoneType' object has no attribute 'Array'
xarray/core/duck_array_ops.py:40: AttributeError
During handling of the above exception, another exception occurred:
self =
func = method_median, dtype =
@pytest.mark.parametrize(
""func"",
(
method(""all""),
method(""any""),
method(""argmax""),
method(""argmin""),
method(""argsort""),
method(""cumprod""),
method(""cumsum""),
method(""max""),
method(""mean""),
method(""median""),
method(""min""),
pytest.param(
method(""prod""),
marks=pytest.mark.xfail(reason=""not implemented by pint""),
),
method(""std""),
method(""sum""),
method(""var""),
),
ids=repr,
)
def test_aggregation(self, func, dtype):
array = np.linspace(0, 1, 10).astype(dtype) * (
unit_registry.m if func.name != ""cumprod"" else unit_registry.dimensionless
)
variable = xr.Variable(""x"", array)
units = extract_units(func(array))
> expected = attach_units(func(strip_units(variable)), units)
xarray/tests/test_units.py:1389:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
xarray/tests/test_units.py:374: in __call__
return func(*all_args, **all_kwargs)
xarray/core/common.py:46: in wrapped_func
return self.reduce(func, dim, axis, skipna=skipna, **kwargs)
xarray/core/variable.py:1537: in reduce
data = func(input_data, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
values = array([0, 0, 0, 0, 0, 0, 0, 0, 0, 1]), axis = None, skipna = None
kwargs = {}, func = .f at 0x7f3927bdbe50>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
return func(values, axis=axis, **kwargs)
except AttributeError:
if isinstance(values, dask_array_type):
try: # dask/dask#3133 dask sometimes needs dtype argument
# if func does not accept dtype, then raises TypeError
return func(values, axis=axis, dtype=values.dtype, **kwargs)
except (AttributeError, TypeError):
msg = ""%s is not yet implemented on dask arrays"" % name
else:
msg = (
""%s is not available with skipna=False with the ""
""installed version of numpy; upgrade to numpy 1.12 ""
""or newer to use skipna=True or skipna=None"" % name
)
> raise NotImplementedError(msg)
E NotImplementedError: median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None
xarray/core/duck_array_ops.py:321: NotImplementedError
______________ TestDataArray.test_aggregation[int-method_median] _______________
values = array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), axis = None, skipna = None
kwargs = {}, func = .f at 0x7f39286dbc10>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
> return func(values, axis=axis, **kwargs)
xarray/core/duck_array_ops.py:307:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
args = (array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),), kwargs = {'axis': None}
dispatch_args = (array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),)
def f(*args, **kwargs):
if list_of_args:
dispatch_args = args[0]
else:
dispatch_args = args[array_args]
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
xarray/core/duck_array_ops.py:40:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.0 =
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
E AttributeError: 'NoneType' object has no attribute 'Array'
xarray/core/duck_array_ops.py:40: AttributeError
During handling of the above exception, another exception occurred:
self =
func = method_median, dtype =
@pytest.mark.parametrize(
""func"",
(
pytest.param(
function(""all""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
pytest.param(
function(""any""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
function(""argmax""),
function(""argmin""),
function(""max""),
function(""mean""),
pytest.param(
function(""median""),
marks=pytest.mark.xfail(reason=""not implemented by xarray""),
),
function(""min""),
pytest.param(
function(""prod""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
function(""sum""),
function(""std""),
function(""var""),
function(""cumsum""),
pytest.param(
function(""cumprod""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
pytest.param(
method(""all""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
pytest.param(
method(""any""),
marks=pytest.mark.xfail(reason=""not implemented by pint yet""),
),
method(""argmax""),
method(""argmin""),
method(""max""),
method(""mean""),
method(""median""),
method(""min""),
pytest.param(
method(""prod""),
marks=pytest.mark.xfail(
reason=""comparison of quantity with ndarrays in nanops not implemented""
),
),
method(""sum""),
method(""std""),
method(""var""),
method(""cumsum""),
pytest.param(
method(""cumprod""),
marks=pytest.mark.xfail(reason=""pint does not implement cumprod yet""),
),
),
ids=repr,
)
def test_aggregation(self, func, dtype):
array = np.arange(10).astype(dtype) * (
unit_registry.m if func.name != ""cumprod"" else unit_registry.dimensionless
)
data_array = xr.DataArray(data=array, dims=""x"")
# units differ based on the applied function, so we need to
# first compute the units
units = extract_units(func(array))
> expected = attach_units(func(strip_units(data_array)), units)
xarray/tests/test_units.py:2226:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
xarray/tests/test_units.py:374: in __call__
return func(*all_args, **all_kwargs)
xarray/core/common.py:46: in wrapped_func
return self.reduce(func, dim, axis, skipna=skipna, **kwargs)
xarray/core/dataarray.py:2235: in reduce
var = self.variable.reduce(func, dim, axis, keep_attrs, keepdims, **kwargs)
xarray/core/variable.py:1537: in reduce
data = func(input_data, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
values = array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), axis = None, skipna = None
kwargs = {}, func = .f at 0x7f39286dbc10>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
return func(values, axis=axis, **kwargs)
except AttributeError:
if isinstance(values, dask_array_type):
try: # dask/dask#3133 dask sometimes needs dtype argument
# if func does not accept dtype, then raises TypeError
return func(values, axis=axis, dtype=values.dtype, **kwargs)
except (AttributeError, TypeError):
msg = ""%s is not yet implemented on dask arrays"" % name
else:
msg = (
""%s is not available with skipna=False with the ""
""installed version of numpy; upgrade to numpy 1.12 ""
""or newer to use skipna=True or skipna=None"" % name
)
> raise NotImplementedError(msg)
E NotImplementedError: median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None
xarray/core/duck_array_ops.py:321: NotImplementedError
_______________ TestDataset.test_aggregation[int-method_median] ________________
values = , axis = 0, skipna = None
kwargs = {}, func = .f at 0x7f392619b820>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
> return func(values, axis=axis, **kwargs)
xarray/core/duck_array_ops.py:307:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
args = (,), kwargs = {'axis': 0}
dispatch_args = (,)
def f(*args, **kwargs):
if list_of_args:
dispatch_args = args[0]
else:
dispatch_args = args[array_args]
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
xarray/core/duck_array_ops.py:40:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
.0 =
> if any(isinstance(a, dask_array.Array) for a in dispatch_args):
E AttributeError: 'NoneType' object has no attribute 'Array'
xarray/core/duck_array_ops.py:40: AttributeError
During handling of the above exception, another exception occurred:
self =
func = method_median, dtype =
@pytest.mark.parametrize(
""func"",
(
pytest.param(
function(""all""),
marks=pytest.mark.xfail(reason=""not implemented by pint""),
),
pytest.param(
function(""any""),
marks=pytest.mark.xfail(reason=""not implemented by pint""),
),
function(""argmax""),
function(""argmin""),
function(""max""),
function(""min""),
function(""mean""),
pytest.param(
function(""median""),
marks=pytest.mark.xfail(
reason=""np.median does not work with dataset yet""
),
),
function(""sum""),
pytest.param(
function(""prod""),
marks=pytest.mark.xfail(reason=""not implemented by pint""),
),
function(""std""),
function(""var""),
function(""cumsum""),
pytest.param(
function(""cumprod""),
marks=pytest.mark.xfail(reason=""fails within xarray""),
),
pytest.param(
method(""all""), marks=pytest.mark.xfail(reason=""not implemented by pint"")
),
pytest.param(
method(""any""), marks=pytest.mark.xfail(reason=""not implemented by pint"")
),
method(""argmax""),
method(""argmin""),
method(""max""),
method(""min""),
method(""mean""),
method(""median""),
method(""sum""),
pytest.param(
method(""prod""),
marks=pytest.mark.xfail(reason=""not implemented by pint""),
),
method(""std""),
method(""var""),
method(""cumsum""),
pytest.param(
method(""cumprod""), marks=pytest.mark.xfail(reason=""fails within xarray"")
),
),
ids=repr,
)
def test_aggregation(self, func, dtype):
unit_a = (
unit_registry.Pa if func.name != ""cumprod"" else unit_registry.dimensionless
)
unit_b = (
unit_registry.kg / unit_registry.m ** 3
if func.name != ""cumprod""
else unit_registry.dimensionless
)
a = xr.DataArray(data=np.linspace(0, 1, 10).astype(dtype) * unit_a, dims=""x"")
b = xr.DataArray(data=np.linspace(-1, 0, 10).astype(dtype) * unit_b, dims=""x"")
x = xr.DataArray(data=np.arange(10).astype(dtype) * unit_registry.m, dims=""x"")
y = xr.DataArray(
data=np.arange(10, 20).astype(dtype) * unit_registry.s, dims=""x""
)
ds = xr.Dataset(data_vars={""a"": a, ""b"": b}, coords={""x"": x, ""y"": y})
> actual = func(ds)
xarray/tests/test_units.py:3733:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
xarray/tests/test_units.py:374: in __call__
return func(*all_args, **all_kwargs)
xarray/core/common.py:83: in wrapped_func
return self.reduce(
xarray/core/dataset.py:4230: in reduce
variables[name] = var.reduce(
xarray/core/variable.py:1535: in reduce
data = func(input_data, axis=axis, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
values = , axis = 0, skipna = None
kwargs = {}, func = .f at 0x7f392619b820>
msg = 'median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None'
def f(values, axis=None, skipna=None, **kwargs):
if kwargs.pop(""out"", None) is not None:
raise TypeError(f""`out` is not valid for {name}"")
values = asarray(values)
if coerce_strings and values.dtype.kind in ""SU"":
values = values.astype(object)
func = None
if skipna or (skipna is None and values.dtype.kind in ""cfO""):
nanname = ""nan"" + name
func = getattr(nanops, nanname)
else:
func = _dask_or_eager_func(name, dask_module=dask_module)
try:
return func(values, axis=axis, **kwargs)
except AttributeError:
if isinstance(values, dask_array_type):
try: # dask/dask#3133 dask sometimes needs dtype argument
# if func does not accept dtype, then raises TypeError
return func(values, axis=axis, dtype=values.dtype, **kwargs)
except (AttributeError, TypeError):
msg = ""%s is not yet implemented on dask arrays"" % name
else:
msg = (
""%s is not available with skipna=False with the ""
""installed version of numpy; upgrade to numpy 1.12 ""
""or newer to use skipna=True or skipna=None"" % name
)
> raise NotImplementedError(msg)
E NotImplementedError: median is not available with skipna=False with the installed version of numpy; upgrade to numpy 1.12 or newer to use skipna=True or skipna=None
xarray/core/duck_array_ops.py:321: NotImplementedError
```
However I’m not much knowledgeable on all this, so I’ll defer to you for finding the root cause.","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/3779/reactions"", ""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,completed,13221727,issue
568320746,MDU6SXNzdWU1NjgzMjA3NDY=,3783,New TestVariable.test_pad failure with pint 0.11,490531,closed,0,,,1,2020-02-20T13:59:35Z,2020-02-23T19:13:08Z,2020-02-23T19:13:08Z,CONTRIBUTOR,,,,"Since y-day, `pint` 0.11 was released and it results in a new test failure (in addition to the ones in #3778 and #3779):
```
____________________________ TestVariable.test_pad _____________________________
self =
def test_pad(self):
data = np.arange(4 * 3 * 2).reshape(4, 3, 2)
v = self.cls([""x"", ""y"", ""z""], data)
xr_args = [{""x"": (2, 1)}, {""y"": (0, 3)}, {""x"": (3, 1), ""z"": (2, 0)}]
np_args = [
((2, 1), (0, 0), (0, 0)),
((0, 0), (0, 3), (0, 0)),
((3, 1), (0, 0), (2, 0)),
]
for xr_arg, np_arg in zip(xr_args, np_args):
actual = v.pad_with_fill_value(**xr_arg)
expected = np.pad(
np.array(v.data.astype(float)),
np_arg,
mode=""constant"",
constant_values=np.nan,
)
assert_array_equal(actual, expected)
assert isinstance(actual._data, type(v._data))
# for the boolean array, we pad False
data = np.full_like(data, False, dtype=bool).reshape(4, 3, 2)
v = self.cls([""x"", ""y"", ""z""], data)
for xr_arg, np_arg in zip(xr_args, np_args):
> actual = v.pad_with_fill_value(fill_value=False, **xr_arg)
xarray/tests/test_variable.py:813:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
xarray/core/variable.py:1210: in pad_with_fill_value
array = np.pad(
<__array_function__ internals>:5: in pad
???
/usr/lib/python3.8/site-packages/pint/quantity.py:1543: in __array_function__
return numpy_wrap(""function"", func, args, kwargs, types)
/usr/lib/python3.8/site-packages/pint/numpy_func.py:894: in numpy_wrap
return handled[name](*args, **kwargs)
/usr/lib/python3.8/site-packages/pint/numpy_func.py:671: in _pad
kwargs[""constant_values""] = _recursive_convert(kwargs[""constant_values""], units)
/usr/lib/python3.8/site-packages/pint/numpy_func.py:659: in _recursive_convert
arg = unit._REGISTRY.Quantity(arg, unit)
/usr/lib/python3.8/site-packages/pint/quantity.py:200: in __new__
inst._magnitude = _to_magnitude(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
value = False, force_ndarray = True, force_ndarray_like = False
def _to_magnitude(value, force_ndarray=False, force_ndarray_like=False):
if isinstance(value, (dict, bool)) or value is None:
> raise TypeError(""Invalid magnitude for Quantity: {0!r}"".format(value))
E TypeError: Invalid magnitude for Quantity: False
/usr/lib/python3.8/site-packages/pint/compat.py:49: TypeError
```","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/3783/reactions"", ""total_count"": 1, ""+1"": 1, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,completed,13221727,issue
567035590,MDU6SXNzdWU1NjcwMzU1OTA=,3777,test_open_mfdataset_list_attr is ran even when dask is not available,490531,closed,0,,,5,2020-02-18T17:08:23Z,2020-02-19T18:24:43Z,2020-02-19T18:24:43Z,CONTRIBUTOR,,,,"I’m currently packaging xarray (0.15.0) for ArchLinux, and I’m running the test suite with all our currently available packages (in addition to `python-numpy`, `python-pandas` and `python-pytest` of course):
python-netcdf4
python-scipy
python-cftime
python-bottleneck
python-matplotlib
python-seaborn
python-pint
I was greatly impressed at the automatic selection of tests depending on what is available on the system. :) I’m only seeing 10 tests failures (`10 failed, 8057 passed, 1384 skipped, 518 xfailed, 277 xpassed, 5268 warnings`), and amongst them one is happening because a test requiring dask is run even without it being installed: `test_open_mfdataset_list_attr`.
Corresponding test output:
```
________________________ test_open_mfdataset_list_attr _________________________
@requires_netCDF4
def test_open_mfdataset_list_attr():
""""""
Case when an attribute of type list differs across the multiple files
""""""
from netCDF4 import Dataset
with create_tmp_files(2) as nfiles:
for i in range(2):
f = Dataset(nfiles[i], ""w"")
f.createDimension(""x"", 3)
vlvar = f.createVariable(""test_var"", np.int32, (""x""))
# here create an attribute as a list
vlvar.test_attr = [f""string a {i}"", f""string b {i}""]
vlvar[:] = np.arange(3)
f.close()
ds1 = open_dataset(nfiles[0])
ds2 = open_dataset(nfiles[1])
original = xr.concat([ds1, ds2], dim=""x"")
> with xr.open_mfdataset(
[nfiles[0], nfiles[1]], combine=""nested"", concat_dim=""x""
) as actual:
xarray/tests/test_backends.py:2561:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
xarray/backends/api.py:908: in open_mfdataset
datasets = [open_(p, **open_kwargs) for p in paths]
xarray/backends/api.py:908: in
datasets = [open_(p, **open_kwargs) for p in paths]
xarray/backends/api.py:538: in open_dataset
ds = maybe_decode_store(store)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
store =
lock = False
def maybe_decode_store(store, lock=False):
ds = conventions.decode_cf(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
)
_protect_dataset_variables_inplace(ds, cache)
if chunks is not None:
> from dask.base import tokenize
E ModuleNotFoundError: No module named 'dask'
xarray/backends/api.py:459: ModuleNotFoundError
```
I think this test should thus not be selected if dask is not installed. ;)","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/3777/reactions"", ""total_count"": 1, ""+1"": 1, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,completed,13221727,issue
567510745,MDExOlB1bGxSZXF1ZXN0Mzc3MTIxMjM4,3780,Avoid running test_open_mfdataset_list_attr without dask,490531,closed,0,,,6,2020-02-19T11:41:25Z,2020-02-19T18:24:43Z,2020-02-19T18:24:43Z,CONTRIBUTOR,,0,pydata/xarray/pulls/3780,"Fixes GH-3777.
- [x] Closes #3777","{""url"": ""https://api.github.com/repos/pydata/xarray/issues/3780/reactions"", ""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,,13221727,pull