html_url,issue_url,id,node_id,user,created_at,updated_at,author_association,body,reactions,performed_via_github_app,issue https://github.com/pydata/xarray/issues/5375#issuecomment-848726039,https://api.github.com/repos/pydata/xarray/issues/5375,848726039,MDEyOklzc3VlQ29tbWVudDg0ODcyNjAzOQ==,199050,2021-05-26T12:26:10Z,2021-05-26T12:26:10Z,CONTRIBUTOR,"> Well, it seems more like a workaround than a solution, but be it. It was proposed by @dcherian in https://github.com/pydata/xarray/issues/5341#issuecomment-844105683","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,901856178 https://github.com/pydata/xarray/pull/5364#issuecomment-846608474,https://api.github.com/repos/pydata/xarray/issues/5364,846608474,MDEyOklzc3VlQ29tbWVudDg0NjYwODQ3NA==,199050,2021-05-23T18:56:10Z,2021-05-23T18:56:10Z,CONTRIBUTOR,"> Thanks @sebix > > Do we know where the difference is coming from? If I'm reading the test correctly, the floats aren't related to the offset — which is on discrete values — and so this may be a broader problem. Is that right? Sorry, I have no more insight, I just know that the test is failing on that particular architecture (x86_64, ppc64, ppc64le, aarch64 are fine).","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,898971620 https://github.com/pydata/xarray/issues/5341#issuecomment-844257919,https://api.github.com/repos/pydata/xarray/issues/5341,844257919,MDEyOklzc3VlQ29tbWVudDg0NDI1NzkxOQ==,199050,2021-05-19T16:16:59Z,2021-05-19T16:16:59Z,CONTRIBUTOR,"Yes, that works","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,895163261 https://github.com/pydata/xarray/issues/2050#issuecomment-381395284,https://api.github.com/repos/pydata/xarray/issues/2050,381395284,MDEyOklzc3VlQ29tbWVudDM4MTM5NTI4NA==,199050,2018-04-15T10:17:20Z,2018-04-15T10:17:20Z,CONTRIBUTOR,"On 2018-04-14 18:48, Stephan Hoyer wrote: > The test is marked as xfail now, so if you merge in master tests should pass. No, commit 9b76f219ec314dcb0c9a310c097a34f5c751fdd6 sets the xfail for test_cross_engine_read_write_netcdf3, but not for GenericNetCDFDataTest.test_append_overwrite_values, GenericNetCDFDataTest.test_append_write, GenericNetCDFDataTestAutocloseTrue.test_append_overwrite_values.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,313040371 https://github.com/pydata/xarray/issues/2050#issuecomment-381326874,https://api.github.com/repos/pydata/xarray/issues/2050,381326874,MDEyOklzc3VlQ29tbWVudDM4MTMyNjg3NA==,199050,2018-04-14T12:48:35Z,2018-04-14T12:48:35Z,CONTRIBUTOR,"Same problem here. 
Full log: ``` [ 69s] =================================== FAILURES =================================== [ 69s] ______________ GenericNetCDFDataTest.test_append_overwrite_values ______________ [ 69s] [ 69s] self = [ 69s] [ 69s] def test_append_overwrite_values(self): [ 69s] # regression for GH1215 [ 69s] data = create_test_data() [ 69s] with create_tmp_file(allow_cleanup_failure=False) as tmp_file: [ 69s] self.save(data, tmp_file, mode='w') [ 69s] data['var2'][:] = -999 [ 69s] data['var9'] = data['var2'] * 3 [ 69s] > self.save(data[['var2', 'var9']], tmp_file, mode='a') [ 69s] [ 69s] xarray/tests/test_backends.py:796: [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] xarray/tests/test_backends.py:162: in save [ 69s] **kwargs) [ 69s] xarray/core/dataset.py:1137: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/api.py:657: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/core/dataset.py:1074: in dump_to_store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:363: in store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:402: in set_variables [ 69s] self.writer.add(source, target) [ 69s] xarray/backends/common.py:265: in add [ 69s] target[...] = source [ 69s] xarray/backends/scipy_.py:61: in __setitem__ [ 69s] data[key] = value [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] [ 69s] self = [ 69s] index = Ellipsis [ 69s] data = array([[-999., -999., -999., -999., -999., -999., -999., -999., -999.], [ 69s] ...999.], [ 69s] [-999., -999., -999., -999., -999., -999., -999., -999., -999.]]) [ 69s] [ 69s] def __setitem__(self, index, data): [ 69s] if self.maskandscale: [ 69s] missing_value = ( [ 69s] self._get_missing_value() or [ 69s] getattr(data, 'fill_value', 999999)) [ 69s] self._attributes.setdefault('missing_value', missing_value) [ 69s] self._attributes.setdefault('_FillValue', missing_value) [ 69s] data = ((data - self._attributes.get('add_offset', 0.0)) / [ 69s] self._attributes.get('scale_factor', 1.0)) [ 69s] data = np.ma.asarray(data).filled(missing_value) [ 69s] if self._typecode not in 'fd' and data.dtype.kind == 'f': [ 69s] data = np.round(data) [ 69s] [ 69s] # Expand data for record vars? 
[ 69s] if self.isrec: [ 69s] if isinstance(index, tuple): [ 69s] rec_index = index[0] [ 69s] else: [ 69s] rec_index = index [ 69s] if isinstance(rec_index, slice): [ 69s] recs = (rec_index.start or 0) + len(data) [ 69s] else: [ 69s] recs = rec_index + 1 [ 69s] if recs > len(self.data): [ 69s] shape = (recs,) + self._shape[1:] [ 69s] # Resize in-place does not always work since [ 69s] # the array might not be single-segment [ 69s] try: [ 69s] self.data.resize(shape) [ 69s] except ValueError: [ 69s] self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) [ 69s] > self.data[index] = data [ 69s] E ValueError: assignment destination is read-only [ 69s] [ 69s] /usr/lib64/python2.7/site-packages/scipy/io/netcdf.py:996: ValueError [ 69s] ___________________ GenericNetCDFDataTest.test_append_write ____________________ [ 69s] [ 69s] self = [ 69s] [ 69s] def test_append_write(self): [ 69s] # regression for GH1215 [ 69s] data = create_test_data() [ 69s] > with self.roundtrip_append(data) as actual: [ 69s] [ 69s] xarray/tests/test_backends.py:786: [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] /usr/lib64/python2.7/contextlib.py:17: in __enter__ [ 69s] return self.gen.next() [ 69s] xarray/tests/test_backends.py:155: in roundtrip_append [ 69s] self.save(data[[key]], path, mode=mode, **save_kwargs) [ 69s] xarray/tests/test_backends.py:162: in save [ 69s] **kwargs) [ 69s] xarray/core/dataset.py:1137: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/api.py:657: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/core/dataset.py:1074: in dump_to_store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:363: in store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:402: in set_variables [ 69s] self.writer.add(source, target) [ 69s] xarray/backends/common.py:265: in add [ 69s] target[...] = source [ 69s] xarray/backends/scipy_.py:61: in __setitem__ [ 69s] data[key] = value [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] [ 69s] self = [ 69s] index = Ellipsis, data = array([0. , 0.5, 1. , 1.5, 2. , 2.5, 3. , 3.5, 4. ]) [ 69s] [ 69s] def __setitem__(self, index, data): [ 69s] if self.maskandscale: [ 69s] missing_value = ( [ 69s] self._get_missing_value() or [ 69s] getattr(data, 'fill_value', 999999)) [ 69s] self._attributes.setdefault('missing_value', missing_value) [ 69s] self._attributes.setdefault('_FillValue', missing_value) [ 69s] data = ((data - self._attributes.get('add_offset', 0.0)) / [ 69s] self._attributes.get('scale_factor', 1.0)) [ 69s] data = np.ma.asarray(data).filled(missing_value) [ 69s] if self._typecode not in 'fd' and data.dtype.kind == 'f': [ 69s] data = np.round(data) [ 69s] [ 69s] # Expand data for record vars? 
[ 69s] if self.isrec: [ 69s] if isinstance(index, tuple): [ 69s] rec_index = index[0] [ 69s] else: [ 69s] rec_index = index [ 69s] if isinstance(rec_index, slice): [ 69s] recs = (rec_index.start or 0) + len(data) [ 69s] else: [ 69s] recs = rec_index + 1 [ 69s] if recs > len(self.data): [ 69s] shape = (recs,) + self._shape[1:] [ 69s] # Resize in-place does not always work since [ 69s] # the array might not be single-segment [ 69s] try: [ 69s] self.data.resize(shape) [ 69s] except ValueError: [ 69s] self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) [ 69s] > self.data[index] = data [ 69s] E ValueError: assignment destination is read-only [ 69s] [ 69s] /usr/lib64/python2.7/site-packages/scipy/io/netcdf.py:996: ValueError [ 69s] _______ GenericNetCDFDataTestAutocloseTrue.test_append_overwrite_values ________ [ 69s] [ 69s] self = [ 69s] [ 69s] def test_append_overwrite_values(self): [ 69s] # regression for GH1215 [ 69s] data = create_test_data() [ 69s] with create_tmp_file(allow_cleanup_failure=False) as tmp_file: [ 69s] self.save(data, tmp_file, mode='w') [ 69s] data['var2'][:] = -999 [ 69s] data['var9'] = data['var2'] * 3 [ 69s] > self.save(data[['var2', 'var9']], tmp_file, mode='a') [ 69s] [ 69s] xarray/tests/test_backends.py:796: [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] xarray/tests/test_backends.py:162: in save [ 69s] **kwargs) [ 69s] xarray/core/dataset.py:1137: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/api.py:657: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/core/dataset.py:1074: in dump_to_store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:363: in store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:402: in set_variables [ 69s] self.writer.add(source, target) [ 69s] xarray/backends/common.py:265: in add [ 69s] target[...] = source [ 69s] xarray/backends/scipy_.py:61: in __setitem__ [ 69s] data[key] = value [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] [ 69s] self = [ 69s] index = Ellipsis [ 69s] data = array([[-999., -999., -999., -999., -999., -999., -999., -999., -999.], [ 69s] ...999.], [ 69s] [-999., -999., -999., -999., -999., -999., -999., -999., -999.]]) [ 69s] [ 69s] def __setitem__(self, index, data): [ 69s] if self.maskandscale: [ 69s] missing_value = ( [ 69s] self._get_missing_value() or [ 69s] getattr(data, 'fill_value', 999999)) [ 69s] self._attributes.setdefault('missing_value', missing_value) [ 69s] self._attributes.setdefault('_FillValue', missing_value) [ 69s] data = ((data - self._attributes.get('add_offset', 0.0)) / [ 69s] self._attributes.get('scale_factor', 1.0)) [ 69s] data = np.ma.asarray(data).filled(missing_value) [ 69s] if self._typecode not in 'fd' and data.dtype.kind == 'f': [ 69s] data = np.round(data) [ 69s] [ 69s] # Expand data for record vars? 
[ 69s] if self.isrec: [ 69s] if isinstance(index, tuple): [ 69s] rec_index = index[0] [ 69s] else: [ 69s] rec_index = index [ 69s] if isinstance(rec_index, slice): [ 69s] recs = (rec_index.start or 0) + len(data) [ 69s] else: [ 69s] recs = rec_index + 1 [ 69s] if recs > len(self.data): [ 69s] shape = (recs,) + self._shape[1:] [ 69s] # Resize in-place does not always work since [ 69s] # the array might not be single-segment [ 69s] try: [ 69s] self.data.resize(shape) [ 69s] except ValueError: [ 69s] self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) [ 69s] > self.data[index] = data [ 69s] E ValueError: assignment destination is read-only [ 69s] [ 69s] /usr/lib64/python2.7/site-packages/scipy/io/netcdf.py:996: ValueError [ 69s] _____________ GenericNetCDFDataTestAutocloseTrue.test_append_write _____________ [ 69s] [ 69s] self = [ 69s] [ 69s] def test_append_write(self): [ 69s] # regression for GH1215 [ 69s] data = create_test_data() [ 69s] > with self.roundtrip_append(data) as actual: [ 69s] [ 69s] xarray/tests/test_backends.py:786: [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] /usr/lib64/python2.7/contextlib.py:17: in __enter__ [ 69s] return self.gen.next() [ 69s] xarray/tests/test_backends.py:155: in roundtrip_append [ 69s] self.save(data[[key]], path, mode=mode, **save_kwargs) [ 69s] xarray/tests/test_backends.py:162: in save [ 69s] **kwargs) [ 69s] xarray/core/dataset.py:1137: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/api.py:657: in to_netcdf [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/core/dataset.py:1074: in dump_to_store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:363: in store [ 69s] unlimited_dims=unlimited_dims) [ 69s] xarray/backends/common.py:402: in set_variables [ 69s] self.writer.add(source, target) [ 69s] xarray/backends/common.py:265: in add [ 69s] target[...] = source [ 69s] xarray/backends/scipy_.py:61: in __setitem__ [ 69s] data[key] = value [ 69s] _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ [ 69s] [ 69s] self = [ 69s] index = Ellipsis, data = array([0. , 0.5, 1. , 1.5, 2. , 2.5, 3. , 3.5, 4. ]) [ 69s] [ 69s] def __setitem__(self, index, data): [ 69s] if self.maskandscale: [ 69s] missing_value = ( [ 69s] self._get_missing_value() or [ 69s] getattr(data, 'fill_value', 999999)) [ 69s] self._attributes.setdefault('missing_value', missing_value) [ 69s] self._attributes.setdefault('_FillValue', missing_value) [ 69s] data = ((data - self._attributes.get('add_offset', 0.0)) / [ 69s] self._attributes.get('scale_factor', 1.0)) [ 69s] data = np.ma.asarray(data).filled(missing_value) [ 69s] if self._typecode not in 'fd' and data.dtype.kind == 'f': [ 69s] data = np.round(data) [ 69s] [ 69s] # Expand data for record vars? 
[ 69s] if self.isrec: [ 69s] if isinstance(index, tuple): [ 69s] rec_index = index[0] [ 69s] else: [ 69s] rec_index = index [ 69s] if isinstance(rec_index, slice): [ 69s] recs = (rec_index.start or 0) + len(data) [ 69s] else: [ 69s] recs = rec_index + 1 [ 69s] if recs > len(self.data): [ 69s] shape = (recs,) + self._shape[1:] [ 69s] # Resize in-place does not always work since [ 69s] # the array might not be single-segment [ 69s] try: [ 69s] self.data.resize(shape) [ 69s] except ValueError: [ 69s] self.__dict__['data'] = np.resize(self.data, shape).astype(self.data.dtype) [ 69s] > self.data[index] = data [ 69s] E ValueError: assignment destination is read-only [ 69s] [ 69s] /usr/lib64/python2.7/site-packages/scipy/io/netcdf.py:996: ValueError [ 69s] =============================== warnings summary =============================== [ 69s] xarray/tests/test_backends.py::ScipyInMemoryDataTest::test_default_fill_value [ 69s] /home/abuild/rpmbuild/BUILD/xarray-0.10.3/xarray/conventions.py:748: SerializationWarning: saving variable x with floating point data as an integer dtype without any _FillValue to use for NaNs [ 69s] for k, v in iteritems(variables)) [ 69s] [ 69s] xarray/tests/test_backends.py::ScipyInMemoryDataTest::test_pickle [ 69s] /usr/lib64/python2.7/site-packages/scipy/io/netcdf.py:299: RuntimeWarning: Cannot close a netcdf_file opened with mmap=True, when netcdf_variables or arrays referring to its data still exist. All data arrays obtained from such files refer directly to data on disk, and must be copied before the file can be cleanly closed. (See netcdf_file docstring for more information on mmap.) [ 69s] ), category=RuntimeWarning) [ 69s] [ 69s] xarray/tests/test_dataarray.py::TestDataArray::test_reindex_regressions [ 69s] /home/abuild/rpmbuild/BUILD/xarray-0.10.3/xarray/core/dataarray.py:882: FutureWarning: Indexer has dimensions ('time2',) that are different from that to be indexed along time. This will behave differently in the future. [ 69s] method=method, tolerance=tolerance, copy=copy, **indexers) [ 69s] [ 69s] xarray/tests/test_missing.py::test_scipy_methods_function [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:511: RuntimeWarning: overflow encountered in multiply [ 69s] self.wi[:j] *= (self.xi[j]-self.xi[:j]) [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:512: RuntimeWarning: overflow encountered in reduce [ 69s] self.wi[j] = np.multiply.reduce(self.xi[:j]-self.xi[j]) [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:609: RuntimeWarning: invalid value encountered in true_divide [ 69s] p = np.dot(c,self.yi)/np.sum(c,axis=-1)[...,np.newaxis] [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:324: RuntimeWarning: overflow encountered in multiply [ 69s] pi = w*pi [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:325: RuntimeWarning: invalid value encountered in multiply [ 69s] p += pi[:,np.newaxis] * self.c[k] [ 69s] /usr/lib64/python2.7/site-packages/scipy/interpolate/polyint.py:325: RuntimeWarning: invalid value encountered in add [ 69s] p += pi[:,np.newaxis] * self.c[k] [ 69s] [ 69s] xarray/tests/test_variable.py::TestVariable::test_index_0d_not_a_time [ 69s] /home/abuild/rpmbuild/BUILD/xarray-0.10.3/xarray/core/duck_array_ops.py:137: FutureWarning: In the future, 'NAT == x' and 'x == NAT' will always be False. 
[ 69s] flag_array = (arr1 == arr2) [ 69s] /home/abuild/rpmbuild/BUILD/xarray-0.10.3/xarray/tests/test_variable.py:141: FutureWarning: In the future, 'NAT == x' and 'x == NAT' will always be False. [ 69s] assert variable.values[0] == expected_value0 [ 69s] /home/abuild/rpmbuild/BUILD/xarray-0.10.3/xarray/tests/test_variable.py:142: FutureWarning: In the future, 'NAT == x' and 'x == NAT' will always be False. [ 69s] assert variable[0].values == expected_value0 [ 69s] [ 69s] xarray/tests/test_variable.py::TestVariableWithDask::test_index_0d_not_a_time [ 69s] /usr/lib/python2.7/site-packages/dask/local.py:271: FutureWarning: In the future, 'NAT == x' and 'x == NAT' will always be False. [ 69s] return func(*args2) [ 69s] [ 69s] -- Docs: http://doc.pytest.org/en/latest/warnings.html [ 69s] 4 failed, 2621 passed, 1443 skipped, 19 xfailed, 4 xpassed, 13 warnings in 44.85 seconds ``` This is with python-netCDF4 == 1.3.1, scipy == 1.0.0, netcdf == 4.4.1. Perhaps a conditional skip would be a (short-term) solution?","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",,313040371
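For reference, a minimal sketch of what the conditional skip suggested in the last comment could look like in xarray/tests/test_backends.py. This is an assumption, not the change that was eventually merged: the helper name `scipy_readonly_append` and the version threshold are placeholders, chosen only because the failure above was observed with scipy 1.0.0.

```python
# Hypothetical sketch only: the version cutoff is a placeholder, not a
# verified fix version; the read-only failure was observed with scipy 1.0.0.
from distutils.version import LooseVersion

import pytest
import scipy

# Skip the scipy-backend append tests while the installed scipy is assumed to
# expose mmap'ed netCDF variables as read-only arrays (see GH2050 traceback).
scipy_readonly_append = LooseVersion(scipy.__version__) <= LooseVersion("1.0.0")


@pytest.mark.skipif(
    scipy_readonly_append,
    reason="scipy netCDF append fails: assignment destination is read-only",
)
def test_append_write():
    ...
```

An `@pytest.mark.xfail` with the same condition would be an alternative that keeps the failure visible in the test report rather than hiding it entirely.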