issues: 1851388168
field | value
---|---
id | 1851388168
node_id | I_kwDOAMm_X85uWfEI
number | 8072
title | RuntimeError: NetCDF: Invalid dimension ID or name
user | 8382834
state | closed
locked | 0
comments | 7
created_at | 2023-08-15T12:50:43Z
updated_at | 2023-08-20T03:41:24Z
closed_at | 2023-08-20T03:41:23Z
author_association | CONTRIBUTOR
reactions (total_count) | 0
state_reason | completed
repo | 13221727
type | issue

### What happened?

I got a long error message and the `RuntimeError: NetCDF: Invalid dimension ID or name` shown in the log output below.

### What did you expect to happen?

_No response_

### Minimal Complete Verifiable Example

See the Jupyter notebook: https://github.com/jerabaul29/public_bug_reports/blob/main/xarray/2023_08_15/illustrate_issue_xr.ipynb . Copying the commands (note that they should be run as a notebook):

```Python
import os
import xarray as xr

xr.show_versions()

# get the file
!wget https://arcticdata.io/metacat/d1/mn/v2/object/urn%3Auuid%3Ad5f179a3-76a8-4e4f-b45f-8e8d85960ba6

# rename the file
!mv urn\:uuid\:d5f179a3-76a8-4e4f-b45f-8e8d85960ba6 bar_BSO3_a_rcm7_2008_09.nc

# the file is not xarray-friendly, modify and rename
!ncrename -v six_con,six_con_dim bar_BSO3_a_rcm7_2008_09.nc bar_BSO3_a_rcm7_2008_09_xr.nc

xr.open_dataset("./bar_BSO3_a_rcm7_2008_09_xr.nc")
```

### MVCE confirmation
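For reference, here is a minimal non-notebook sketch of the same reproduction steps (not part of the original report), assuming `wget` and NCO's `ncrename` are available on the PATH; the file names and URL are taken from the notebook cells above:

```Python
import subprocess

import xarray as xr

# Download, rename, and "fix" the file exactly as in the notebook cells above,
# then try to open the result with xarray (this is the call that fails).
url = (
    "https://arcticdata.io/metacat/d1/mn/v2/object/"
    "urn%3Auuid%3Ad5f179a3-76a8-4e4f-b45f-8e8d85960ba6"
)
raw = "bar_BSO3_a_rcm7_2008_09.nc"
fixed = "bar_BSO3_a_rcm7_2008_09_xr.nc"

subprocess.run(["wget", "-O", raw, url], check=True)
subprocess.run(["ncrename", "-v", "six_con,six_con_dim", raw, fixed], check=True)

ds = xr.open_dataset(fixed)  # reported to raise RuntimeError: NetCDF: Invalid dimension ID or name
print(ds)
```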
### Relevant log output

```Python
KeyError                                  Traceback (most recent call last)
File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/file_manager.py:211, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    210 try:
--> 211     file = self._cache[self._key]
    212 except KeyError:

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/lru_cache.py:56, in LRUCache.__getitem__(self, key)
     55 with self._lock:
---> 56     value = self._cache[key]
     57     self._cache.move_to_end(key)

KeyError: [<class 'netCDF4._netCDF4.Dataset'>, ('/home/jrmet/Desktop/Git/public_bug_reports/xarray/2023_08_15/bar_BSO3_a_rcm7_2008_09_xr.nc',), 'r', (('clobber', True), ('diskless', False), ('format', 'NETCDF4'), ('persist', False)), 'b771184f-a474-4093-b5f1-784542ef9ce6']

During handling of the above exception, another exception occurred:

RuntimeError                              Traceback (most recent call last)
Cell In[6], line 1
----> 1 xr.open_dataset("./bar_BSO3_a_rcm7_2008_09_xr.nc")

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/api.py:570, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)
    558 decoders = _resolve_decoders_kwargs(
    559     decode_cf,
    560     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    566     decode_coords=decode_coords,
    567 )
    569 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 570 backend_ds = backend.open_dataset(
    571     filename_or_obj,
    572     drop_variables=drop_variables,
    573     **decoders,
    574     **kwargs,
    575 )
    576 ds = _dataset_from_backend_dataset(
    577     backend_ds,
    578     filename_or_obj,
   (...)
    588     **kwargs,
    589 )
    590 return ds

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:602, in NetCDF4BackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, format, clobber, diskless, persist, lock, autoclose)
    581 def open_dataset(  # type: ignore[override]  # allow LSP violation, not supporting **kwargs
    582     self,
    583     filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
   (...)
    599     autoclose=False,
    600 ) -> Dataset:
    601     filename_or_obj = _normalize_path(filename_or_obj)
--> 602     store = NetCDF4DataStore.open(
    603         filename_or_obj,
    604         mode=mode,
    605         format=format,
    606         group=group,
    607         clobber=clobber,
    608         diskless=diskless,
    609         persist=persist,
    610         lock=lock,
    611         autoclose=autoclose,
    612     )
    614     store_entrypoint = StoreBackendEntrypoint()
    615     with close_on_error(store):

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:400, in NetCDF4DataStore.open(cls, filename, mode, format, group, clobber, diskless, persist, lock, lock_maker, autoclose)
    394 kwargs = dict(
    395     clobber=clobber, diskless=diskless, persist=persist, format=format
    396 )
    397 manager = CachingFileManager(
    398     netCDF4.Dataset, filename, mode=mode, kwargs=kwargs
    399 )
--> 400 return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:347, in NetCDF4DataStore.__init__(self, manager, group, mode, lock, autoclose)
    345 self._group = group
    346 self._mode = mode
--> 347 self.format = self.ds.data_model
    348 self._filename = self.ds.filepath()
    349 self.is_remote = is_remote_uri(self._filename)

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:409, in NetCDF4DataStore.ds(self)
    407 @property
    408 def ds(self):
--> 409     return self._acquire()

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/netCDF4_.py:403, in NetCDF4DataStore._acquire(self, needs_lock)
    402 def _acquire(self, needs_lock=True):
--> 403     with self._manager.acquire_context(needs_lock) as root:
    404         ds = _nc4_require_group(root, self._group, self._mode)
    405     return ds

File ~/miniconda3/envs/pytmd/lib/python3.11/contextlib.py:137, in _GeneratorContextManager.__enter__(self)
    135 del self.args, self.kwds, self.func
    136 try:
--> 137     return next(self.gen)
    138 except StopIteration:
    139     raise RuntimeError("generator didn't yield") from None

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/file_manager.py:199, in CachingFileManager.acquire_context(self, needs_lock)
    196 @contextlib.contextmanager
    197 def acquire_context(self, needs_lock=True):
    198     """Context manager for acquiring a file."""
--> 199     file, cached = self._acquire_with_cache_info(needs_lock)
    200 try:
    201     yield file

File ~/miniconda3/envs/pytmd/lib/python3.11/site-packages/xarray/backends/file_manager.py:217, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    215     kwargs = kwargs.copy()
    216     kwargs["mode"] = self._mode
--> 217 file = self._opener(*self._args, **kwargs)
    218 if self._mode == "w":
    219     # ensure file doesn't get overridden when opened again
    220     self._mode = "a"

File src/netCDF4/_netCDF4.pyx:2485, in netCDF4._netCDF4.Dataset.__init__()

File src/netCDF4/_netCDF4.pyx:1863, in netCDF4._netCDF4._get_dims()

File src/netCDF4/_netCDF4.pyx:2029, in netCDF4._netCDF4._ensure_nc_success()

RuntimeError: NetCDF: Invalid dimension ID or name
```
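One way to narrow this down (a hedged diagnostic sketch, not from the original report): the traceback bottoms out in `netCDF4._netCDF4._get_dims()`, so opening the renamed file with netCDF4-python directly, bypassing xarray, should show whether the error originates in the underlying netCDF4/libnetcdf layer rather than in xarray itself.

```Python
import netCDF4

# Open the same file that xr.open_dataset() choked on, using netCDF4 directly.
# If this also raises "NetCDF: Invalid dimension ID or name", the problem lies
# below xarray (netCDF4-python/libnetcdf or the ncrename-produced file itself).
try:
    with netCDF4.Dataset("./bar_BSO3_a_rcm7_2008_09_xr.nc", mode="r") as nc:
        print("dimensions:", list(nc.dimensions))
        print("variables:", list(nc.variables))
except RuntimeError as err:
    print("netCDF4 fails on its own as well:", err)
```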
### Anything else we need to know?

_No response_

### Environment

```
INSTALLED VERSIONS
------------------
commit: None
python: 3.11.4 | packaged by conda-forge | (main, Jun 10 2023, 18:08:17) [GCC 12.2.0]
python-bits: 64
OS: Linux
OS-release: 5.15.0-78-generic
machine: x86_64
processor: x86_64
byteorder: little
LC_ALL: None
LANG: en_US.UTF-8
LOCALE: ('en_US', 'UTF-8')
libhdf5: 1.14.1
libnetcdf: 4.9.2
xarray: 2023.7.0
pandas: 2.0.3
numpy: 1.25.2
scipy: 1.11.1
netCDF4: 1.6.4
pydap: None
h5netcdf: None
h5py: 3.9.0
Nio: None
zarr: None
cftime: 1.6.2
nc_time_axis: None
PseudoNetCDF: None
iris: None
bottleneck: None
dask: None
distributed: None
matplotlib: 3.7.2
cartopy: 0.22.0
seaborn: None
numbagg: None
fsspec: None
cupy: None
pint: None
sparse: None
flox: None
numpy_groupies: None
setuptools: 68.0.0
pip: 23.2.1
conda: None
pytest: None
mypy: None
IPython: 8.14.0
sphinx: None
```