home / github / issue_comments

Menu
  • Search all tables
  • GraphQL API

issue_comments: 1059052257

This data as json

html_url issue_url id node_id user created_at updated_at author_association body reactions performed_via_github_app issue
https://github.com/pydata/xarray/issues/6069#issuecomment-1059052257 https://api.github.com/repos/pydata/xarray/issues/6069 1059052257 IC_kwDOAMm_X84_H9rh 9576982 2022-03-04T10:50:09Z 2022-03-04T10:50:09Z NONE

OK, that's not exactly the same error message — I could not even start the appending. But this is basically one example that could be tested. A model would want to compute each of these variables step by step, variable by variable, and save them at each single iteration. There is no need for concurrent writing, since most of the resources are focused on the modelling itself.

```python
import xarray as xr
from rasterio.enums import Resampling
import numpy as np

ds = xr.tutorial.open_dataset('air_temperature').isel(time=0)
ds = ds.rio.write_crs('EPSG:4326')
dst = ds.rio.reproject('EPSG:3857', shape=(250, 250), resampling=Resampling.bilinear, nodata=np.nan)
dst.to_zarr('test.zarr')
```

Returns


```
ValueError                                Traceback (most recent call last)
/opt/conda/lib/python3.7/site-packages/zarr/util.py in normalize_fill_value(fill_value, dtype)
    277         else:
--> 278             fill_value = np.array(fill_value, dtype=dtype)[()]
    279
```

ValueError: cannot convert float NaN to integer

During handling of the above exception, another exception occurred:

ValueError Traceback (most recent call last) /tmp/ipykernel_2604/3259577033.py in <module> ----> 1 dst.to_zarr('test.zarr')

/opt/conda/lib/python3.7/site-packages/xarray/core/dataset.py in to_zarr(self, store, chunk_store, mode, synchronizer, group, encoding, compute, consolidated, append_dim, region, safe_chunks, storage_options) 2048 append_dim=append_dim, 2049 region=region, -> 2050 safe_chunks=safe_chunks, 2051 ) 2052

/opt/conda/lib/python3.7/site-packages/xarray/backends/api.py in to_zarr(dataset, store, chunk_store, mode, synchronizer, group, encoding, compute, consolidated, append_dim, region, safe_chunks, storage_options) 1429 writer = ArrayWriter() 1430 # TODO: figure out how to properly handle unlimited_dims -> 1431 dump_to_store(dataset, zstore, writer, encoding=encoding) 1432 writes = writer.sync(compute=compute) 1433

/opt/conda/lib/python3.7/site-packages/xarray/backends/api.py in dump_to_store(dataset, store, writer, encoder, encoding, unlimited_dims) 1117 variables, attrs = encoder(variables, attrs) 1118 -> 1119 store.store(variables, attrs, check_encoding, writer, unlimited_dims=unlimited_dims) 1120 1121

/opt/conda/lib/python3.7/site-packages/xarray/backends/zarr.py in store(self, variables, attributes, check_encoding_set, writer, unlimited_dims) 549 550 self.set_variables( --> 551 variables_encoded, check_encoding_set, writer, unlimited_dims=unlimited_dims 552 ) 553 if self._consolidate_on_close:

/opt/conda/lib/python3.7/site-packages/xarray/backends/zarr.py in set_variables(self, variables, check_encoding_set, writer, unlimited_dims) 607 dtype = str 608 zarr_array = self.zarr_group.create( --> 609 name, shape=shape, dtype=dtype, fill_value=fill_value, **encoding 610 ) 611 zarr_array.attrs.put(encoded_attrs)

/opt/conda/lib/python3.7/site-packages/zarr/hierarchy.py in create(self, name, kwargs) 889 """Create an array. Keyword arguments as per 890 :func:zarr.creation.create.""" --> 891 return self._write_op(self._create_nosync, name, kwargs) 892 893 def _create_nosync(self, name, **kwargs):

/opt/conda/lib/python3.7/site-packages/zarr/hierarchy.py in _write_op(self, f, args, kwargs) 659 660 with lock: --> 661 return f(args, **kwargs) 662 663 def create_group(self, name, overwrite=False):

/opt/conda/lib/python3.7/site-packages/zarr/hierarchy.py in _create_nosync(self, name, kwargs) 896 kwargs.setdefault('cache_attrs', self.attrs.cache) 897 return create(store=self._store, path=path, chunk_store=self._chunk_store, --> 898 kwargs) 899 900 def empty(self, name, **kwargs):

/opt/conda/lib/python3.7/site-packages/zarr/creation.py in create(shape, chunks, dtype, compressor, fill_value, order, store, synchronizer, overwrite, path, chunk_store, filters, cache_metadata, cache_attrs, read_only, object_codec, dimension_separator, **kwargs) 139 fill_value=fill_value, order=order, overwrite=overwrite, path=path, 140 chunk_store=chunk_store, filters=filters, object_codec=object_codec, --> 141 dimension_separator=dimension_separator) 142 143 # instantiate array

/opt/conda/lib/python3.7/site-packages/zarr/storage.py in init_array(store, shape, chunks, dtype, compressor, fill_value, order, overwrite, path, chunk_store, filters, object_codec, dimension_separator) 356 chunk_store=chunk_store, filters=filters, 357 object_codec=object_codec, --> 358 dimension_separator=dimension_separator) 359 360

/opt/conda/lib/python3.7/site-packages/zarr/storage.py in _init_array_metadata(store, shape, chunks, dtype, compressor, fill_value, order, overwrite, path, chunk_store, filters, object_codec, dimension_separator) 392 chunks = normalize_chunks(chunks, shape, dtype.itemsize) 393 order = normalize_order(order) --> 394 fill_value = normalize_fill_value(fill_value, dtype) 395 396 # optional array metadata

/opt/conda/lib/python3.7/site-packages/zarr/util.py in normalize_fill_value(fill_value, dtype) 281 # re-raise with our own error message to be helpful 282 raise ValueError('fill_value {!r} is not valid for dtype {}; nested ' --> 283 'exception: {}'.format(fill_value, dtype, e)) 284 285 return fill_value

ValueError: fill_value nan is not valid for dtype int16; nested exception: cannot convert float NaN to integer

{
    "total_count": 0,
    "+1": 0,
    "-1": 0,
    "laugh": 0,
    "hooray": 0,
    "confused": 0,
    "heart": 0,
    "rocket": 0,
    "eyes": 0
}
  1077079208
Powered by Datasette · Queries took 0.668ms · About: xarray-datasette