issues
18 rows where comments = 7, repo = 13221727 and user = 14808389 sorted by updated_at descending
id | node_id | number | title | user | state | locked | assignee | milestone | comments | created_at | updated_at | closed_at | author_association | active_lock_reason | draft | pull_request | body | reactions | performed_via_github_app | state_reason | repo | type |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2267711587 | PR_kwDOAMm_X85t8VWy | 8978 | more engine environment tricks in preparation for `numpy>=2` | keewis 14808389 | closed | 0 | 7 | 2024-04-28T17:54:38Z | 2024-04-29T14:56:22Z | 2024-04-29T14:56:21Z | MEMBER | 0 | pydata/xarray/pulls/8978 | Turns out And finally, the
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/8978/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
2078559800 | PR_kwDOAMm_X85j6NsC | 8605 | run CI on `python=3.12` | keewis 14808389 | closed | 0 | 7 | 2024-01-12T10:47:18Z | 2024-01-17T21:54:13Z | 2024-01-17T21:54:12Z | MEMBER | 0 | pydata/xarray/pulls/8605 |
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/8605/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
1655290694 | I_kwDOAMm_X85iqbtG | 7721 | `as_shared_dtype` converts scalars to 0d `numpy` arrays if chunked `cupy` is involved | keewis 14808389 | open | 0 | 7 | 2023-04-05T09:48:34Z | 2023-12-04T10:45:43Z | MEMBER | I tried to run `arr.chunk().where(mask).compute()` on a chunked, `cupy`-backed `DataArray`, which fails with:

```
TypeError                                 Traceback (most recent call last)
Cell In[4], line 1
----> 1 arr.chunk().where(mask).compute()

File ~/repos/xarray/xarray/core/dataarray.py:1095, in DataArray.compute(self, **kwargs)
   1076 """Manually trigger loading of this array's data from disk or a
   1077 remote source into memory and return a new array. The original is
   1078 left unaltered.
   (...)
   1092 dask.compute
   1093 """
   1094 new = self.copy(deep=False)
-> 1095 return new.load(**kwargs)

File ~/repos/xarray/xarray/core/dataarray.py:1069, in DataArray.load(self, **kwargs)
   1051 def load(self: T_DataArray, **kwargs) -> T_DataArray:
   1052     """Manually trigger loading of this array's data from disk or a
   1053     remote source into memory and return this array.
   1054
   (...)
   1067     dask.compute
   1068     """
-> 1069 ds = self._to_temp_dataset().load(**kwargs)
   1070 new = self._from_temp_dataset(ds)
   1071 self._variable = new._variable

File ~/repos/xarray/xarray/core/dataset.py:752, in Dataset.load(self, **kwargs)
    749 import dask.array as da
    751 # evaluate all the dask arrays simultaneously
--> 752 evaluated_data = da.compute(*lazy_data.values(), **kwargs)
    754 for k, data in zip(lazy_data, evaluated_data):
    755     self.variables[k].data = data

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/base.py:600, in compute(traverse, optimize_graph, scheduler, get, *args, **kwargs)
    597     keys.append(x.__dask_keys__())
    598     postcomputes.append(x.__dask_postcompute__())
--> 600 results = schedule(dsk, keys, **kwargs)
    601 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/threaded.py:89, in get(dsk, keys, cache, num_workers, pool, **kwargs)
     86 elif isinstance(pool, multiprocessing.pool.Pool):
     87     pool = MultiprocessingPoolExecutor(pool)
---> 89 results = get_async(
     90     pool.submit,
     91     pool._max_workers,
     92     dsk,
     93     keys,
     94     cache=cache,
     95     get_id=_thread_get_id,
     96     pack_exception=pack_exception,
     97     **kwargs,
     98 )
    100 # Cleanup pools associated to dead threads
    101 with pools_lock:

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/local.py:511, in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, chunksize, **kwargs)
    509     _execute_task(task, data)  # Re-execute locally
    510 else:
--> 511     raise_exception(exc, tb)
    512 res, worker_id = loads(res_info)
    513 state["cache"][key] = res

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/local.py:319, in reraise(exc, tb)
    317 if exc.__traceback__ is not tb:
    318     raise exc.with_traceback(tb)
--> 319 raise exc

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/local.py:224, in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
    222 try:
    223     task, data = loads(task_info)
--> 224     result = _execute_task(task, data)
    225     id = get_id()
    226     result = dumps((result, id))

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/core.py:119, in _execute_task(arg, cache, dsk)
    115     func, args = arg[0], arg[1:]
    116     # Note: Don't assign the subtask results to a variable. numpy detects
    117     # temporaries by their reference count and can execute certain
    118     # operations in-place.
--> 119     return func(*(_execute_task(a, cache) for a in args))
    120 elif not ishashable(arg):
    121     return arg

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/optimization.py:990, in SubgraphCallable.__call__(self, *args)
    988 if not len(args) == len(self.inkeys):
    989     raise ValueError("Expected %d args, got %d" % (len(self.inkeys), len(args)))
--> 990 return core.get(self.dsk, self.outkey, dict(zip(self.inkeys, args)))

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/core.py:149, in get(dsk, out, cache)
    147 for key in toposort(dsk):
    148     task = dsk[key]
--> 149     result = _execute_task(task, cache)
    150     cache[key] = result
    151 result = _execute_task(out, cache)

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/dask/core.py:119, in _execute_task(arg, cache, dsk)
    115     func, args = arg[0], arg[1:]
    (...)
--> 119     return func(*(_execute_task(a, cache) for a in args))
    120 elif not ishashable(arg):
    121     return arg

File <__array_function__ internals>:180, in where(*args, **kwargs)

File cupy/_core/core.pyx:1723, in cupy._core.core._ndarray_base.__array_function__()

File ~/.local/opt/mambaforge/envs/xarray/lib/python3.10/site-packages/cupy/_sorting/search.py:211, in where(condition, x, y)
    209 if fusion._is_fusing():
    210     return fusion._call_ufunc(_where_ufunc, condition, x, y)
--> 211 return _where_ufunc(condition.astype('?'), x, y)

File cupy/_core/_kernel.pyx:1287, in cupy._core._kernel.ufunc.__call__()

File cupy/_core/_kernel.pyx:160, in cupy._core._kernel._preprocess_args()

File cupy/_core/_kernel.pyx:146, in cupy._core._kernel._preprocess_arg()

TypeError: Unsupported type <class 'numpy.ndarray'>
```

I think the reason is that this: https://github.com/pydata/xarray/blob/d4db16699f30ad1dc3e6861601247abf4ac96567/xarray/core/duck_array_ops.py#L195 is not sufficient to detect `cupy` when it is hidden inside chunked (`dask`-wrapped) arrays. cc @jacobtomlinson |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/7721/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | issue | ||||||||
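The original setup code was lost in this export; below is a hypothetical minimal reproducer of the failing pattern, assuming a CUDA-capable machine with `cupy`, `dask`, and `xarray` installed. The array contents and variable names are made up, taken only from the call visible in the traceback.

```python
# Hypothetical reproducer: a chunked, cupy-backed DataArray combined with a
# scalar fill value via .where(). Requires a CUDA GPU plus cupy and dask.
import cupy
import xarray as xr

arr = xr.DataArray(cupy.arange(4, dtype=float), dims="x")
mask = xr.DataArray(cupy.asarray([True, False, True, False]), dims="x")

arr.where(mask)                    # eager: works, the NaN fill stays a scalar
arr.chunk().where(mask).compute()  # chunked: the fill value reaches cupy.where
                                   # as a 0d numpy array and raises TypeError
```

As the issue title says, the culprit is `as_shared_dtype` turning the scalar fill value into a 0d `numpy` array when the `cupy` data sits behind `dask`, which `cupy.where` then refuses to mix with device arrays.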
1845449919 | PR_kwDOAMm_X85Xp1U1 | 8064 | adapt to NEP 51 | keewis 14808389 | closed | 0 | 7 | 2023-08-10T15:43:13Z | 2023-09-30T09:27:25Z | 2023-09-25T04:46:49Z | MEMBER | 0 | pydata/xarray/pulls/8064 | With NEP 51 (and the changes to
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/8064/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
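For context, NEP 51 changes how `numpy` scalars print, which is what breaks doctests and reprs; a small sketch of the difference follows (the exact adjustments made in this PR are not reproduced here).

```python
# NEP 51: numpy >= 2 includes the type in scalar reprs, numpy 1.x does not.
import numpy as np

x = np.float64(3.0)
print(repr(x))     # numpy >= 2: "np.float64(3.0)"; numpy 1.x: "3.0"
print(str(x))      # "3.0" on both major versions
print(f"{x:.1f}")  # explicit formatting is also unaffected by NEP 51
```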
1898193938 | PR_kwDOAMm_X85abbJ4 | 8188 | fix the failing docs | keewis 14808389 | closed | 0 | 7 | 2023-09-15T11:01:42Z | 2023-09-20T11:04:03Z | 2023-09-15T13:26:24Z | MEMBER | 0 | pydata/xarray/pulls/8188 | The docs have been failing because of a malformatted docstring we inherit from
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/8188/reactions", "total_count": 1, "+1": 1, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
1306887842 | PR_kwDOAMm_X847g7WQ | 6795 | display the indexes in the string reprs | keewis 14808389 | closed | 0 | 7 | 2022-07-16T19:42:19Z | 2022-10-15T18:28:36Z | 2022-10-12T16:52:53Z | MEMBER | 0 | pydata/xarray/pulls/6795 | With the flexible indexes refactor, indexes have become much more important, which means we should include them in the reprs of This is an initial attempt, covering only the string reprs, with a few unanswered questions:
- how do we format indexes? Do we delegate to their (also, how do we best test this?)
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/6795/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
532696790 | MDU6SXNzdWU1MzI2OTY3OTA= | 3594 | support for units with pint | keewis 14808389 | open | 0 | 7 | 2019-12-04T13:49:28Z | 2022-08-03T11:44:05Z | MEMBER |
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3594/reactions", "total_count": 14, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 14, "rocket": 0, "eyes": 0 } |
xarray 13221727 | issue | ||||||||
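The user-facing entry point that grew out of this effort is the `pint-xarray` accessor package; here is a minimal, hedged sketch of the workflow it enables (the dataset below is made up, and `pint` plus `pint-xarray` need to be installed).

```python
# Hedged sketch of unit-aware xarray via the pint-xarray accessor.
import numpy as np
import pint_xarray  # noqa: F401  -- importing registers the .pint accessor
import xarray as xr

ds = xr.Dataset({"distance": ("t", np.array([1.0, 2.0, 3.0]), {"units": "m"})})

quantified = ds.pint.quantify()           # wrap the data in pint.Quantity
print(quantified["distance"].pint.units)  # meter
print(quantified.pint.dequantify())       # move the units back into attrs
```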
935279688 | MDExOlB1bGxSZXF1ZXN0NjgyMjIzODA2 | 5560 | conditionally disable bottleneck | keewis 14808389 | closed | 0 | 7 | 2021-07-01T23:12:03Z | 2021-08-12T15:05:48Z | 2021-08-12T14:41:34Z | MEMBER | 0 | pydata/xarray/pulls/5560 | As this came up in #5424 (and because I can't seem to reliably reproduce expected values in #4972 if it is enabled), this adds an option to disable In #5424 it was suggested to also allow replacing Tests are missing because I have no idea how to check that this works (except by mocking bottleneck).
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/5560/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
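In released xarray the switch described here is exposed through `xr.set_options`; a short usage sketch with made-up data follows (the option name `use_bottleneck` reflects what ended up on main, not wording from this PR's truncated body).

```python
# Disable the bottleneck-accelerated code paths, locally or globally.
import numpy as np
import xarray as xr

arr = xr.DataArray(np.random.default_rng(0).random((100, 100)), dims=("x", "y"))

with xr.set_options(use_bottleneck=False):   # only within this block
    rolled = arr.rolling(x=5).mean()

xr.set_options(use_bottleneck=False)         # or for the rest of the session
print(rolled)
```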
896002237 | MDExOlB1bGxSZXF1ZXN0NjQ4MTUzNTIy | 5352 | prefer the objects _repr_inline_ over xarray's custom reprs | keewis 14808389 | closed | 0 | 7 | 2021-05-19T22:24:19Z | 2021-05-25T21:58:00Z | 2021-05-25T21:57:57Z | MEMBER | 0 | pydata/xarray/pulls/5352 | In preparation of pushing these upstream, this increases the priority of the
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/5352/reactions", "total_count": 1, "+1": 1, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
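The hook being prioritised here is a `_repr_inline_(max_width)` method on the wrapped array; below is a hedged toy sketch of defining it. The `LabelledArray` class is invented for illustration, and the duck-array stubs are only the minimum needed for xarray to keep the wrapper instead of coercing it to `numpy`.

```python
# Toy duck array that customizes its one-line summary in Dataset reprs.
import numpy as np
import xarray as xr


class LabelledArray:
    """Minimal numpy-backed duck array with a custom inline repr."""

    def __init__(self, data, label):
        self._data = np.asarray(data)
        self.label = label

    @property
    def dtype(self):
        return self._data.dtype

    @property
    def shape(self):
        return self._data.shape

    @property
    def ndim(self):
        return self._data.ndim

    def __array__(self, dtype=None):
        return np.asarray(self._data, dtype=dtype)

    # protocol stubs so xarray treats this object as a duck array
    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
        return NotImplemented

    def __array_function__(self, func, types, args, kwargs):
        return NotImplemented

    def _repr_inline_(self, max_width):
        # xarray passes the available width; keep the summary within it
        return f"LabelledArray({self.label})"[:max_width]


ds = xr.Dataset({"a": ("x", LabelledArray([1, 2, 3], "demo"))})
print(ds)  # the summary line for "a" should end with "LabelledArray(demo)"
```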
807620560 | MDExOlB1bGxSZXF1ZXN0NTcyODM5Mjk3 | 4902 | also apply combine_attrs to the attrs of the variables | keewis 14808389 | closed | 0 | 7 | 2021-02-12T23:53:49Z | 2021-05-05T16:43:08Z | 2021-05-05T16:37:25Z | MEMBER | 0 | pydata/xarray/pulls/4902 | Follow-up to #4827. The current behavior is to always combine with
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/4902/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
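For context, `combine_attrs` is the keyword on `xr.concat` / `xr.merge` whose scope this PR extends to the variables' attrs; a small sketch with made-up data follows (the printed results assume a release that includes this change).

```python
# combine_attrs controls how conflicting attrs are merged.
import xarray as xr

a = xr.Dataset({"t": ("x", [1, 2], {"units": "K", "source": "model A"})})
b = xr.Dataset({"t": ("x", [3, 4], {"units": "K", "source": "model B"})})

merged = xr.concat([a, b], dim="x", combine_attrs="drop_conflicts")
print(merged["t"].attrs)      # should keep {'units': 'K'} and drop 'source'

overridden = xr.concat([a, b], dim="x", combine_attrs="override")
print(overridden["t"].attrs)  # should keep the attrs of the first dataset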
686848844 | MDExOlB1bGxSZXF1ZXN0NDc0MzczMTE4 | 4381 | run black and blackdoc | keewis 14808389 | closed | 0 | 7 | 2020-08-27T00:23:04Z | 2020-08-27T15:12:48Z | 2020-08-27T14:56:38Z | MEMBER | 0 | pydata/xarray/pulls/4381 |
This is a separate PR so reviewing becomes easier. Since it is an automatic change, this should be ready to be merged once the CI is green. |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/4381/reactions", "total_count": 1, "+1": 1, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
640740029 | MDExOlB1bGxSZXF1ZXN0NDM2MDk0MjQ3 | 4163 | silence UnitStrippedWarnings | keewis 14808389 | closed | 0 | 7 | 2020-06-17T21:02:16Z | 2020-07-02T17:29:28Z | 2020-07-02T16:13:59Z | MEMBER | 0 | pydata/xarray/pulls/4163 | Now that the tests are cleaned up, it is time to fix the numerous "unit stripped" warnings. We'll get there by promoting all
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/4163/reactions", "total_count": 2, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 2, "eyes": 0 } |
xarray 13221727 | pull | |||||
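For context on the warning being silenced: `pint` emits `UnitStrippedWarning` whenever a quantity is downcast to a plain `numpy` array. The sketch below shows where it typically comes from and how to surface it explicitly while debugging; it is not the approach taken in the PR itself, whose (truncated) body describes promoting the test data instead.

```python
# Demonstrate pint's UnitStrippedWarning and turn it into a hard error.
import warnings

import numpy as np
import pint

ureg = pint.UnitRegistry()
q = ureg.Quantity([1.0, 2.0, 3.0], "m")

with warnings.catch_warnings():
    warnings.simplefilter("error", category=pint.UnitStrippedWarning)
    try:
        np.asarray(q)  # downcasting to a plain ndarray strips the units
    except pint.UnitStrippedWarning as exc:
        print("stripped:", exc)
```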
603930174 | MDExOlB1bGxSZXF1ZXN0NDA2NjE5MDkz | 3988 | built-in accessor documentation | keewis 14808389 | closed | 0 | 7 | 2020-04-21T11:39:36Z | 2020-06-13T17:52:51Z | 2020-06-13T17:52:46Z | MEMBER | 0 | pydata/xarray/pulls/3988 | We currently use This adds a A few questions / comments on this:
1. I noticed we have
Edit: err, it seems I used the branch I pushed to the main repository for a documentation preview for this... |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3988/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
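For readers unfamiliar with the accessor pattern whose documentation this PR reworks, here is a toy sketch in the style of the xarray docs; the `geo` accessor and its coordinate names are invented for illustration.

```python
# Register a custom Dataset accessor and use it.
import xarray as xr


@xr.register_dataset_accessor("geo")
class GeoAccessor:
    """Toy accessor: adds a .geo.center property to every Dataset."""

    def __init__(self, xarray_obj):
        self._obj = xarray_obj

    @property
    def center(self):
        """Mean of the (made-up) longitude/latitude coordinates."""
        return (
            float(self._obj["longitude"].mean()),
            float(self._obj["latitude"].mean()),
        )


ds = xr.Dataset(coords={"longitude": [10.0, 20.0], "latitude": [40.0, 50.0]})
print(ds.geo.center)  # (15.0, 45.0)
```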
619455327 | MDExOlB1bGxSZXF1ZXN0NDE4OTU3NzQ3 | 4070 | remove the backslash escapes and typehint fragments in the API docs | keewis 14808389 | closed | 0 | 7 | 2020-05-16T11:55:23Z | 2020-05-19T16:49:31Z | 2020-05-19T16:49:26Z | MEMBER | 0 | pydata/xarray/pulls/4070 | The reason for those fragments is that For reference, here's the new API page.
- [x] Closes #3178
- [x] Passes |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/4070/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
536639533 | MDExOlB1bGxSZXF1ZXN0MzUyMTM2MTEx | 3611 | Pint support for top-level functions | keewis 14808389 | closed | 0 | 7 | 2019-12-11T22:04:13Z | 2020-03-09T11:35:27Z | 2020-03-09T07:40:46Z | MEMBER | 0 | pydata/xarray/pulls/3611 | This PR tries to get the pint integration tests (see #3594) to pass. To make sure this does not become a giant PR, it is limited to the top-level functions and the work on the tests for I added pint master to the
Also, I finally ran into a situation where I have to use Failing tests list from #3594:
- |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3611/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
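A hedged sketch of what "pint support for top-level functions" means in practice, assuming a recent xarray/pint combination; the data is made up and the exact functions covered by this PR are not listed in the truncated body.

```python
# Call functions from the xr namespace on quantity-backed objects.
import numpy as np
import pint
import xarray as xr

ureg = pint.UnitRegistry(force_ndarray_like=True)

da = xr.DataArray(ureg.Quantity(np.arange(3.0), "m"), dims="x")

stacked = xr.concat([da, da], dim="x")  # concatenation should keep the units
doubled = da * 2                        # arithmetic keeps them as well
print(stacked.data.units, doubled.data.units)
```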
532373563 | MDExOlB1bGxSZXF1ZXN0MzQ4NjM2MTE1 | 3592 | Silence sphinx warnings: Round 2 | keewis 14808389 | closed | 0 | 7 | 2019-12-04T01:09:30Z | 2019-12-06T15:38:54Z | 2019-12-06T15:37:46Z | MEMBER | 0 | pydata/xarray/pulls/3592 | As a follow-up to #3516, this silences the last warnings without There is, however, a problem with the doctest output for the 3D result of Also, the complaints from #3516 about duplicate definitions of attributes were caused by the
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3592/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
520636106 | MDExOlB1bGxSZXF1ZXN0MzM5MTc1NTI3 | 3508 | add missing pint integration tests | keewis 14808389 | closed | 0 | 7 | 2019-11-10T18:13:49Z | 2019-11-10T23:42:14Z | 2019-11-10T23:41:52Z | MEMBER | 0 | pydata/xarray/pulls/3508 | This adds the tests that were missed by #3238 and #3447.
This should be ready to merge, but I'm almost certain that while going through #3238 and #3447 and trying to fix obvious mistakes I will find more missing tests. Missing tests:
* [x] |
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3508/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull | |||||
496826564 | MDExOlB1bGxSZXF1ZXN0MzIwMDc4MzQ1 | 3333 | More doc fixes | keewis 14808389 | closed | 0 | 7 | 2019-09-22T20:09:36Z | 2019-09-23T10:46:34Z | 2019-09-23T01:08:43Z | MEMBER | 0 | pydata/xarray/pulls/3333 | While going through the docs, I noticed several issues:
* in This PR tries to fix all of those, but while the first three issues are easy to fix, I'm not sure whether my proposed fix for the one involving
|
{ "url": "https://api.github.com/repos/pydata/xarray/issues/3333/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0 } |
xarray 13221727 | pull |
```sql
CREATE TABLE [issues] (
   [id] INTEGER PRIMARY KEY,
   [node_id] TEXT,
   [number] INTEGER,
   [title] TEXT,
   [user] INTEGER REFERENCES [users]([id]),
   [state] TEXT,
   [locked] INTEGER,
   [assignee] INTEGER REFERENCES [users]([id]),
   [milestone] INTEGER REFERENCES [milestones]([id]),
   [comments] INTEGER,
   [created_at] TEXT,
   [updated_at] TEXT,
   [closed_at] TEXT,
   [author_association] TEXT,
   [active_lock_reason] TEXT,
   [draft] INTEGER,
   [pull_request] TEXT,
   [body] TEXT,
   [reactions] TEXT,
   [performed_via_github_app] TEXT,
   [state_reason] TEXT,
   [repo] INTEGER REFERENCES [repos]([id]),
   [type] TEXT
);
CREATE INDEX [idx_issues_repo] ON [issues] ([repo]);
CREATE INDEX [idx_issues_milestone] ON [issues] ([milestone]);
CREATE INDEX [idx_issues_assignee] ON [issues] ([assignee]);
CREATE INDEX [idx_issues_user] ON [issues] ([user]);
```
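Given the schema above, the filter described at the top of this page (comments = 7, repo = 13221727, user = 14808389, sorted by updated_at descending) corresponds to a query along the following lines. The sketch assumes the table lives in a local SQLite file named `github.db`; that file name is hypothetical.

```python
# Re-run the page's row filter against the exported SQLite database.
import sqlite3

conn = sqlite3.connect("github.db")  # hypothetical path to the export
rows = conn.execute(
    """
    SELECT number, type, state, title, updated_at
    FROM issues
    WHERE comments = 7 AND repo = 13221727 AND [user] = 14808389
    ORDER BY updated_at DESC
    """
).fetchall()

for number, kind, state, title, updated_at in rows:
    print(f"#{number} ({kind}, {state}) {title} -- updated {updated_at}")
```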