id,node_id,number,state,locked,title,user,body,created_at,updated_at,closed_at,merged_at,merge_commit_sha,assignee,milestone,draft,head,base,author_association,auto_merge,repo,url,merged_by
57178535,MDExOlB1bGxSZXF1ZXN0NTcxNzg1MzU=,726,closed,0,Make import error of tokenize more explicit,7799184,"This [ImportError](https://github.com/pydata/xarray/blob/master/xarray/backends/api.py#L161-L165) is raised when using `open_mfdataset`, even though my installed version of dask is newer than 0.6:

```
---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-2-6d05f9a40585> in <module>()
----> 1 dset = xarray.open_mfdataset('/Users/rafaguedes/work/campos20150709_0*.nc')

/source/xarray/xarray/backends/api.pyc in open_mfdataset(paths, chunks, concat_dim, preprocess, engine, lock, **kwargs)
    297         lock = _default_lock(paths[0], engine)
    298     datasets = [open_dataset(p, engine=engine, chunks=chunks or {}, lock=lock,
--> 299                              **kwargs) for p in paths]
    300     file_objs = [ds._file_obj for ds in datasets]
    301

/source/xarray/xarray/backends/api.pyc in open_dataset(filename_or_obj, group, decode_cf, mask_and_scale, decode_times, concat_characters, decode_coords, engine, chunks, lock, drop_variables)
    222             lock = _default_lock(filename_or_obj, engine)
    223         with close_on_error(store):
--> 224             return maybe_decode_store(store, lock)
    225     else:
    226         if engine is not None and engine != 'scipy':

/source/xarray/xarray/backends/api.pyc in maybe_decode_store(store, lock)
    163             except ImportError:
    164                 import dask  # raise the usual error if dask is entirely missing
--> 165                 raise ImportError('xarray requires dask version 0.6 or newer')
    166
    167             if (isinstance(filename_or_obj, basestring) and

ImportError: xarray requires dask version 0.6 or newer
```

```
In [3]: import dask
In [4]: dask.__version__
Out[4]: '0.7.6'
```

This change ensures that the actual error raised by the missing dependency (here `toolz`) is displayed instead:

```
---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-2-6d05f9a40585> in <module>()
----> 1 dset = xarray.open_mfdataset('/Users/rafaguedes/work/campos20150709_0*.nc')

/source/xarray/xarray/backends/api.py in open_mfdataset(paths, chunks, concat_dim, preprocess, engine, lock, **kwargs)
    300         lock = _default_lock(paths[0], engine)
    301     datasets = [open_dataset(p, engine=engine, chunks=chunks or {}, lock=lock,
--> 302                              **kwargs) for p in paths]
    303     file_objs = [ds._file_obj for ds in datasets]
    304

/source/xarray/xarray/backends/api.py in open_dataset(filename_or_obj, group, decode_cf, mask_and_scale, decode_times, concat_characters, decode_coords, engine, chunks, lock, drop_variables)
    225             lock = _default_lock(filename_or_obj, engine)
    226         with close_on_error(store):
--> 227             return maybe_decode_store(store, lock)
    228     else:
    229         if engine is not None and engine != 'scipy':

/source/xarray/xarray/backends/api.py in maybe_decode_store(store, lock)
    166                     raise ImportError('xarray requires dask version 0.6 or newer')
    167                 else:
--> 168                     raise ImportError(err)
    169
    170             if (isinstance(filename_or_obj, basestring) and

ImportError: No module named toolz
```
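
For reference, the revised guard in `maybe_decode_store` ends up looking roughly like the sketch below. This is reconstructed from the tracebacks above, and the `LooseVersion` check is an assumption on my part, so it may not match the exact diff:

```python
from distutils.version import LooseVersion

try:
    from dask.base import tokenize  # tokenize is provided by dask >= 0.6
except ImportError as err:
    import dask  # still raises the usual error if dask is entirely missing
    if LooseVersion(dask.__version__) < LooseVersion('0.6'):
        # dask really is too old for xarray
        raise ImportError('xarray requires dask version 0.6 or newer')
    else:
        # dask is new enough, so the import failed for another reason
        # (e.g. a missing dependency such as toolz); surface that error.
        raise ImportError(err)
```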
",2016-01-26T07:46:21Z,2016-01-27T16:27:05Z,2016-01-27T16:26:55Z,2016-01-27T16:26:55Z,28ad2294d3fbdfbb284e44181aade7b412a9aa0d,,,0,8eda719c522cfce2f432258dd83022f259d4eeb6,66057e60201ac5a2e4cd37913220f2ea42e6f06e,CONTRIBUTOR,,13221727,https://github.com/pydata/xarray/pull/726,