Skip to content
forked from pydata/xarray

Commit

Permalink
Fix window test
Browse files Browse the repository at this point in the history
  • Loading branch information
dcherian committed Oct 29, 2019
1 parent 4ab6a66 commit 507b1f6
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 2 deletions.
2 changes: 1 addition & 1 deletion doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ Internal Changes
- Use Python 3.6 idioms throughout the codebase. (:pull:`3419`)
By `Maximilian Roos <https://github.com/max-sixty>`_
- Implement :py:func:`__dask_tokenize__` for xarray objects.
By `Deepak Cherian <https://github.com/dcherian>`_
By `Deepak Cherian <https://github.com/dcherian>`_ and `Guido Imperiale <https://github.com/crusaderky>`_.

.. _whats-new.0.14.0:

Expand Down
5 changes: 4 additions & 1 deletion xarray/tests/test_dask.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import operator
import pickle
import sys
from contextlib import suppress
from distutils.version import LooseVersion
from textwrap import dedent
Expand Down Expand Up @@ -28,6 +29,8 @@
da = pytest.importorskip("dask.array")
dd = pytest.importorskip("dask.dataframe")

ON_WINDOWS = sys.platform == "win32"


class CountingScheduler:
""" Simple dask scheduler counting the number of computes.
Expand Down Expand Up @@ -1186,7 +1189,7 @@ def test_normalize_token_identical(obj, transform):


def test_normalize_token_netcdf_backend(map_ds):
with create_tmp_file() as tmp_file:
with create_tmp_file(allow_cleanup_failure=ON_WINDOWS) as tmp_file:
map_ds.to_netcdf(tmp_file)
read = xr.open_dataset(tmp_file)
assert not dask.base.tokenize(map_ds) == dask.base.tokenize(read)

0 comments on commit 507b1f6

Please sign in to comment.