def setUp(self):
    """Point the fixture at the 'tiny' tutorial file and remove any
    previously cached copies (.nc and .md5) so each test starts clean."""
    self.testfile = 'tiny'
    parts = ('~', '.xarray_tutorial_data', self.testfile)
    self.testfilepath = os.path.expanduser(os.sep.join(parts))
    # Missing files are fine — we only care that no stale cache remains.
    for fmt in ('{}.nc', '{}.md5'):
        with suppress(OSError):
            os.remove(fmt.format(self.testfilepath))
def setUp(self):
    """Prepare the 'tiny' tutorial-data fixture, deleting stale cached
    downloads (the .nc data file and its .md5 checksum) if present."""
    self.testfile = 'tiny'
    relpath = os.sep.join(('~', '.xarray_tutorial_data', self.testfile))
    self.testfilepath = os.path.expanduser(relpath)
    # suppress(OSError): nothing to do when the cache files don't exist.
    with suppress(OSError):
        os.remove('{}.nc'.format(self.testfilepath))
    with suppress(OSError):
        os.remove('{}.md5'.format(self.testfilepath))
def test_dataset_getattr(self):
    # Probing a missing attribute on a Dataset must not compute any
    # dask-backed variable — neither a data variable nor a non-index
    # coordinate (kernel_call_count presumably tracks chunk evaluations
    # made by build_dask_array's kernel).
    dask_data = build_dask_array('data')
    dask_coord = build_dask_array('coord')
    ds = Dataset(
        data_vars={'a': ('x', dask_data)},
        coords={'y': ('x', dask_coord)},
    )
    with suppress(AttributeError):
        getattr(ds, 'NOTEXIST')
    assert kernel_call_count == 0
def test_dataset_getattr(self):
    # Accessing a nonexistent attribute must not trigger computation of
    # the dask backend in either the data variables or the non-index
    # coords (kernel_call_count is a module-level counter — presumably
    # bumped by build_dask_array's kernel; verify against that helper).
    data = build_dask_array('data')
    nonindex_coord = build_dask_array('coord')
    ds = Dataset(data_vars={'a': ('x', data)},
                 coords={'y': ('x', nonindex_coord)})
    with suppress(AttributeError):
        getattr(ds, 'NOTEXIST')
    # assertEquals is a deprecated unittest alias (removed in Py 3.12);
    # use a plain assert, matching the sibling tests in this file.
    assert kernel_call_count == 0
def test_dataarray_getattr(self):
    # ipython/jupyter issue a long series of getattr() calls when trying
    # to represent an object; make sure none of them accidentally
    # computes dask variables (kernel_call_count is a module-level
    # counter — presumably bumped by build_dask_array's kernel).
    data = build_dask_array('data')
    nonindex_coord = build_dask_array('coord')
    a = DataArray(data, dims=['x'],
                  coords={'y': ('x', nonindex_coord)})
    with suppress(AttributeError):
        getattr(a, 'NOTEXIST')
    # assertEquals is a deprecated unittest alias (removed in Py 3.12);
    # use a plain assert, matching the sibling tests in this file.
    assert kernel_call_count == 0
def test_dataarray_getattr(self):
    # Representing an object in ipython/jupyter triggers many getattr()
    # probes; none of them should accidentally compute dask variables,
    # whether backing the data or a non-index coordinate.
    dask_data = build_dask_array('data')
    dask_coord = build_dask_array('coord')
    arr = DataArray(
        dask_data,
        dims=['x'],
        coords={'y': ('x', dask_coord)},
    )
    with suppress(AttributeError):
        getattr(arr, 'NOTEXIST')
    assert kernel_call_count == 0
from __future__ import division from __future__ import print_function import pickle import numpy as np import pandas as pd import pytest import xarray as xr from xarray import Variable, DataArray, Dataset import xarray.ufuncs as xu from xarray.core.pycompat import suppress from . import TestCase, requires_dask from xarray.tests import unittest, mock with suppress(ImportError): import dask import dask.array as da class DaskTestCase(TestCase): def assertLazyAnd(self, expected, actual, test): with dask.set_options(get=dask.get): test(actual, expected) if isinstance(actual, Dataset): for k, v in actual.variables.items(): if k in actual.dims: self.assertIsInstance(var.data, np.ndarray) else: self.assertIsInstance(var.data, da.Array) elif isinstance(actual, DataArray):
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import numpy as np import pytest from xarray import Variable from xarray.coding import strings from xarray.core import indexing from xarray.core.pycompat import bytes_type, suppress, unicode_type from . import ( IndexerMaker, assert_array_equal, assert_identical, raises_regex, requires_dask) with suppress(ImportError): import dask.array as da def test_vlen_dtype(): dtype = strings.create_vlen_dtype(unicode_type) assert dtype.metadata['element_type'] == unicode_type assert strings.is_unicode_dtype(dtype) assert not strings.is_bytes_dtype(dtype) assert strings.check_vlen_dtype(dtype) is unicode_type dtype = strings.create_vlen_dtype(bytes_type) assert dtype.metadata['element_type'] == bytes_type assert not strings.is_unicode_dtype(dtype) assert strings.is_bytes_dtype(dtype) assert strings.check_vlen_dtype(dtype) is bytes_type