def test_tile(tmpdir, chunks, buffer):
    """Tile ``ds`` to disk, then verify tile count, tile sizes, attribute
    preservation, and that the tiles recombine to the original dataset.
    """
    out_dir = tmpdir / 'tiles'
    tiling.tile(ds, str(out_dir), chunks=chunks, buffer=buffer)

    # Normalize the buffer spec into a per-dimension mapping.
    if isinstance(buffer, int):
        buf_per_dim = dict.fromkeys(ds.dims.keys(), buffer)
    else:
        buf_per_dim = {dim: buffer.get(dim, 0) for dim in chunks.keys()}

    # The number of tiles written must equal the product of the per-dimension
    # chunk counts.
    expected_ntiles = np.prod([
        int(np.ceil(ds.dims[dim] / size))
        for dim, size in chunks.items()
    ])
    tile_files = [str(p) for p in out_dir.listdir()]
    assert len(tile_files) == expected_ntiles

    for path in tile_files:
        tile = open_dataset(path)
        # Global attributes must survive the tiling step.
        assert_equal(ds.attrs, tile.attrs)
        # Each tile may exceed the chunk size by at most the buffer applied
        # on both sides of the dimension.
        for dim, size in chunks.items():
            assert tile.dims[dim] <= size + 2 * buf_per_dim[dim]

    if buffer == 0 and len(chunks) == 1:
        # Unbuffered tiling along a single dimension can be recombined
        # directly by xarray's open_mfdataset.
        recombined = xr.open_mfdataset(tile_files, engine='h5netcdf').compute()
        assert_equal_data(ds, recombined)
    else:
        recombined = tiling.auto_merge(tile_files)
        assert_equal_data(ds, recombined)
def test_auto_merge_metadata():
    """auto_merge must lift a per-chunk attribute listed in ``meta_variables``
    into a coordinate-indexed variable on the merged dataset.
    """
    reference = generate_test_dataset(dims={'y': 20, 'x': 20, 'time': 10})
    # Copy each chunk; otherwise they share attrs and cannot be modified
    # independently.
    pieces = [chunk.copy() for chunk in utils.xr_split(reference, 'time', 5)]
    for idx, chunk in enumerate(pieces):
        chunk.attrs['part_number'] = idx
    # Expected outcome: the attribute becomes a time-indexed variable
    # (5 parts x 2 time steps each).
    reference['part_number'] = ('time', np.repeat(np.arange(5), 2))
    merged = tiling.auto_merge(pieces, meta_variables=['part_number'])
    xr_assert_equal(reference, merged)
def test_auto_merge_with_buffer(use_xarray):
    """Merging buffered parts must reproduce the original dataset, with
    either merge backend selected by ``use_xarray``.
    """
    merged = tiling.auto_merge(buffered_parts, use_xarray_combine=use_xarray)
    xr_assert_equal(ds, merged)
def test_auto_merge(use_xarray):
    """Merging unbuffered parts must reproduce the original dataset, with
    either merge backend selected by ``use_xarray``.
    """
    merged = tiling.auto_merge(parts, use_xarray_combine=use_xarray)
    xr_assert_equal(ds, merged)
def test_tile_and_merge(tmpdir, chunks, buffer):
    """Round-trip: tile ``ds`` to disk, then merge the tiles back via a
    glob pattern and compare against the original.
    """
    out_dir = tmpdir / 'tiles'
    tiling.tile(ds, str(out_dir), chunks=chunks, buffer=buffer)
    pattern = str(out_dir / '*.nc')
    xr_assert_equal(tiling.auto_merge(pattern), ds)
def test_auto_merge_with_buffer():
    """Merging buffered parts with default options must reproduce ``ds``.

    NOTE(review): same name as the ``use_xarray``-parametrized variant seen
    earlier in this source; if both live in one module the later definition
    shadows the earlier — confirm they belong to different test files.
    """
    result = tiling.auto_merge(buffered_parts)
    xr_assert_equal(ds, result)
def test_auto_merge():
    """Merging unbuffered parts with default options must reproduce ``ds``.

    NOTE(review): same name as the ``use_xarray``-parametrized variant seen
    earlier in this source; if both live in one module the later definition
    shadows the earlier — confirm they belong to different test files.
    """
    result = tiling.auto_merge(parts)
    xr_assert_equal(ds, result)