Example #1
0
def test_accessor_nd_to_netcdf(tmpdir, generator):
    """Writing via the ``.nd`` accessor must match ``io.to_netcdf``."""
    dataset = generator()
    file_a = str(tmpdir.join('ds1.nc'))
    file_b = str(tmpdir.join('ds2.nc'))

    # Write the same dataset through both code paths.
    io.to_netcdf(dataset, file_a)
    dataset.nd.to_netcdf(file_b)

    # Round-trip both files and compare their contents.
    xr_assert_equal(io.open_dataset(file_a), io.open_dataset(file_b))
Example #2
0
File: test_warp.py  Project: jnhansen/nd
def test_alignment(tmpdir, extent, from_files):
    """After ``warp.Alignment``, all outputs share bounds, transform,
    and x/y coordinates."""
    data_dir = tmpdir.mkdir('data')
    out_dir = tmpdir.mkdir('aligned')
    extents = [
        (-10.0, 50.0, 0.0, 60.0),
        (-12.0, 40.0, -2.0, 52.0),
        (-13.0, 50.0, -3.0, 60.0),
        (-9.0, 51.0, 1.0, 61.0)
    ]
    datasets = [generate_test_dataset(extent=e) for e in extents]
    # Expected bounds: either given explicitly or derived from the inputs.
    common_bounds = (warp.get_common_bounds(datasets)
                     if extent is None else extent)
    files = [str(data_dir.join('data_%d.nc' % i))
             for i in range(len(datasets))]
    if from_files:
        # Exercise the variant of Alignment that takes file paths.
        for ds, f in zip(datasets, files):
            to_netcdf(ds, f)
        datasets = files
    warp.Alignment(extent=extent).apply(datasets, path=str(out_dir))
    aligned = [open_dataset(str(f)) for f in out_dir.listdir()]
    reference = aligned[0]
    for ds in aligned:
        assert_equal(warp.get_bounds(ds), common_bounds)
        assert_equal(
            warp.get_transform(ds),
            warp.get_transform(reference)
        )
        xr_assert_equal(ds['x'], reference['x'])
        xr_assert_equal(ds['y'], reference['y'])
Example #3
0
def test_write_read_netcdf(tmpdir):
    """A complex-valued dataset survives a netCDF write/read round trip."""
    original = assemble_complex(generate_test_dataset())
    target = str(tmpdir.join('test_dataset.nc'))
    to_netcdf(original, target)
    restored = open_dataset(target)
    xr_assert_equal(original, restored)
Example #4
0
def test_tile(tmpdir, chunks, buffer):
    # Tiles `ds` (a module-level test dataset defined elsewhere in this
    # file) into netCDF files, then checks the tile count, the per-tile
    # sizes, and that merging the tiles reconstructs the original data.
    tile_path = tmpdir / 'tiles'
    tiling.tile(ds, str(tile_path), chunks=chunks, buffer=buffer)

    # Normalize `buffer` into a per-dimension dict: a scalar buffer applies
    # to every dataset dimension; a dict buffer defaults any chunked
    # dimension it omits to 0.
    if isinstance(buffer, int):
        buffer_dict = {dim: buffer for dim in ds.dims.keys()}
    else:
        buffer_dict = {
            dim: buffer[dim] if dim in buffer else 0
            for dim in chunks.keys()
        }

    # Check whether the correct number of tiles has been created
    # NOTE(review): the commented-out buffer term below was deliberately
    # disabled -- the expected count is ceil(size/chunk) per dimension,
    # independent of the buffer; confirm against tiling.tile's contract.
    nchunks = np.prod([
        int(np.ceil(ds.dims[dim] / n))
        #  - np.floor(buffer_dict[dim] / n))
        for dim, n in chunks.items()
    ])
    tile_files = list(map(str, tile_path.listdir()))
    assert len(tile_files) == nchunks

    for f in tile_files:
        t = open_dataset(f)
        # Global attributes must be carried over to every tile.
        assert_equal(ds.attrs, t.attrs)

        # Each tile is at most the chunk size plus a buffer on both sides.
        for dim, val in chunks.items():
            assert t.dims[dim] <= val + 2 * buffer_dict[dim]

    if buffer == 0 and len(chunks) == 1:
        # Without overlap, xarray can merge the tiles directly.
        mf_data = xr.open_mfdataset(tile_files, engine='h5netcdf').compute()
        assert_equal_data(ds, mf_data)

    else:
        # Overlapping (buffered) tiles need the project's auto_merge.
        merged = tiling.auto_merge(tile_files)
        assert_equal_data(ds, merged)
Example #5
0
def test_equal_datasets():
    """Every SLC file must share its grid metadata (coordinates,
    transform, CRS, resolution, bounds, extent) with the first one."""
    reference = open_dataset(slc_files[0])
    for path in slc_files[1:]:
        current = open_dataset(path)
        assert_equal(reference['x'].values, current['x'].values,
                     'x coordinates are not equal')
        assert_equal(reference['y'].values, current['y'].values,
                     'y coordinates are not equal')
        assert_equal(get_transform(reference), get_transform(current),
                     'transforms are not equal')
        assert_equal_crs(get_crs(reference), get_crs(current),
                         'CRS are not equal')
        assert_equal(get_resolution(reference), get_resolution(current),
                     'resolutions are not equal')
        assert_equal(get_bounds(reference), get_bounds(current),
                     'bounds are not equal')
        assert_equal(get_extent(reference), get_extent(current),
                     'extents are not equal')
        current.close()
    reference.close()
Example #6
0
def test_resolution_equal_transform_from_real_data(f):
    """The resolution reported for real data agrees with the pixel sizes
    encoded in the affine transform."""
    dataset = open_dataset(f)
    resolution = get_resolution(dataset)
    transform = get_transform(dataset)
    dataset.close()
    # (a, |e|) are the x and y pixel sizes of the affine transform.
    assert_almost_equal(resolution, (transform.a, abs(transform.e)))
Example #7
0
File: test_open.py  Project: jnhansen/nd
def test_equivalent_formats():
    # Opens the same scene stored in three formats (netCDF, GeoTIFF, BEAM
    # DIMAP -- the *_path constants are defined elsewhere in the file).
    # NOTE(review): no assertions are visible here; the snippet was likely
    # truncated by extraction -- verify against the full test_open.py.
    files = [nc_path, tif_path, dim_path]
    datasets = [open_dataset(f) for f in files]
Example #8
0
File: test_open.py  Project: jnhansen/nd
def test_open_dataset(f):
    """``open_dataset`` yields an xarray object for every supported input."""
    result = open_dataset(f)
    assert isinstance(result, (xr.Dataset, xr.DataArray))
    result.close()