def setUp(self) -> None:
    # Remove any cube left over from a previous run, then write a fresh test cube.
    rimraf(self.TEST_CUBE)
    cube = new_cube(time_periods=3,
                    variables=dict(precipitation=np.nan,
                                   temperature=np.nan)) \
        .chunk(dict(time=1, lat=90, lon=90))
    write_cube(cube, self.TEST_CUBE, "zarr", cube_asserted=True)
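# Every fixture in this section clears its outputs with rimraf() before and/or
# after the tests run. A minimal sketch of such a helper, assuming it only
# needs to delete files or directory trees and silently ignore paths that do
# not exist; the helper name here is illustrative and the implementation
# actually shipped with the code base may differ:
import os
import shutil


def rimraf_sketch(*paths):
    """Remove each given file or directory tree, ignoring missing paths."""
    for path in paths:
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        elif os.path.exists(path):
            try:
                os.remove(path)
            except OSError:
                pass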
@classmethod
def setUpClass(cls) -> None:
    # Start from a clean bucket directory, then write two identical test cubes into it.
    rimraf(S3_BUCKET)
    os.mkdir(S3_BUCKET)
    cube = new_cube(time_periods=3,
                    variables=dict(precipitation=0.9,
                                   temperature=278.3)) \
        .chunk(dict(time=1, lat=90, lon=90))
    write_cube(cube, TEST_CUBE_1, "zarr", cube_asserted=True)
    write_cube(cube, TEST_CUBE_2, "zarr", cube_asserted=True)
def step8(input_slice):
    # Unless this is a dry run, replace any previous output, write the current
    # slice, and update the cube's global attributes; the slice is passed on
    # unchanged either way.
    if not dry_run:
        rimraf(output_path)
        output_writer.write(input_slice, output_path, **output_writer_params)
        _update_cube_attrs(output_writer, output_path,
                           global_attrs=output_metadata,
                           temporal_only=False)
    return input_slice
def setUp(self) -> None:
    # Remove any previous cube, then write one with _FillValue encoding
    # disabled for both data variables.
    rimraf(self.TEST_CUBE)
    cube = new_cube(time_periods=3,
                    variables=dict(precipitation=np.nan,
                                   temperature=np.nan)) \
        .chunk(dict(time=1, lat=90, lon=90))
    fv_encoding = dict(_FillValue=None)
    encoding = dict(precipitation=fv_encoding,
                    temperature=fv_encoding)
    cube.to_zarr(self.TEST_CUBE, encoding=encoding)
def setUp(self):
    rimraf(self.TEST_ZARR)
    cube = new_cube(variables=dict(A=0.5, B=-1.5))
    cube = chunk_dataset(cube,
                         chunk_sizes=dict(time=1, lat=90, lon=90),
                         format_name=FORMAT_NAME_ZARR)
    cube.to_zarr(self.TEST_ZARR)
    # With chunk sizes (time=1, lat=90, lon=90) each data variable is split
    # into a 5 x 2 x 4 chunk grid, i.e. 40 chunk files plus '.zarray' and '.zattrs'.
    self.chunked_a_files = {
        '.zarray', '.zattrs',
        '0.0.0', '0.0.1', '0.0.2', '0.0.3', '0.1.0', '0.1.1', '0.1.2', '0.1.3',
        '1.0.0', '1.0.1', '1.0.2', '1.0.3', '1.1.0', '1.1.1', '1.1.2', '1.1.3',
        '2.0.0', '2.0.1', '2.0.2', '2.0.3', '2.1.0', '2.1.1', '2.1.2', '2.1.3',
        '3.0.0', '3.0.1', '3.0.2', '3.0.3', '3.1.0', '3.1.1', '3.1.2', '3.1.3',
        '4.0.0', '4.0.1', '4.0.2', '4.0.3', '4.1.0', '4.1.1', '4.1.2', '4.1.3'
    }
    self.chunked_b_files = self.chunked_a_files
    self.chunked_time_files = {'.zarray', '.zattrs', '0', '1', '2', '3', '4'}
    self.chunked_lat_files = {'.zarray', '.zattrs', '0', '1'}
    self.chunked_lon_files = {'.zarray', '.zattrs', '0', '1', '2', '3'}
def clean_up():
    files = ['l2c-single.nc', 'l2c-single.zarr',
             'l2c.nc', 'l2c.zarr',
             'l2c_1x80x60.zarr', 'l2c_1x80x80.zarr']
    for file in files:
        rimraf(file)
        rimraf(file + '.temp.nc')  # May remain from Netcdf4DatasetIO.append()
    rimraf(get_inputdata_path("input.txt"))
def _assert_io_ok(self, shape, tile_shape, link_input: bool,
                  expected_num_levels, expected_shapes, expected_chunks):
    input_path = get_path("pyramid-input.zarr")
    output_path = get_path("pyramid-output")
    rimraf(input_path)
    rimraf(output_path)
    try:
        dataset = self.create_test_dataset(shape, chunks=(1, *tile_shape))
        dataset.to_zarr(input_path)

        t0 = time.perf_counter()
        levels = write_levels(output_path,
                              dataset=dataset,
                              spatial_tile_shape=tile_shape,
                              input_path=input_path,
                              link_input=link_input)
        print("write time total:", time.perf_counter() - t0)
        self._assert_levels_ok(levels, expected_num_levels,
                               expected_shapes, expected_chunks)

        t0 = time.perf_counter()
        levels = read_levels(output_path)
        print("read time total:", time.perf_counter() - t0)
        self._assert_levels_ok(levels, expected_num_levels,
                               expected_shapes, expected_chunks)
    finally:
        rimraf(input_path)
        rimraf(output_path)
@classmethod
def tearDownClass(cls) -> None:
    rimraf(S3_BUCKET)
def setUp(self) -> None:
    rimraf(self.CUBE_PATH)
def tearDown(self) -> None:
    rimraf(result_file, result_zarr, result_levels)
def setUp(self) -> None:
    rimraf(result_file, result_zarr, result_levels)
def setUp(self):
    # Remove all outputs from previous runs, then (re)create the test inputs.
    rimraf(TEST_CUBE_ZARR)
    rimraf(TEST_CUBE_ZARR_COORDS)
    rimraf(TEST_NEW_META_YML)
    rimraf(TEST_CUBE_ZARR_EDIT)
    rimraf(TEST_CUBE_ZARR_OPTIMIZED)
    rimraf(TEST_CUBE_ZARR_OPTIMIZED_EDIT)
    TEST_CUBE.to_zarr(TEST_CUBE_ZARR)
    TEST_CUBE_COORDS.to_zarr(TEST_CUBE_ZARR_COORDS)
    with open(TEST_NEW_META_YML, 'w') as outfile:
        yaml.dump(TEST_NEW_META, outfile, default_flow_style=False)
def tearDown(self):
    rimraf(self.TEST_ZARR)
def tearDown(self):
    rimraf(TEST_CUBE_ZARR)
def tearDown(self) -> None:
    rimraf('_request.json')
    rimraf('_request.yaml')
    MemoryDataStore.replace_global_data_dict(self.saved_cube_memory)
def tearDown(self) -> None:
    rimraf('store-dump.json')
    rimraf('store-conf.yml')
    rimraf('out.json')
def tearDown(self) -> None:
    rimraf('store-dump.json')
    rimraf('store-dump.yml')
    rimraf('store-dump.csv')
    rimraf('out.json')
    rimraf('out.jml')
    rimraf('out.txt')
@classmethod
def tearDownClass(cls) -> None:
    rimraf('store-conf.json')
    rimraf('store-conf.yml')
def tearDown(self) -> None:
    rimraf(self.TEST_CUBE)
def tearDown(self) -> None:
    rimraf(self.CUBE_PATH)
    rimraf(self.CUBE_PATH_2)
def tearDown(self):
    rimraf(TEST_CUBE_ZARR)
    rimraf(TEST_CUBE_ZARR_COORDS)
    rimraf(TEST_NEW_META_YML)
    rimraf(TEST_CUBE_ZARR_EDIT)
    rimraf(TEST_CUBE_ZARR_OPTIMIZED)
    rimraf(TEST_CUBE_ZARR_OPTIMIZED_EDIT)
def delete_data(self, data_id: str):
    if not os.path.exists(data_id):
        raise DataStoreError(f'A dataset named "{data_id}" does not exist')
    rimraf(data_id)
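# A hedged usage sketch for the delete_data() method above: it should remove an
# existing dataset directory and raise DataStoreError once the path is gone.
# The self.store attribute and the test name are assumptions for illustration;
# TEST_CUBE_ZARR and DataStoreError are taken from the surrounding code.
def test_delete_data_removes_existing_and_rejects_missing(self):
    self.store.delete_data(TEST_CUBE_ZARR)
    self.assertFalse(os.path.exists(TEST_CUBE_ZARR))
    with self.assertRaises(DataStoreError):
        self.store.delete_data(TEST_CUBE_ZARR)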
def setUp(self):
    rimraf(TEST_CUBE_ZARR)
    TEST_CUBE.to_zarr(TEST_CUBE_ZARR)
def setUp(self) -> None:
    rimraf(self.TEST_CUBE)
def _rm_outputs(self):
    for path in [TEST_ZARR_DIR, TEST_NC_FILE] + self.outputs():
        rimraf(path)
def clean_up():
    files = ['l2c-single.nc', 'l2c.nc', 'l2c.zarr']
    for file in files:
        rimraf(os.path.join('.', file))
        rimraf(os.path.join('.', file + 'temp.nc'))
def _clear_outputs(self):
    rimraf(INPUT_CUBE_PATH)
    rimraf(OUTPUT_CUBE_PATH)
def clean_up():
    files = [INPUT_FILE, OUTPUT_FILE, OUTPUT_ZARR, OUTPUT_ZARR_tiled]
    for file in files:
        rimraf(os.path.join('.', file))
def _clear_outputs(self):
    rimraf(TEST_CUBE_ZARR)
    rimraf(TEST_CUBE_ZARR_EDIT)
    rimraf(TEST_NEW_META_YML)
    rimraf(TEST_CUBE_ZARR_EDIT_DEFAULT)
    rimraf(TEST_CUBE_ZARR_COORDS)
def _clear_outputs(self):
    rimraf(INPUT_CUBE_PATH)
    rimraf(OUTPUT_CUBE_OPTIMIZED_DEFAULT_PATH)
    rimraf(OUTPUT_CUBE_OPTIMIZED_USER_PATH)