def _save_cat(file_name, radar, lidar, model, obs, keep_uuid):
    """Creates a categorize netCDF4 file and saves all data into it."""
    dimensions = {
        'time': len(radar.time),
        'height': len(radar.height),
        'model_time': len(model.time),
        'model_height': len(model.mean_height),
    }
    rootgrp = output.init_file(file_name, dimensions, obs, keep_uuid)
    uuid = rootgrp.file_uuid
    output.add_file_type(rootgrp, 'categorize')
    # Copy date/site metadata from the radar file into the new file.
    output.copy_global(rootgrp_source := radar.dataset, rootgrp,
                       ('year', 'month', 'day', 'location'))
    rootgrp.title = f"Categorize file from {radar.location}"
    # Needs to solve how to provide institution
    # rootgrp.institution = f"Data processed at {config.INSTITUTE}"
    rootgrp.references = 'https://doi.org/10.1175/BAMS-88-6-883'
    output.merge_history(rootgrp, 'categorize', radar, lidar)
    # Probably should include mwr and model source if existing
    rootgrp.source = f"radar: {radar.source}\nlidar: {lidar.source}"
    rootgrp.close()
    return uuid
def test_merge_history():
    """merge_history prepends a timestamped creation line to both source histories."""
    target = RootGrp()
    first = RootGrp()
    first.dataset.history = 'some history x'
    second = RootGrp()
    second.dataset.history = 'some history y'
    output.merge_history(target, 'dummy', first, second)
    # First 19 characters should form a valid timestamp.
    leading = target.history[:19]
    assert utils.is_timestamp(f"-{leading}") is True
    # Remainder: the creation note followed by the original histories in order.
    trailing = ' - dummy file created\nsome history x\nsome history y'
    assert target.history[19:] == trailing
def test_merge_history():
    """merge_history builds a timestamped history from a dict of source objects."""
    target = RootGrp()
    first = RootGrp()
    first.dataset.history = "20:00 some history x"
    second = RootGrp()
    second.dataset.history = "21:00 some history y"
    output.merge_history(target, "dummy", {"a": first, "b": second})
    merged = str(target.history)
    # Leading 19 characters must be a valid timestamp.
    assert utils.is_timestamp(f"-{merged[:19]}") is True
    # Source histories are appended newest-first after the creation note.
    expected_tail = (
        " +00:00 - dummy file created\n21:00 some history y\n20:00 some history x"
    )
    assert merged[19:] == expected_tail
def _save_cat(full_path: str, data_obs: dict, cloudnet_arrays: dict,
              keep_uuid: bool, uuid: Union[str, None]) -> str:
    """Creates a categorize netCDF4 file and saves all data into it."""
    radar = data_obs['radar']
    model = data_obs['model']
    dims = {
        'time': len(radar.time),
        'height': len(radar.height),
        'model_time': len(model.time),
        'model_height': len(model.mean_height),
    }
    file_type = 'categorize'
    nc = output.init_file(full_path, dims, cloudnet_arrays, keep_uuid, uuid)
    uuid = nc.file_uuid
    output.add_file_type(nc, file_type)
    # Date and site metadata come straight from the radar file.
    output.copy_global(radar.dataset, nc, ('year', 'month', 'day', 'location'))
    nc.title = f"{file_type.capitalize()} file from {radar.location}"
    nc.source_file_uuids = output.get_source_uuids(*data_obs.values())
    output.add_references(nc, file_type)
    output.merge_history(nc, file_type, radar, data_obs['lidar'])
    nc.close()
    return uuid
def _save_cat(full_path: str, data_obs: dict, cloudnet_arrays: dict,
              uuid: Union[str, None]) -> str:
    """Creates a categorize netCDF4 file and saves all data into it."""
    radar = data_obs["radar"]
    model = data_obs["model"]
    dims = {
        "time": len(radar.time),
        "height": len(radar.height),
        "model_time": len(model.time),
        "model_height": len(model.mean_height),
    }
    file_type = "categorize"
    nc = output.init_file(full_path, dims, cloudnet_arrays, uuid)
    uuid_out = nc.file_uuid
    nc.cloudnet_file_type = file_type
    # Date and site metadata come straight from the radar file.
    output.copy_global(radar.dataset, nc, ("year", "month", "day", "location"))
    nc.title = f"Cloud categorization products from {radar.location}"
    nc.source_file_uuids = output.get_source_uuids(*data_obs.values())
    nc.references = output.get_references(file_type)
    output.add_source_instruments(nc, data_obs)
    output.merge_history(nc, file_type, data_obs)
    nc.close()
    return uuid_out