Example #1
def _save_cat(file_name, radar, lidar, model, obs, keep_uuid):
    """Creates a categorize netCDF4 file and saves all data into it."""
    def _merge_source():
        # Should probably also include mwr and model sources if they exist
        rootgrp.source = f"radar: {radar.source}\nlidar: {lidar.source}"

    dims = {
        'time': len(radar.time),
        'height': len(radar.height),
        'model_time': len(model.time),
        'model_height': len(model.mean_height)
    }
    rootgrp = output.init_file(file_name, dims, obs, keep_uuid)
    uuid = rootgrp.file_uuid
    output.add_file_type(rootgrp, 'categorize')
    output.copy_global(radar.dataset, rootgrp,
                       ('year', 'month', 'day', 'location'))
    rootgrp.title = f"Categorize file from {radar.location}"
    # Needs to solve how to provide institution
    # rootgrp.institution = f"Data processed at {config.INSTITUTE}"
    rootgrp.references = 'https://doi.org/10.1175/BAMS-88-6-883'
    output.merge_history(rootgrp, 'categorize', radar, lidar)
    _merge_source()
    rootgrp.close()
    return uuid
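
Both this example and the next rely on output.copy_global, which copies selected global attributes from a source netCDF dataset into the file being written. A minimal sketch of such a helper, assuming plain netCDF4.Dataset objects on both sides (a sketch only, not necessarily the library's exact implementation):

import netCDF4


def copy_global(source: netCDF4.Dataset, target: netCDF4.Dataset, attributes: tuple) -> None:
    """Copies the requested global attributes, skipping any that are missing."""
    for attr in attributes:
        if hasattr(source, attr):
            setattr(target, attr, getattr(source, attr))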
Example #2
def test_copy_global(tmpdir_factory, fake_nc_file):
    file = tmpdir_factory.mktemp("data").join("nc_file.nc")
    root_grp = netCDF4.Dataset(file, "w", format="NETCDF4_CLASSIC")
    attr_list = ('a', 'b', 'c')
    source = netCDF4.Dataset(fake_nc_file)
    output.copy_global(source, root_grp, attr_list)
    for attr in attr_list:
        assert getattr(root_grp, attr) == getattr(source, attr)
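
The test above depends on a fake_nc_file fixture that is not shown. A hypothetical fixture that would satisfy it writes a small NETCDF4_CLASSIC file carrying the global attributes 'a', 'b' and 'c'; the fixture body and attribute values below are illustrative assumptions:

import netCDF4
import pytest


@pytest.fixture(scope="session")
def fake_nc_file(tmpdir_factory):
    """Creates a throwaway netCDF file with a few global attributes."""
    path = str(tmpdir_factory.mktemp("data").join("fake.nc"))
    with netCDF4.Dataset(path, "w", format="NETCDF4_CLASSIC") as nc:
        nc.a, nc.b, nc.c = "1", "2", "3"
    return path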
Example #3
def _save_cat(full_path: str, data_obs: dict, cloudnet_arrays: dict,
              keep_uuid: bool, uuid: Union[str, None]) -> str:
    """Creates a categorize netCDF4 file and saves all data into it."""

    dims = {
        'time': len(data_obs['radar'].time),
        'height': len(data_obs['radar'].height),
        'model_time': len(data_obs['model'].time),
        'model_height': len(data_obs['model'].mean_height)
    }

    file_type = 'categorize'
    nc = output.init_file(full_path, dims, cloudnet_arrays, keep_uuid, uuid)
    uuid = nc.file_uuid
    output.add_file_type(nc, file_type)
    output.copy_global(data_obs['radar'].dataset, nc,
                       ('year', 'month', 'day', 'location'))
    nc.title = f"{file_type.capitalize()} file from {data_obs['radar'].location}"
    nc.source_file_uuids = output.get_source_uuids(*data_obs.values())
    output.add_references(nc, file_type)
    output.merge_history(nc, file_type, data_obs['radar'], data_obs['lidar'])
    nc.close()
    return uuid
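
Compared with Example #1, this version also records the UUIDs of its source files via output.get_source_uuids. A rough sketch of what such a helper could do, assuming each data object exposes its source netCDF file as .dataset with a file_uuid attribute (an assumption, not the library's confirmed implementation):

def get_source_uuids(*data_objects) -> str:
    """Collects unique file_uuid attributes from the underlying source files."""
    uuids = {obj.dataset.file_uuid for obj in data_objects
             if hasattr(obj, 'dataset') and hasattr(obj.dataset, 'file_uuid')}
    return ', '.join(sorted(uuids))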
Example #4
def _save_cat(full_path: str, data_obs: dict, cloudnet_arrays: dict,
              uuid: Union[str, None]) -> str:
    """Creates a categorize netCDF4 file and saves all data into it."""

    dims = {
        "time": len(data_obs["radar"].time),
        "height": len(data_obs["radar"].height),
        "model_time": len(data_obs["model"].time),
        "model_height": len(data_obs["model"].mean_height),
    }

    file_type = "categorize"
    nc = output.init_file(full_path, dims, cloudnet_arrays, uuid)
    uuid_out = nc.file_uuid
    nc.cloudnet_file_type = file_type
    output.copy_global(data_obs["radar"].dataset, nc,
                       ("year", "month", "day", "location"))
    nc.title = f"Cloud categorization products from {data_obs['radar'].location}"
    nc.source_file_uuids = output.get_source_uuids(*data_obs.values())
    nc.references = output.get_references(file_type)
    output.add_source_instruments(nc, data_obs)
    output.merge_history(nc, file_type, data_obs)
    nc.close()
    return uuid_out
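
A hypothetical call site for this last version, shown only to illustrate the expected shape of the arguments (all names and the file name below are illustrative, not taken from the library):

# data_obs maps instrument/model names to objects wrapping their netCDF data;
# cloudnet_arrays maps output variable names to array-like payloads
# (here assuming each object exposes its arrays via a .data attribute).
data_obs = {"radar": radar, "lidar": lidar, "mwr": mwr, "model": model}
cloudnet_arrays = {key: obj.data for key, obj in data_obs.items()}
file_uuid = _save_cat("20201022_example-site_categorize.nc",
                      data_obs, cloudnet_arrays, uuid=None)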