Example #1
def rpg2nc(
    path_to_l1_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Converts RPG-FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes a netCDF file.

    Args:
        path_to_l1_files: Folder containing one day of RPG LV1 files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the
            site. Required key value pairs are `altitude` (metres above mean
            sea level) and `name`.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = utils.get_sorted_filenames(path_to_l1_files, ".LV1")
    fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
    if not valid_files:
        return "", []
    one_day_of_data = create_one_day_data_record(fmcw94_objects)
    print_info(one_day_of_data)
    fmcw = Fmcw(one_day_of_data, site_meta)
    fmcw.convert_time_to_fraction_hour()
    fmcw.mask_invalid_ldr()
    fmcw.mask_invalid_width()
    fmcw.sort_timestamps()
    fmcw.remove_duplicate_timestamps()
    general.linear_to_db(fmcw, ("Zh", "antenna_gain"))
    general.add_site_geolocation(fmcw)
    fmcw.add_solar_angles()
    general.add_height(fmcw)
    attributes = output.add_time_attribute(RPG_ATTRIBUTES, fmcw.date)
    output.update_attributes(fmcw.data, attributes)
    uuid = output.save_level1b(fmcw, output_file, uuid)
    return uuid, valid_files
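A minimal usage sketch for rpg2nc above, assuming ValidTimeStampError is importable from cloudnetpy.exceptions; the folder path and date are placeholders:

# Hedged sketch: unpack the returned UUID and list of processed files, restrict the
# conversion to a single date, and handle the error case documented in the docstring.
from cloudnetpy.instruments import rpg2nc
from cloudnetpy.exceptions import ValidTimeStampError  # assumed import location

site_meta = {"name": "Hyytiala", "altitude": 174}
try:
    file_uuid, used_files = rpg2nc("/path/to/lv1/files/", "rpg.nc", site_meta, date="2021-01-01")
    print(f"Used {len(used_files)} files, output UUID: {file_uuid}")
except ValidTimeStampError:
    print("No valid timestamps found for the requested date")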
Example #2
def hatpro2nc(
    path_to_lwp_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Converts RPG HATPRO microwave radiometer data (LWP) into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG HATPRO .LWP binary files,
    concatenates the data and writes it into a netCDF file.

    Args:
        path_to_lwp_files: Folder containing one day of RPG HATPRO files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site with keys:

            - `name`: Name of the site (required)
            - `altitude`: Site altitude in [m] (optional).
            - `latitude` (optional).
            - `longitude` (optional).

        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import hatpro2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)

    """
    all_files = utils.get_sorted_filenames(path_to_lwp_files, ".LWP")
    hatpro_objects, valid_files = _get_hatpro_objects(all_files, date)
    if not valid_files:
        raise ValidTimeStampError
    one_day_of_data = rpg.create_one_day_data_record(hatpro_objects)
    hatpro = rpg.Hatpro(one_day_of_data, site_meta)
    hatpro.sort_timestamps()
    hatpro.convert_time_to_fraction_hour("float64")
    general.add_site_geolocation(hatpro)
    hatpro.remove_duplicate_timestamps()
    attributes = output.add_time_attribute(ATTRIBUTES, hatpro.date)
    output.update_attributes(hatpro.data, attributes)
    uuid = output.save_level1b(hatpro, output_file, uuid)
    return uuid, valid_files
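A similar hedged sketch for hatpro2nc, adding the optional geolocation keys; the path, date and coordinates are approximate placeholders:

from cloudnetpy.instruments import hatpro2nc

# Only `name` is required; altitude, latitude and longitude are optional extras.
site_meta = {"name": "Hyytiala", "altitude": 174, "latitude": 61.84, "longitude": 24.29}
file_uuid, used_files = hatpro2nc("/path/to/lwp/files/", "hatpro.nc", site_meta, date="2021-01-01")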
Example #3
def basta2nc(
    basta_file: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts BASTA cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts a daily BASTA file into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        basta_file: Filename of a daily BASTA .nc file.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps do not match the expected date.

    Examples:
        >>> from cloudnetpy.instruments import basta2nc
        >>> site_meta = {'name': 'Palaiseau', 'latitude': 48.718, 'longitude': 2.207}
        >>> basta2nc('basta_file.nc', 'radar.nc', site_meta)

    """
    keymap = {
        "reflectivity": "Zh",
        "velocity": "v",
        "radar_pitch": "radar_pitch",
        "radar_yaw": "radar_yaw",
        "radar_roll": "radar_roll",
    }

    basta = Basta(basta_file, site_meta)
    basta.init_data(keymap)
    if date is not None:
        basta.validate_date(date)
    basta.screen_data(keymap)
    basta.add_time_and_range()
    general.add_site_geolocation(basta)
    basta.add_zenith_angle()
    general.add_radar_specific_variables(basta)
    general.add_height(basta)
    basta.close()
    attributes = output.add_time_attribute(ATTRIBUTES, basta.date)
    output.update_attributes(basta.data, attributes)
    uuid = output.save_level1b(basta, output_file, uuid)
    return uuid
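A hedged sketch for basta2nc; the file names and date are placeholders. Passing a date exercises the ValueError documented above when the file's timestamps do not match:

from cloudnetpy.instruments import basta2nc

site_meta = {"name": "Palaiseau", "latitude": 48.718, "longitude": 2.207}
try:
    file_uuid = basta2nc("basta_daily.nc", "radar.nc", site_meta, date="2021-01-01")
except ValueError:
    print("Timestamps do not match the expected date")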
Example #4
def pollyxt2nc(
    input_folder: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """
    Converts PollyXT Raman lidar data into Cloudnet Level 1b netCDF file.

    Args:
        input_folder: Path to pollyxt netCDF files.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site with keys:

            - `name`: Name of the site (mandatory)
            - `altitude`: Site altitude in [m] (mandatory).
            - `latitude` (optional).
            - `longitude` (optional).
            - `zenith_angle`: Zenith angle of the instrument in degrees; defaults to 5 (optional).
            - `snr_limit`: SNR screening limit; defaults to 2 (optional).

        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Examples:
        >>> from cloudnetpy.instruments import pollyxt2nc
        >>> site_meta = {'name': 'Mindelo', 'altitude': 13, 'zenith_angle': 6, 'snr_limit': 3}
        >>> pollyxt2nc('/path/to/files/', 'pollyxt.nc', site_meta)

    """
    snr_limit = site_meta.get("snr_limit", 2)
    polly = PollyXt(site_meta, date)
    epoch = polly.fetch_data(input_folder)
    polly.get_date_and_time(epoch)
    polly.fetch_zenith_angle()
    polly.calc_screened_products(snr_limit)
    polly.mask_nan_values()
    polly.prepare_data()
    polly.data_to_cloudnet_arrays()
    attributes = output.add_time_attribute(ATTRIBUTES, polly.date)
    output.update_attributes(polly.data, attributes)
    polly.add_snr_info("beta", snr_limit)
    uuid = output.save_level1b(polly, output_file, uuid)
    return uuid
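A hedged sketch for pollyxt2nc using the optional screening keys; the path, date and values are placeholders:

from cloudnetpy.instruments import pollyxt2nc

# zenith_angle and snr_limit override the defaults (5 degrees and 2) described above.
site_meta = {"name": "Mindelo", "altitude": 13, "zenith_angle": 6, "snr_limit": 3}
file_uuid = pollyxt2nc("/path/to/pollyxt/files/", "pollyxt.nc", site_meta, date="2021-01-01")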
Example #5
def radiometrics2nc(
    full_path: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts Radiometrics .csv file into Cloudnet Level 1b netCDF file.

    Args:
        full_path: Input file name.
        output_file: Output file name, e.g. 'radiometrics.nc'.
        site_meta: Dictionary containing information about the site and instrument.
            Required key value pairs are `name` and `altitude` (metres above mean sea level).
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Examples:
        >>> from cloudnetpy.instruments import radiometrics2nc
        >>> site_meta = {'name': 'Soverato', 'altitude': 21}
        >>> radiometrics2nc('radiometrics.csv', 'radiometrics.nc', site_meta)

    """
    radiometrics = Radiometrics(full_path, site_meta)
    radiometrics.read_raw_data()
    radiometrics.read_lwp()
    radiometrics.read_timestamps()
    radiometrics.screen_time(date)
    radiometrics.data_to_cloudnet_arrays()
    radiometrics.add_meta()
    attributes = output.add_time_attribute({}, radiometrics.date)
    output.update_attributes(radiometrics.data, attributes)
    uuid = output.save_level1b(radiometrics, output_file, uuid)
    return uuid
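A hedged sketch for radiometrics2nc; the file names and date are placeholders:

from cloudnetpy.instruments import radiometrics2nc

site_meta = {"name": "Soverato", "altitude": 21}
# The optional date screens the profiles to the expected day (see screen_time above).
file_uuid = radiometrics2nc("radiometrics.csv", "radiometrics.nc", site_meta, date="2021-01-01")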
Example #6
def ceilo2nc(
    full_path: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts Vaisala / Lufft ceilometer data into Cloudnet Level 1b netCDF file.

    This function reads raw Vaisala (CT25k, CL31, CL51, CL61-D) and Lufft (CHM15k, CHM15kx)
    ceilometer files and writes the data into a netCDF file. Three variants
    of the attenuated backscatter are saved in the file:

        1. Raw backscatter, `beta_raw`
        2. Signal-to-noise screened backscatter, `beta`
        3. SNR-screened backscatter with smoothed weak background, `beta_smooth`

    With the CL61-D, `beta_raw` is not saved due to the large file size. Instead, two additional
    depolarisation parameters are saved:

        1. Signal-to-noise screened depolarisation, `depolarisation`
        2. SNR-screened depolarisation with smoothed weak background, `depolarisation_smooth`

    Args:
        full_path: Ceilometer file name. For Vaisala it is a text file, for CHM15k(x)
            it is a netCDF file.
        output_file: Output file name, e.g. 'ceilo.nc'.
        site_meta: Dictionary containing information about the site and instrument.
            Required key value pairs are `name` and `altitude` (metres above mean sea level).
            Also `calibration_factor` is recommended because the default value is probably
            incorrect. If the background noise is *not* range-corrected, you must set
            {'range_corrected': False}.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        RuntimeError: Failed to read or process raw ceilometer data.

    Examples:
        >>> from cloudnetpy.instruments import ceilo2nc
        >>> site_meta = {'name': 'Mace-Head', 'altitude': 5}
        >>> ceilo2nc('vaisala_raw.txt', 'vaisala.nc', site_meta)
        >>> site_meta = {'name': 'Juelich', 'altitude': 108, 'calibration_factor': 2.3e-12}
        >>> ceilo2nc('chm15k_raw.nc', 'chm15k.nc', site_meta)

    """
    snr_limit = 5
    ceilo_obj = _initialize_ceilo(full_path, site_meta, date)
    calibration_factor = site_meta.get("calibration_factor", None)
    range_corrected = site_meta.get("range_corrected", True)
    ceilo_obj.read_ceilometer_file(calibration_factor)
    ceilo_obj.data["beta"] = ceilo_obj.calc_screened_product(
        ceilo_obj.data["beta_raw"], snr_limit, range_corrected)
    ceilo_obj.data["beta_smooth"] = ceilo_obj.calc_beta_smooth(
        ceilo_obj.data["beta"], snr_limit, range_corrected)
    assert ceilo_obj.instrument is not None and hasattr(
        ceilo_obj.instrument, "model")
    if "cl61" in ceilo_obj.instrument.model.lower():
        ceilo_obj.data["depolarisation"].mask = ceilo_obj.data["beta"].mask
        ceilo_obj.remove_raw_data()
    ceilo_obj.screen_depol()
    ceilo_obj.prepare_data()
    ceilo_obj.data_to_cloudnet_arrays()
    attributes = output.add_time_attribute(ATTRIBUTES, ceilo_obj.date)
    output.update_attributes(ceilo_obj.data, attributes)
    for key in ("beta", "beta_smooth"):
        ceilo_obj.add_snr_info(key, snr_limit)
    uuid = output.save_level1b(ceilo_obj, output_file, uuid)
    return uuid
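A hedged sketch for ceilo2nc with the recommended calibration_factor; the file names and values are placeholders:

from cloudnetpy.instruments import ceilo2nc

site_meta = {
    "name": "Juelich",
    "altitude": 108,
    "calibration_factor": 2.3e-12,  # instrument-specific; the default is probably incorrect
    # "range_corrected": False,     # set only if the background noise is *not* range-corrected
}
try:
    file_uuid = ceilo2nc("chm15k_raw.nc", "chm15k.nc", site_meta)
except RuntimeError:
    print("Failed to read or process the raw ceilometer file")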
Example #7
def mira2nc(
    raw_mira: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts METEK MIRA-35 cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts raw MIRA file(s) into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can also be a folder containing several
            non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key value pair
            is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import mira2nc
        >>> site_meta = {'name': 'Vehmasmaki'}
        >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
        >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {
        "Zg": "Zh",
        "VELg": "v",
        "RMSg": "width",
        "LDRg": "ldr",
        "SNRg": "SNR",
        "elv": "elevation",
        "azi": "azimuth_angle",
        "aziv": "azimuth_velocity",
        "nfft": "nfft",
        "nave": "nave",
        "prf": "prf",
        "rg0": "rg0",
    }

    if os.path.isdir(raw_mira):
        temp_file = NamedTemporaryFile()  # pylint: disable=R1732
        mmclx_filename = temp_file.name
        valid_filenames = utils.get_sorted_filenames(raw_mira, ".mmclx")
        valid_filenames = general.get_files_with_common_range(valid_filenames)
        variables = list(keymap.keys())
        concat_lib.concatenate_files(valid_filenames,
                                     mmclx_filename,
                                     variables=variables)
    else:
        mmclx_filename = raw_mira

    mira = Mira(mmclx_filename, site_meta)
    mira.init_data(keymap)
    if date is not None:
        mira.screen_by_date(date)
        mira.date = date.split("-")
    mira.sort_timestamps()
    mira.remove_duplicate_timestamps()
    general.linear_to_db(mira, ("Zh", "ldr", "SNR"))
    mira.screen_by_snr()
    mira.mask_invalid_data()
    mira.add_time_and_range()
    general.add_site_geolocation(mira)
    general.add_radar_specific_variables(mira)
    valid_indices = mira.add_solar_angles()
    general.screen_time_indices(mira, valid_indices)
    general.add_height(mira)
    mira.close()
    attributes = output.add_time_attribute(ATTRIBUTES, mira.date)
    output.update_attributes(mira.data, attributes)
    uuid = output.save_level1b(mira, output_file, uuid)
    return uuid
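A hedged sketch for mira2nc, pointing it at a folder of non-concatenated .mmclx files and pinning the output UUID; the path, date and UUID are placeholders:

from cloudnetpy.instruments import mira2nc

site_meta = {"name": "Vehmasmaki"}
file_uuid = mira2nc(
    "/one/day/of/mira/mmclx/files/",  # a folder input triggers concatenation of the daily files
    "radar.nc",
    site_meta,
    uuid="00000000-0000-0000-0000-000000000000",  # placeholder; omit to generate a fresh UUID
    date="2021-01-01",
)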