Example 1
def generate_classification(categorize_file, output_file, keep_uuid=False):
    """Generates Cloudnet classification product.

    This function reads the initial classification masks from a
    categorize file and creates a more comprehensive classification
    for different atmospheric targets. The results are written in a
    netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.
        keep_uuid (bool, optional): If True, keeps the UUID of the old file,
            if that exists. Default is False, in which case a new UUID is generated.
    
    Returns:
        str: UUID of the generated file.

    Examples:
        >>> from cloudnetpy.products import generate_classification
        >>> generate_classification('categorize.nc', 'classification.nc')

    """
    data_handler = DataSource(categorize_file)
    categorize_bits = CategorizeBits(categorize_file)
    classification = _get_target_classification(categorize_bits)
    data_handler.append_data(classification, 'target_classification')
    status = _get_detection_status(categorize_bits)
    data_handler.append_data(status, 'detection_status')
    output.update_attributes(data_handler.data, CLASSIFICATION_ATTRIBUTES)
    uuid = output.save_product_file('classification', data_handler,
                                    output_file, keep_uuid)
    data_handler.close()
    return uuid
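
The function above writes two new variables, 'target_classification' and 'detection_status', into the output file. A minimal sketch of reading them back with the netCDF4 library (file name taken from the docstring example):

import netCDF4

# Hypothetical inspection of the product file written by generate_classification.
with netCDF4.Dataset('classification.nc') as nc:
    target_classification = nc.variables['target_classification'][:]
    detection_status = nc.variables['detection_status'][:]
    print(target_classification.shape, detection_status.shape)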
Example 2
def generate_iwc(categorize_file, output_file):
    """Generates Cloudnet ice water content product.

    This function calculates ice water content using the so-called Z-T method.
    In this method, ice water content is calculated from attenuation-corrected
    radar reflectivity and model temperature. The results are written in a
    netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.

    Examples:
        >>> from cloudnetpy.products import generate_iwc
        >>> generate_iwc('categorize.nc', 'iwc.nc')

    References:
        Hogan, R.J., M.P. Mittermaier, and A.J. Illingworth, 2006:
        The Retrieval of Ice Water Content from Radar Reflectivity Factor and
        Temperature and Its Use in Evaluating a Mesoscale Model.
        J. Appl. Meteor. Climatol., 45, 301–317, https://doi.org/10.1175/JAM2340.1

    """
    iwc_data = IwcSource(categorize_file)
    ice_class = _IceClassification(categorize_file)
    _append_iwc_including_rain(iwc_data, ice_class)
    _append_iwc(iwc_data, ice_class)
    _append_iwc_bias(iwc_data)
    _append_iwc_error(iwc_data, ice_class)
    _append_iwc_sensitivity(iwc_data)
    _append_iwc_status(iwc_data, ice_class)
    output.update_attributes(iwc_data.data, IWC_ATTRIBUTES)
    output.save_product_file('iwc', iwc_data, output_file)
    iwc_data.close()
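
The Z-T method referenced above (Hogan et al., 2006) models the base-10 logarithm of ice water content as a linear function of radar reflectivity Z, temperature T and their product. The sketch below shows only that functional form; the coefficient values are illustrative placeholders, not the frequency-dependent values CloudnetPy derives internally.

import numpy as np

def iwc_from_z_t(z_dbz, t_celsius, coeffs=(0.0006, 0.09, -0.01, -1.0)):
    """Illustrative Z-T regression: log10(IWC) = a*Z*T + b*Z + c*T + d."""
    a, b, c, d = coeffs  # placeholder coefficients, for demonstration only
    log10_iwc = a * z_dbz * t_celsius + b * z_dbz + c * t_celsius + d
    return 10 ** log10_iwc  # ice water content, g m-3

print(iwc_from_z_t(np.array([-15.0]), np.array([-20.0])))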
Example 3
def generate_classification(categorize_file, output_file):
    """Generates Cloudnet classification product.

    This function reads the initial classification masks from a
    categorize file and creates a more comprehensive classification
    for different atmospheric targets. The results are written in a
    netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.

    Examples:
        >>> from cloudnetpy.products import generate_classification
        >>> generate_classification('categorize.nc', 'classification.nc')

    """
    data_handler = DataSource(categorize_file)
    categorize_bits = CategorizeBits(categorize_file)
    classification = _get_target_classification(categorize_bits)
    data_handler.append_data(classification, 'target_classification')
    status = _get_detection_status(categorize_bits)
    data_handler.append_data(status, 'detection_status')
    output.update_attributes(data_handler.data, CLASSIFICATION_ATTRIBUTES)
    output.save_product_file('classification', data_handler, output_file)
    data_handler.close()
Example 4
def mira2nc(mmclx_file, output_file, site_meta, rebin_data=False):
    """Converts METEK MIRA-35 cloud radar Level 1 file into netCDF file.

    This function converts raw cloud radar file into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        mmclx_file (str): Raw radar file in netCDF format.
        output_file (str): Output file name.
        site_meta (dict): Dictionary containing information about the
            site. Required key value pair is `name`.
        rebin_data (bool, optional): If True, rebins data to 30s resolution.
            Otherwise keeps the native resolution. Default is False.

    Examples:
          >>> from cloudnetpy.instruments import mira2nc
          >>> site_meta = {'name': 'Vehmasmaki'}
          >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)

    """
    raw_mira = Mira(mmclx_file, site_meta)
    raw_mira.linear_to_db(('Ze', 'ldr', 'SNR'))
    if rebin_data:
        snr_gain = raw_mira.rebin_fields()
    else:
        snr_gain = 1
    raw_mira.screen_by_snr(snr_gain)
    raw_mira.add_meta()
    output.update_attributes(raw_mira.data, MIRA_ATTRIBUTES)
    _save_mira(mmclx_file, raw_mira, output_file)
Example 5
def rpg2nc(path_to_l1_files, output_file, site_meta):
    """Converts RPG cloud radar binary files into netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes it into netCDF file.

    Args:
        path_to_l1_files (str): Folder containing one day of RPG LV1 files.
        output_file (str): Output file name.
        site_meta (dict): Dictionary containing information about the
            site. Required key value pairs are `altitude` (metres above mean
            sea level) and `name`.

    Raises:
        RuntimeError: Failed to read the binary data.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = get_rpg_files(path_to_l1_files)
    one_day_of_data = _create_one_day_data_record(l1_files)
    rpg = Rpg(one_day_of_data, site_meta)
    rpg.linear_to_db(('Ze', 'antenna_gain'))
    output.update_attributes(rpg.data, RPG_ATTRIBUTES)
    _save_rpg(rpg, output_file)
Example 6
def generate_lwc(categorize_file, output_file):
    """Generates Cloudnet liquid water content product.

    This function calculates cloud liquid water content using the so-called
    adiabatic-scaled method. In this method, liquid water content measured by
    microwave radiometer is used to constrain the theoretical liquid water
    content of observed liquid clouds. The results are written in a netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.

    Examples:
        >>> from cloudnetpy.products import generate_lwc
        >>> generate_lwc('categorize.nc', 'lwc.nc')

    References:
        Illingworth, A.J., R.J. Hogan, E. O'Connor, D. Bouniol, M.E. Brooks,
        J. Delanoé, D.P. Donovan, J.D. Eastment, N. Gaussiat, J.W. Goddard,
        M. Haeffelin, H.K. Baltink, O.A. Krasnov, J. Pelon, J. Piriou, A. Protat,
        H.W. Russchenberg, A. Seifert, A.M. Tompkins, G. van Zadelhoff, F. Vinit,
        U. Willén, D.R. Wilson, and C.L. Wrench, 2007: Cloudnet.
        Bull. Amer. Meteor. Soc., 88, 883–898, https://doi.org/10.1175/BAMS-88-6-883

    """
    lwc_source = LwcSource(categorize_file)
    lwc_obj = Lwc(lwc_source)
    cloud_obj = LwcStatus(lwc_source, lwc_obj)
    error_obj = LwcError(lwc_source, lwc_obj)
    _append_data(lwc_source, lwc_obj, cloud_obj, error_obj)
    output.update_attributes(lwc_source.data, LWC_ATTRIBUTES)
    output.save_product_file('lwc', lwc_source, output_file,
                             copy_from_cat=('lwp', 'lwp_error'))
    lwc_source.close()
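
The adiabatic-scaled method described above amounts to scaling a theoretical adiabatic liquid water content profile so that its vertical integral matches the liquid water path measured by the microwave radiometer. A self-contained sketch of that scaling step (not the internal Lwc/LwcStatus implementation) is given below.

import numpy as np

def scale_adiabatic_lwc(lwc_adiabatic, height, lwp_measured):
    """Scale a theoretical adiabatic LWC profile (kg m-3) on a height grid (m)
    so that its integral equals the measured liquid water path (kg m-2)."""
    lwp_theoretical = np.trapz(lwc_adiabatic, height)
    if lwp_theoretical == 0:
        return np.zeros_like(lwc_adiabatic)
    return lwc_adiabatic * lwp_measured / lwp_theoretical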
Example 7
def rpg2nc(
    path_to_l1_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Converts RPG-FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes a netCDF file.

    Args:
        path_to_l1_files: Folder containing one day of RPG LV1 files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the
            site. Required key value pairs are `altitude` (metres above mean
            sea level) and `name`.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = utils.get_sorted_filenames(path_to_l1_files, ".LV1")
    fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
    one_day_of_data = create_one_day_data_record(fmcw94_objects)
    if not valid_files:
        return "", []
    print_info(one_day_of_data)
    fmcw = Fmcw(one_day_of_data, site_meta)
    fmcw.convert_time_to_fraction_hour()
    fmcw.mask_invalid_ldr()
    fmcw.mask_invalid_width()
    fmcw.sort_timestamps()
    fmcw.remove_duplicate_timestamps()
    general.linear_to_db(fmcw, ("Zh", "antenna_gain"))
    general.add_site_geolocation(fmcw)
    fmcw.add_solar_angles()
    general.add_height(fmcw)
    attributes = output.add_time_attribute(RPG_ATTRIBUTES, fmcw.date)
    output.update_attributes(fmcw.data, attributes)
    uuid = output.save_level1b(fmcw, output_file, uuid)
    return uuid, valid_files
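
Since this variant returns a 2-element tuple, a typical call unpacks both the UUID and the list of files used; the date below is an arbitrary illustrative value.

from cloudnetpy.instruments import rpg2nc

site_meta = {'name': 'Hyytiala', 'altitude': 174}
# Process only files matching the given date and keep the list of files used.
file_uuid, used_files = rpg2nc('/path/to/files/', 'radar.nc', site_meta,
                               date='2021-01-15')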
Example 8
def hatpro2nc(
    path_to_lwp_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Converts RPG HATPRO microwave radiometer data (LWP) into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG HATPRO .LWP binary files,
    concatenates the data and writes it into netCDF file.

    Args:
        path_to_lwp_files: Folder containing one day of RPG HATPRO files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site with keys:

            - `name`: Name of the site (required)
            - `altitude`: Site altitude in [m] (optional).
            - `latitude` (optional).
            - `longitude` (optional).

        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import hatpro2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)

    """
    all_files = utils.get_sorted_filenames(path_to_lwp_files, ".LWP")
    hatpro_objects, valid_files = _get_hatpro_objects(all_files, date)
    if not valid_files:
        raise ValidTimeStampError
    one_day_of_data = rpg.create_one_day_data_record(hatpro_objects)
    hatpro = rpg.Hatpro(one_day_of_data, site_meta)
    hatpro.sort_timestamps()
    hatpro.convert_time_to_fraction_hour("float64")
    general.add_site_geolocation(hatpro)
    hatpro.remove_duplicate_timestamps()
    attributes = output.add_time_attribute(ATTRIBUTES, hatpro.date)
    output.update_attributes(hatpro.data, attributes)
    uuid = output.save_level1b(hatpro, output_file, uuid)
    return uuid, valid_files
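
The optional site_meta keys listed above can be supplied alongside the required name; the altitude and coordinate values below are illustrative only.

from cloudnetpy.instruments import hatpro2nc

site_meta = {'name': 'Hyytiala',
             'altitude': 174,     # m above mean sea level (illustrative)
             'latitude': 61.84,   # illustrative
             'longitude': 24.29}  # illustrative
uuid, valid_files = hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)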
Example 9
def basta2nc(
    basta_file: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts BASTA cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts daily BASTA file into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        basta_file: Filename of a daily BASTA .nc file.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps do not match the expected date.

    Examples:
          >>> from cloudnetpy.instruments import basta2nc
          >>> site_meta = {'name': 'Palaiseau', 'latitude': 48.718, 'longitude': 2.207}
          >>> basta2nc('basta_file.nc', 'radar.nc', site_meta)

    """
    keymap = {
        "reflectivity": "Zh",
        "velocity": "v",
        "radar_pitch": "radar_pitch",
        "radar_yaw": "radar_yaw",
        "radar_roll": "radar_roll",
    }

    basta = Basta(basta_file, site_meta)
    basta.init_data(keymap)
    if date is not None:
        basta.validate_date(date)
    basta.screen_data(keymap)
    basta.add_time_and_range()
    general.add_site_geolocation(basta)
    basta.add_zenith_angle()
    general.add_radar_specific_variables(basta)
    general.add_height(basta)
    basta.close()
    attributes = output.add_time_attribute(ATTRIBUTES, basta.date)
    output.update_attributes(basta.data, attributes)
    uuid = output.save_level1b(basta, output_file, uuid)
    return uuid
Example 10
def rpg2nc(path_to_l1_files: str,
           output_file: str,
           site_meta: dict,
           keep_uuid: Optional[bool] = False,
           uuid: Optional[str] = None,
           date: Optional[str] = None) -> Tuple[str, list]:
    """Converts RPG FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes it into netCDF file.

    Args:
        path_to_l1_files: Folder containing one day of RPG LV1 files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the
            site. Required key value pairs are `altitude` (metres above mean
            sea level) and `name`.
        keep_uuid: If True, keeps the UUID of the old file,
            if that exists. Default is False, in which case a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        RuntimeError: Failed to read the binary data.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = utils.get_sorted_filenames(path_to_l1_files, '.LV1')
    fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
    one_day_of_data = create_one_day_data_record(fmcw94_objects)
    if not valid_files:
        return '', []
    rpg = Rpg(one_day_of_data, site_meta, 'RPG-FMCW-94')
    rpg.convert_time_to_fraction_hour()
    rpg.mask_invalid_ldr()
    rpg.linear_to_db(('Ze', 'antenna_gain'))
    rpg.add_height()
    attributes = output.add_time_attribute(RPG_ATTRIBUTES, rpg.date)
    output.update_attributes(rpg.data, attributes)
    return save_rpg(rpg, output_file, valid_files, keep_uuid, uuid)
Example 11
def generate_drizzle(categorize_file: str,
                     output_file: str,
                     keep_uuid: bool = False,
                     uuid: Union[str, None] = None) -> str:
    """Generates Cloudnet drizzle product.

    This function calculates different drizzle properties from
    cloud radar and lidar measurements. The results are written in a netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.
        keep_uuid (bool, optional): If True, keeps the UUID of the old file,
            if that exists. Default is False, in which case a new UUID is generated.
        uuid (str, optional): Set specific UUID for the file.

    Returns:
        str: UUID of the generated file.

    Examples:
        >>> from cloudnetpy.products import generate_drizzle
        >>> generate_drizzle('categorize.nc', 'drizzle.nc')

    References:
        O’Connor, E.J., R.J. Hogan, and A.J. Illingworth, 2005:
        Retrieving Stratocumulus Drizzle Parameters Using Doppler Radar and Lidar.
        J. Appl. Meteor., 44, 14–27, https://doi.org/10.1175/JAM-2181.1

    """
    drizzle_source = DrizzleSource(categorize_file)
    drizzle_class = DrizzleClassification(categorize_file)
    spectral_width = SpectralWidth(categorize_file)
    drizzle_solver = DrizzleSolver(drizzle_source, drizzle_class,
                                   spectral_width)
    derived_products = DrizzleProducts(drizzle_source, drizzle_solver)
    errors = get_drizzle_error(drizzle_source, drizzle_solver)
    retrieval_status = RetrievalStatus(drizzle_class)
    results = {
        **drizzle_solver.params,
        **derived_products.derived_products,
        **errors
    }
    results = _screen_rain(results, drizzle_class)
    results['drizzle_retrieval_status'] = retrieval_status.retrieval_status
    _append_data(drizzle_source, results)
    date = drizzle_source.get_date()
    attributes = output.add_time_attribute(DRIZZLE_ATTRIBUTES, date)
    output.update_attributes(drizzle_source.data, attributes)
    uuid = output.save_product_file('drizzle', drizzle_source, output_file,
                                    keep_uuid, uuid)
    drizzle_source.close()
    return uuid
Example 12
def ceilo2nc(full_path: str,
             output_file: str,
             site_meta: dict,
             keep_uuid: Optional[bool] = False,
             uuid: Optional[str] = None,
             date: Optional[str] = None) -> str:
    """Converts Vaisala / Lufft ceilometer data into Cloudnet Level 1b netCDF file.

    This function reads raw Vaisala (CT25k, CL31, CL51) and Lufft (CHM15k)
    ceilometer files and writes the data into netCDF file. Three variants
    of the attenuated backscatter are saved in the file:

        1. Raw backscatter, `beta_raw`
        2. Signal-to-noise screened backscatter, `beta`
        3. SNR-screened backscatter with smoothed weak background, `beta_smooth`

    Args:
        full_path: Ceilometer file name. For Vaisala it is a text file, for CHM15k it is
            a netCDF file.
        output_file: Output file name, e.g. 'ceilo.nc'.
        site_meta: Dictionary containing information about the site and instrument.
            Required key value pairs are `name` and `altitude` (metres above mean sea level).
            Also 'calibration_factor' is recommended because the default value is probably
            incorrect.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        RuntimeError: Failed to read or process raw ceilometer data.

    Examples:
        >>> from cloudnetpy.instruments import ceilo2nc
        >>> site_meta = {'name': 'Mace-Head', 'altitude': 5}
        >>> ceilo2nc('vaisala_raw.txt', 'vaisala.nc', site_meta)
        >>> site_meta = {'name': 'Juelich', 'altitude': 108, 'calibration_factor': 2.3e-12}
        >>> ceilo2nc('chm15k_raw.nc', 'chm15k.nc', site_meta)

    """
    ceilo = _initialize_ceilo(full_path, date)
    ceilo.read_ceilometer_file(site_meta.get('calibration_factor', None))
    beta_variants = ceilo.calc_beta()
    _append_data(ceilo, beta_variants)
    _append_height(ceilo, site_meta['altitude'])
    attributes = output.add_time_attribute(ATTRIBUTES, ceilo.date)
    output.update_attributes(ceilo.data, attributes)
    return _save_ceilo(ceilo, output_file, site_meta['name'], keep_uuid, uuid)
Example 13
def generate_lwc(categorize_file: str,
                 output_file: str,
                 uuid: Optional[str] = None) -> str:
    """Generates Cloudnet liquid water content product.

    This function calculates cloud liquid water content using the so-called
    adiabatic-scaled method. In this method, liquid water content measured by
    microwave radiometer is used to constrain the theoretical liquid water
    content of observed liquid clouds. The results are written in a netCDF file.

    Args:
        categorize_file: Categorize file name.
        output_file: Output file name.
        uuid: Set specific UUID for the file.

    Returns:
        str: UUID of the generated file.

    Examples:
        >>> from cloudnetpy.products import generate_lwc
        >>> generate_lwc('categorize.nc', 'lwc.nc')

    References:
        Illingworth, A.J., R.J. Hogan, E. O'Connor, D. Bouniol, M.E. Brooks,
        J. Delanoé, D.P. Donovan, J.D. Eastment, N. Gaussiat, J.W. Goddard,
        M. Haeffelin, H.K. Baltink, O.A. Krasnov, J. Pelon, J. Piriou, A. Protat,
        H.W. Russchenberg, A. Seifert, A.M. Tompkins, G. van Zadelhoff, F. Vinit,
        U. Willén, D.R. Wilson, and C.L. Wrench, 2007: Cloudnet.
        Bull. Amer. Meteor. Soc., 88, 883–898, https://doi.org/10.1175/BAMS-88-6-883

    """
    lwc_source = LwcSource(categorize_file)
    lwc = Lwc(lwc_source)
    clouds = CloudAdjustor(lwc_source, lwc)
    lwc_error = LwcError(lwc_source, lwc)
    lwc_source.append_results(lwc.lwc, clouds.status, lwc_error.error)
    date = lwc_source.get_date()
    attributes = output.add_time_attribute(LWC_ATTRIBUTES, date)
    output.update_attributes(lwc_source.data, attributes)
    uuid = output.save_product_file(
        "lwc",
        lwc_source,
        output_file,
        uuid,
        copy_from_cat=(
            "lwp",
            "lwp_error",
        ),
    )
    lwc_source.close()
    return uuid
Example 14
def basta2nc(basta_file: str,
             output_file: str,
             site_meta: dict,
             keep_uuid: Optional[bool] = False,
             uuid: Optional[str] = None,
             date: Optional[str] = None) -> str:
    """Converts BASTA cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts daily BASTA file into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        basta_file: Filename of a daily BASTA .nc file.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key is `name`.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps do not match the expected date.

    Examples:
          >>> from cloudnetpy.instruments import basta2nc
          >>> site_meta = {'name': 'Palaiseau', 'latitude': 48.718, 'longitude': 2.207}
          >>> basta2nc('basta_file.nc', 'radar.nc', site_meta)

    """
    keymap = {'reflectivity': 'Ze', 'velocity': 'v'}

    basta = Basta(basta_file, site_meta)
    basta.init_data(keymap)
    if date is not None:
        basta.validate_date(date)
    basta.screen_data(keymap)
    basta.add_meta()
    basta.add_geolocation()
    basta.add_height()
    basta.close()
    attributes = output.add_time_attribute(ATTRIBUTES, basta.date)
    output.update_attributes(basta.data, attributes)
    fields_from_source = ('elevation', 'pulse_width')
    return output.save_radar_level1b(basta_file, basta, output_file, keep_uuid,
                                     uuid, fields_from_source)
Example 15
def hatpro2nc(path_to_lwp_files: str,
              output_file: str,
              site_meta: dict,
              keep_uuid: Optional[bool] = False,
              uuid: Optional[str] = None,
              date: Optional[str] = None) -> Tuple[str, list]:
    """Converts RPG HATPRO microwave radiometer data (LWP) into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG HATPRO .LWP binary files,
    concatenates the data and writes it into netCDF file.

    Args:
        path_to_lwp_files: Folder containing one day of RPG HATPRO files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the
            site. Required key value pairs are `altitude` (metres above mean
            sea level) and `name`.
        keep_uuid: If True, keeps the UUID of the old file,
            if that exists. Default is False, in which case a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set,
            all files will be used. This might cause unexpected behavior if
            there are files from several days. If date is set as 'YYYY-MM-DD',
            only files that match the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        RuntimeError: Failed to read the binary data.

    Examples:
        >>> from cloudnetpy.instruments import hatpro2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)

    """
    all_files = utils.get_sorted_filenames(path_to_lwp_files, '.LWP')
    hatpro_objects, valid_files = _get_hatpro_objects(all_files, date)
    one_day_of_data = rpg.create_one_day_data_record(hatpro_objects)
    if not valid_files:
        return '', []
    hatpro = rpg.Rpg(one_day_of_data, site_meta, 'RPG-HATPRO')
    output.update_attributes(hatpro.data, ATTRIBUTES)
    return rpg.save_rpg(hatpro, output_file, valid_files, keep_uuid, uuid)
Example 16
def disdrometer2nc(
    disdrometer_file: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts disdrometer data into Cloudnet Level 1b netCDF file. Accepts measurements from
    OTT Parsivel-2 and Thies-LNM disdrometers.

    Args:
        disdrometer_file: Filename of disdrometer .log file.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Raises:
        DisdrometerDataError: Timestamps do not match the expected date, or unable to read
            the disdrometer file.

    Examples:
        >>> from cloudnetpy.instruments import disdrometer2nc
        >>> site_meta = {'name': 'Lindenberg', 'altitude': 104, 'latitude': 52.2, 'longitude': 14.1}
        >>> uuid = disdrometer2nc('thies-lnm.log', 'thies-lnm.nc', site_meta)

    """
    disdrometer: Union[Thies, Parsivel]
    try:
        disdrometer = Parsivel(disdrometer_file, site_meta)
    except ValueError:
        try:
            disdrometer = Thies(disdrometer_file, site_meta)
        except (ValueError, IndexError) as err:
            raise DisdrometerDataError("Can not read disdrometer file") from err
    if date is not None:
        disdrometer.validate_date(date)
    disdrometer.init_data()
    if date is not None:
        disdrometer.sort_time()
    disdrometer.add_meta()
    disdrometer.convert_units()
    attributes = output.add_time_attribute(ATTRIBUTES, disdrometer.date)
    output.update_attributes(disdrometer.data, attributes)
    return save_disdrometer(disdrometer, output_file, uuid)
Example 17
def pollyxt2nc(
    input_folder: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """
    Converts PollyXT Raman lidar data into a Cloudnet Level 1b netCDF file.

    Args:
        input_folder: Path to pollyxt netCDF files.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site with keys:

            - `name`: Name of the site (mandatory)
            - `altitude`: Site altitude in [m] (mandatory).
            - `latitude` (optional).
            - `longitude` (optional).
            - `zenith_angle`: If not the default 5 degrees (optional).
            - `snr_limit`: If not the default 2 (optional).
        uuid: Set specific UUID for the file.
        date: Expected date of the measurements as YYYY-MM-DD.

    Returns:
        UUID of the generated file.

    Examples:
        >>> from cloudnetpy.instruments import pollyxt2nc
        >>> site_meta = {'name': 'Mindelo', 'altitude': 13, 'zenith_angle': 6, 'snr_limit': 3}
        >>> pollyxt2nc('/path/to/files/', 'pollyxt.nc', site_meta)

    """
    snr_limit = site_meta.get("snr_limit", 2)
    polly = PollyXt(site_meta, date)
    epoch = polly.fetch_data(input_folder)
    polly.get_date_and_time(epoch)
    polly.fetch_zenith_angle()
    polly.calc_screened_products(snr_limit)
    polly.mask_nan_values()
    polly.prepare_data()
    polly.data_to_cloudnet_arrays()
    attributes = output.add_time_attribute(ATTRIBUTES, polly.date)
    output.update_attributes(polly.data, attributes)
    polly.add_snr_info("beta", snr_limit)
    uuid = output.save_level1b(polly, output_file, uuid)
    return uuid
Example 18
def generate_iwc(categorize_file: str,
                 output_file: str,
                 keep_uuid: Optional[bool] = False,
                 uuid: Optional[str] = None) -> str:
    """Generates Cloudnet ice water content product.

    This function calculates ice water content using the so-called Z-T method.
    In this method, ice water content is calculated from attenuation-corrected
    radar reflectivity and model temperature. The results are written in a
    netCDF file.

    Args:
        categorize_file: Categorize file name.
        output_file: Output file name.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.

    Returns:
        UUID of the generated file.

    Examples:
        >>> from cloudnetpy.products import generate_iwc
        >>> generate_iwc('categorize.nc', 'iwc.nc')

    References:
        Hogan, R.J., M.P. Mittermaier, and A.J. Illingworth, 2006:
        The Retrieval of Ice Water Content from Radar Reflectivity Factor and
        Temperature and Its Use in Evaluating a Mesoscale Model.
        J. Appl. Meteor. Climatol., 45, 301–317, https://doi.org/10.1175/JAM2340.1

    """
    iwc_source = IwcSource(categorize_file)
    ice_classification = IceClassification(categorize_file)
    iwc_source.append_iwc_including_rain(ice_classification)
    iwc_source.append_iwc(ice_classification)
    iwc_source.append_bias()
    iwc_source.append_sensitivity()
    iwc_source.append_error(ice_classification)
    iwc_source.append_status(ice_classification)
    date = iwc_source.get_date()
    attributes = output.add_time_attribute(IWC_ATTRIBUTES, date)
    output.update_attributes(iwc_source.data, attributes)
    uuid = output.save_product_file('iwc', iwc_source, output_file, keep_uuid,
                                    uuid)
    iwc_source.close()
    return uuid
Example 19
def generate_classification(categorize_file: str,
                            output_file: str,
                            keep_uuid: bool = False,
                            uuid: Optional[str] = None) -> str:
    """Generates Cloudnet classification product.

    This function reads the initial classification masks from a
    categorize file and creates a more comprehensive classification
    for different atmospheric targets. The results are written in a
    netCDF file.

    Args:
        categorize_file: Categorize file name.
        output_file: Output file name.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.

    Returns:
        str: UUID of the generated file.

    Examples:
        >>> from cloudnetpy.products import generate_classification
        >>> generate_classification('categorize.nc', 'classification.nc')

    """
    product_container = DataSource(categorize_file)
    categorize_bits = CategorizeBits(categorize_file)
    classification = _get_target_classification(categorize_bits)
    product_container.append_data(classification, 'target_classification')
    status = _get_detection_status(categorize_bits)
    product_container.append_data(status, 'detection_status')
    bases, tops = _get_cloud_base_and_top_heights(classification,
                                                  product_container)
    product_container.append_data(bases, 'cloud_base_height_amsl')
    product_container.append_data(tops, 'cloud_top_height_amsl')
    product_container.append_data(bases - product_container.altitude,
                                  'cloud_base_height_agl')
    product_container.append_data(tops - product_container.altitude,
                                  'cloud_top_height_agl')
    date = product_container.get_date()
    attributes = output.add_time_attribute(CLASSIFICATION_ATTRIBUTES, date)
    output.update_attributes(product_container.data, attributes)
    uuid = output.save_product_file('classification', product_container,
                                    output_file, keep_uuid, uuid)
    product_container.close()
    return uuid
Example 20
def ceilo2nc(input_file, output_file, site_meta, keep_uuid=False):
    """Converts Vaisala and Jenoptik raw files into netCDF file.

    This function reads raw Vaisala (CT25k, CL31, CL51) and Jenoptik (CHM15k)
    ceilometer files and writes the data into netCDF file. Three variants
    of the attenuated backscatter are saved in the file:

        1. Raw backscatter, `beta_raw`
        2. Signal-to-noise screened backscatter, `beta`
        3. SNR-screened backscatter with smoothed weak background, `beta_smooth`

    Args:
        input_file (str): Ceilometer file name. For Vaisala it is a text file,
            for Jenoptik it is a netCDF file.
        output_file (str): Output file name, e.g. 'ceilo.nc'.
        site_meta (dict): Dictionary containing information about the
            site. Required key value pairs are `name` and `altitude`
            (metres above mean sea level).
        keep_uuid (bool, optional): If True, keeps the UUID of the old file,
            if that exists. Default is False, in which case a new UUID is generated.
    
    Returns:
        str: UUID of the generated file.

    Raises:
        RuntimeError: Failed to read or process raw ceilometer data.

    Examples:
        >>> from cloudnetpy.instruments import ceilo2nc
        >>> site_meta = {'name': 'Mace-Head', 'altitude':5}
        >>> ceilo2nc('vaisala_raw.txt', 'vaisala.nc', site_meta)
        >>> ceilo2nc('jenoptik_raw.nc', 'jenoptik.nc', site_meta)

    """
    ceilo = _initialize_ceilo(input_file, site_meta['name'])
    ceilo.read_ceilometer_file()
    beta_variants = ceilo.calc_beta()
    _append_data(ceilo, beta_variants)
    _append_height(ceilo, site_meta['altitude'])
    output.update_attributes(ceilo.data, ATTRIBUTES)
    return _save_ceilo(ceilo, output_file, site_meta['name'], keep_uuid)
Example 21
def generate_drizzle(categorize_file, output_file):
    """Generates Cloudnet drizzle product.

    This function calculates different drizzle properties from
    cloud radar and lidar measurements. The results are written in a netCDF file.

    Args:
        categorize_file (str): Categorize file name.
        output_file (str): Output file name.

    Examples:
        >>> from cloudnetpy.products import generate_drizzle
        >>> generate_drizzle('categorize.nc', 'drizzle.nc')

    References:
        O’Connor, E.J., R.J. Hogan, and A.J. Illingworth, 2005:
        Retrieving Stratocumulus Drizzle Parameters Using Doppler Radar and Lidar.
        J. Appl. Meteor., 44, 14–27, https://doi.org/10.1175/JAM-2181.1

    """
    drizzle_data = DrizzleSource(categorize_file)
    drizzle_class = DrizzleClassification(categorize_file)
    spectral_width = CorrectSpectralWidth(categorize_file)
    drizzle_parameters = DrizzleSolving(drizzle_data, drizzle_class,
                                        spectral_width)
    derived_products = CalculateProducts(drizzle_data, drizzle_parameters)
    errors = get_drizzle_error(drizzle_data, drizzle_parameters)
    retrieval_status = RetrievalStatus(drizzle_class)
    results = {
        **drizzle_parameters.params,
        **derived_products.derived_products,
        **errors
    }
    results = _screen_rain(results, drizzle_class)
    results['drizzle_retrieval_status'] = retrieval_status.retrieval_status
    _append_data(drizzle_data, results)
    output.update_attributes(drizzle_data.data, DRIZZLE_ATTRIBUTES)
    output.save_product_file('drizzle', drizzle_data, output_file)
    drizzle_data.close()
Example 22
def radiometrics2nc(
    full_path: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts Radiometrics .csv file into Cloudnet Level 1b netCDF file.

    Args:
        full_path: Input file name.
        output_file: Output file name, e.g. 'radiometrics.nc'.
        site_meta: Dictionary containing information about the site and instrument.
            Required key value pairs are `name` and `altitude` (metres above mean sea level).
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Examples:
        >>> from cloudnetpy.instruments import radiometrics2nc
        >>> site_meta = {'name': 'Soverato', 'altitude': 21}
        >>> radiometrics2nc('radiometrics.csv', 'radiometrics.nc', site_meta)

    """
    radiometrics = Radiometrics(full_path, site_meta)
    radiometrics.read_raw_data()
    radiometrics.read_lwp()
    radiometrics.read_timestamps()
    radiometrics.screen_time(date)
    radiometrics.data_to_cloudnet_arrays()
    radiometrics.add_meta()
    attributes = output.add_time_attribute({}, radiometrics.date)
    output.update_attributes(radiometrics.data, attributes)
    uuid = output.save_level1b(radiometrics, output_file, uuid)
    return uuid
Example 23
def generate_categorize(input_files: dict,
                        output_file: str,
                        keep_uuid: Optional[bool] = False,
                        uuid: Optional[str] = None) -> str:
    """Generates Cloudnet Level 1c categorize file.

    The measurements are rebinned into a common height / time grid,
    and classified as different types of scatterers such as ice, liquid,
    insects, etc. Next, the radar signal is corrected for atmospheric
    attenuation, and error estimates are computed. Results are saved
    in *output_file*, which is a compressed netCDF4 file.

    Args:
        input_files: dict containing file names for calibrated `radar`, `lidar`, `model` and
            `mwr` files.
        output_file: Full path of the output file.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.

    Returns:
        UUID of the generated file.

    Raises:
        RuntimeError: Failed to create the categorize file.

    Notes:
        A separate mwr file is not needed when using an RPG cloud radar,
        which measures liquid water path. In that case, the radar file can
        be used as the mwr file as well, i.e. {'mwr': 'radar.nc'}.

    Examples:
        >>> from cloudnetpy.categorize import generate_categorize
        >>> input_files = {'radar': 'radar.nc',
                           'lidar': 'lidar.nc',
                           'model': 'model.nc',
                           'mwr': 'mwr.nc'}
        >>> generate_categorize(input_files, 'output.nc')

    """
    def _interpolate_to_cloudnet_grid():
        wl_band = utils.get_wl_band(data['radar'].radar_frequency)
        data['model'].interpolate_to_common_height(wl_band)
        data['model'].interpolate_to_grid(time, height)
        data['mwr'].rebin_to_grid(time)
        data['radar'].rebin_to_grid(time)
        data['lidar'].rebin_to_grid(time, height)

    def _prepare_output() -> dict:
        data['radar'].add_meta()
        data['model'].screen_sparse_fields()
        for key in ('category_bits', 'insect_prob', 'is_rain',
                    'is_undetected_melting'):
            data['radar'].append_data(getattr(classification, key), key)
        for key in ('radar_liquid_atten', 'radar_gas_atten'):
            data['radar'].append_data(attenuations[key], key)
        data['radar'].append_data(quality['quality_bits'], 'quality_bits')
        return {
            **data['radar'].data,
            **data['lidar'].data,
            **data['model'].data,
            **data['model'].data_sparse,
            **data['mwr'].data
        }

    def _define_dense_grid():
        return utils.time_grid(), data['radar'].height

    def _close_all():
        for obj in data.values():
            obj.close()

    data = {
        'radar': Radar(input_files['radar']),
        'lidar': Lidar(input_files['lidar']),
        'mwr': Mwr(input_files['mwr'])
    }
    data['model'] = Model(input_files['model'], data['radar'].altitude)

    time, height = _define_dense_grid()
    _interpolate_to_cloudnet_grid()
    if 'rpg' in data['radar'].type.lower():
        data['radar'].filter_speckle_noise()
        data['radar'].filter_1st_gate_artifact()
    for variable in ('v', 'v_sigma', 'ldr'):
        data['radar'].filter_stripes(variable)
    data['radar'].remove_incomplete_pixels()
    data['model'].calc_wet_bulb()
    classification = classify.classify_measurements(data)
    attenuations = atmos.get_attenuations(data, classification)
    data['radar'].correct_atten(attenuations)
    data['radar'].calc_errors(attenuations, classification)
    quality = classify.fetch_quality(data, classification, attenuations)
    cloudnet_arrays = _prepare_output()
    date = data['radar'].get_date()
    attributes = output.add_time_attribute(CATEGORIZE_ATTRIBUTES, date)
    output.update_attributes(cloudnet_arrays, attributes)
    uuid = _save_cat(output_file, data, cloudnet_arrays, keep_uuid, uuid)
    _close_all()
    return uuid
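
As the Notes above point out, an RPG cloud radar file that contains liquid water path can also serve as the mwr input; a minimal illustration with placeholder file names:

from cloudnetpy.categorize import generate_categorize

# The RPG radar file doubles as the mwr input because it already contains LWP.
input_files = {'radar': 'rpg_radar.nc',
               'lidar': 'lidar.nc',
               'model': 'model.nc',
               'mwr': 'rpg_radar.nc'}
uuid = generate_categorize(input_files, 'categorize.nc')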
Example 24
def mira2nc(
    raw_mira: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts METEK MIRA-35 cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts raw MIRA file(s) into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can be also a folder containing several
            non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key value pair
            is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
          >>> from cloudnetpy.instruments import mira2nc
          >>> site_meta = {'name': 'Vehmasmaki'}
          >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
          >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {
        "Zg": "Zh",
        "VELg": "v",
        "RMSg": "width",
        "LDRg": "ldr",
        "SNRg": "SNR",
        "elv": "elevation",
        "azi": "azimuth_angle",
        "aziv": "azimuth_velocity",
        "nfft": "nfft",
        "nave": "nave",
        "prf": "prf",
        "rg0": "rg0",
    }

    if os.path.isdir(raw_mira):
        temp_file = NamedTemporaryFile()  # pylint: disable=R1732
        mmclx_filename = temp_file.name
        valid_filenames = utils.get_sorted_filenames(raw_mira, ".mmclx")
        valid_filenames = general.get_files_with_common_range(valid_filenames)
        variables = list(keymap.keys())
        concat_lib.concatenate_files(valid_filenames,
                                     mmclx_filename,
                                     variables=variables)
    else:
        mmclx_filename = raw_mira

    mira = Mira(mmclx_filename, site_meta)
    mira.init_data(keymap)
    if date is not None:
        mira.screen_by_date(date)
        mira.date = date.split("-")
    mira.sort_timestamps()
    mira.remove_duplicate_timestamps()
    general.linear_to_db(mira, ("Zh", "ldr", "SNR"))
    mira.screen_by_snr()
    mira.mask_invalid_data()
    mira.add_time_and_range()
    general.add_site_geolocation(mira)
    general.add_radar_specific_variables(mira)
    valid_indices = mira.add_solar_angles()
    general.screen_time_indices(mira, valid_indices)
    general.add_height(mira)
    mira.close()
    attributes = output.add_time_attribute(ATTRIBUTES, mira.date)
    output.update_attributes(mira.data, attributes)
    uuid = output.save_level1b(mira, output_file, uuid)
    return uuid
Example 25
def mira2nc(raw_mira: str,
            output_file: str,
            site_meta: dict,
            rebin_data: Optional[bool] = False,
            keep_uuid: Optional[bool] = False,
            uuid: Optional[str] = None,
            date: Optional[str] = None) -> str:
    """Converts METEK MIRA-35 cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts raw MIRA file(s) into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can be also a folder containing several
            non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key value pair
            is `name`.
        rebin_data: If True, rebins data to 30s resolution. Otherwise keeps the native resolution.
            Default is False.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is False,
            in which case a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps from several days or timestamps do not match the expected date.

    Examples:
          >>> from cloudnetpy.instruments import mira2nc
          >>> site_meta = {'name': 'Vehmasmaki'}
          >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
          >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {
        'Zg': 'Ze',
        'VELg': 'v',
        'RMSg': 'width',
        'LDRg': 'ldr',
        'SNRg': 'SNR'
    }

    if os.path.isdir(raw_mira):
        temp_file = NamedTemporaryFile()
        mmclx_filename = temp_file.name
        valid_filenames = utils.get_sorted_filenames(raw_mira, '.mmclx')
        concat_lib.concatenate_files(valid_filenames,
                                     mmclx_filename,
                                     variables=list(keymap.keys()))
    else:
        mmclx_filename = raw_mira

    mira = Mira(mmclx_filename, site_meta)
    mira.init_data(keymap)
    if date is not None:
        mira.screen_time(date)
        mira.date = date.split('-')
    mira.linear_to_db(('Ze', 'ldr', 'SNR'))
    if rebin_data:
        snr_gain = mira.rebin_fields()
    else:
        snr_gain = 1
    mira.screen_by_snr(snr_gain)
    mira.mask_invalid_data()
    mira.add_meta()
    mira.add_geolocation()
    mira.add_height()
    mira.close()
    attributes = output.add_time_attribute(ATTRIBUTES, mira.date)
    output.update_attributes(mira.data, attributes)
    fields_from_source = ('nfft', 'prf', 'nave', 'zrg', 'rg0', 'drg')
    return output.save_radar_level1b(mmclx_filename, mira, output_file,
                                     keep_uuid, uuid, fields_from_source)
Example 26
def ceilo2nc(
    full_path: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts Vaisala / Lufft ceilometer data into Cloudnet Level 1b netCDF file.

    This function reads raw Vaisala (CT25k, CL31, CL51, CL61-D) and Lufft (CHM15k, CHM15kx)
    ceilometer files and writes the data into netCDF file. Three variants
    of the attenuated backscatter are saved in the file:

        1. Raw backscatter, `beta_raw`
        2. Signal-to-noise screened backscatter, `beta`
        3. SNR-screened backscatter with smoothed weak background, `beta_smooth`

    With the CL61-D, `beta_raw` is not saved due to the large file size. Instead, two additional
    depolarisation parameters are saved:

        1. Signal-to-noise screened depolarisation, `depolarisation`
        2. SNR-screened depolarisation with smoothed weak background, `depolarisation_smooth`

    Args:
        full_path: Ceilometer file name. For Vaisala it is a text file, for CHM15k(x) it is
            a netCDF file.
        output_file: Output file name, e.g. 'ceilo.nc'.
        site_meta: Dictionary containing information about the site and instrument.
            Required key value pairs are `name` and `altitude` (metres above mean sea level).
            Also 'calibration_factor' is recommended because the default value is probably
            incorrect. If the background noise is *not* range-corrected, you must define:
            {'range_corrected': False}.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        RuntimeError: Failed to read or process raw ceilometer data.

    Examples:
        >>> from cloudnetpy.instruments import ceilo2nc
        >>> site_meta = {'name': 'Mace-Head', 'altitude': 5}
        >>> ceilo2nc('vaisala_raw.txt', 'vaisala.nc', site_meta)
        >>> site_meta = {'name': 'Juelich', 'altitude': 108, 'calibration_factor': 2.3e-12}
        >>> ceilo2nc('chm15k_raw.nc', 'chm15k.nc', site_meta)

    """
    snr_limit = 5
    ceilo_obj = _initialize_ceilo(full_path, site_meta, date)
    calibration_factor = site_meta.get("calibration_factor", None)
    range_corrected = site_meta.get("range_corrected", True)
    ceilo_obj.read_ceilometer_file(calibration_factor)
    ceilo_obj.data["beta"] = ceilo_obj.calc_screened_product(
        ceilo_obj.data["beta_raw"], snr_limit, range_corrected)
    ceilo_obj.data["beta_smooth"] = ceilo_obj.calc_beta_smooth(
        ceilo_obj.data["beta"], snr_limit, range_corrected)
    assert ceilo_obj.instrument is not None and hasattr(
        ceilo_obj.instrument, "model")
    if "cl61" in ceilo_obj.instrument.model.lower():
        ceilo_obj.data["depolarisation"].mask = ceilo_obj.data["beta"].mask
        ceilo_obj.remove_raw_data()
    ceilo_obj.screen_depol()
    ceilo_obj.prepare_data()
    ceilo_obj.data_to_cloudnet_arrays()
    attributes = output.add_time_attribute(ATTRIBUTES, ceilo_obj.date)
    output.update_attributes(ceilo_obj.data, attributes)
    for key in ("beta", "beta_smooth"):
        ceilo_obj.add_snr_info(key, snr_limit)
    uuid = output.save_level1b(ceilo_obj, output_file, uuid)
    return uuid
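
The site_meta options documented above can be combined; the sketch below passes both a calibration factor and the range_corrected flag for an instrument whose background signal is not range-corrected (all values and file names are placeholders):

from cloudnetpy.instruments import ceilo2nc

site_meta = {'name': 'Mace-Head',
             'altitude': 5,
             'calibration_factor': 3.0e-12,  # placeholder value
             'range_corrected': False}       # background noise not range-corrected
uuid = ceilo2nc('ct25k_raw.txt', 'ceilo.nc', site_meta, date='2021-06-01')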
Example 27
def generate_categorize(input_files: dict,
                        output_file: str,
                        uuid: Optional[str] = None) -> str:
    """Generates Cloudnet Level 1c categorize file.

    The measurements are rebinned into a common height / time grid,
    and classified as different types of scatterers such as ice, liquid,
    insects, etc. Next, the radar signal is corrected for atmospheric
    attenuation, and error estimates are computed. Results are saved
    in *output_file*, which is a compressed netCDF4 file.

    Args:
        input_files: dict containing file names for calibrated `radar`, `lidar`, `model` and
            `mwr` files.
        output_file: Full path of the output file.
        uuid: Set specific UUID for the file.

    Returns:
        UUID of the generated file.

    Raises:
        RuntimeError: Failed to create the categorize file.

    Notes:
        A separate mwr file is not needed when using an RPG cloud radar,
        which measures liquid water path. In that case, the radar file can
        be used as the mwr file as well, i.e. {'mwr': 'radar.nc'}.

    Examples:
        >>> from cloudnetpy.categorize import generate_categorize
        >>> input_files = {'radar': 'radar.nc',
                           'lidar': 'lidar.nc',
                           'model': 'model.nc',
                           'mwr': 'mwr.nc'}
        >>> generate_categorize(input_files, 'output.nc')

    """
    def _interpolate_to_cloudnet_grid() -> list:
        wl_band = utils.get_wl_band(data["radar"].radar_frequency)
        data["model"].interpolate_to_common_height(wl_band)
        data["model"].interpolate_to_grid(time, height)
        data["mwr"].rebin_to_grid(time)
        radar_data_gap_indices = data["radar"].rebin_to_grid(time)
        lidar_data_gap_indices = data["lidar"].interpolate_to_grid(
            time, height)
        bad_time_indices = list(
            set(radar_data_gap_indices + lidar_data_gap_indices))
        valid_ind = [
            ind for ind in range(len(time)) if ind not in bad_time_indices
        ]
        return valid_ind

    def _screen_bad_time_indices(valid_indices: list) -> None:
        n_time_full = len(time)
        data["radar"].time = time[valid_indices]
        for var in ("radar", "lidar", "mwr", "model"):
            for key, item in data[var].data.items():
                if utils.isscalar(item.data):
                    continue
                array = item[:]
                if array.shape[0] == n_time_full:
                    if array.ndim == 1:
                        array = array[valid_indices]
                    elif array.ndim == 2:
                        array = array[valid_indices, :]
                    else:
                        continue
                    data[var].data[key].data = array
        for key, item in data["model"].data_dense.items():
            data["model"].data_dense[key] = item[valid_indices, :]

    def _prepare_output() -> dict:
        data["radar"].add_meta()
        data["model"].screen_sparse_fields()
        for key in ("category_bits", "rain_rate"):
            data["radar"].append_data(getattr(classification, key), key)
        for key in ("radar_liquid_atten", "radar_gas_atten"):
            data["radar"].append_data(attenuations[key], key)
        data["radar"].append_data(quality["quality_bits"], "quality_bits")
        return {
            **data["radar"].data,
            **data["lidar"].data,
            **data["model"].data,
            **data["model"].data_sparse,
            **data["mwr"].data,
        }

    def _define_dense_grid():
        return utils.time_grid(), data["radar"].height

    def _close_all():
        for obj in data.values():
            obj.close()

    data = {
        "radar": Radar(input_files["radar"]),
        "lidar": Lidar(input_files["lidar"]),
        "mwr": Mwr(input_files["mwr"]),
    }
    assert data["radar"].altitude is not None
    data["model"] = Model(input_files["model"], data["radar"].altitude)
    time, height = _define_dense_grid()
    valid_ind = _interpolate_to_cloudnet_grid()
    _screen_bad_time_indices(valid_ind)
    if "rpg" in data["radar"].type.lower(
    ) or "basta" in data["radar"].type.lower():
        data["radar"].filter_speckle_noise()
        data["radar"].filter_1st_gate_artifact()
    for variable in ("v", "v_sigma", "ldr"):
        data["radar"].filter_stripes(variable)
    data["radar"].remove_incomplete_pixels()
    data["model"].calc_wet_bulb()
    classification = classify.classify_measurements(data)
    attenuations = atmos.get_attenuations(data, classification)
    data["radar"].correct_atten(attenuations)
    data["radar"].calc_errors(attenuations, classification)
    quality = classify.fetch_quality(data, classification, attenuations)
    cloudnet_arrays = _prepare_output()
    date = data["radar"].get_date()
    attributes = output.add_time_attribute(CATEGORIZE_ATTRIBUTES, date)
    attributes = output.add_time_attribute(attributes, date, "model_time")
    attributes = output.add_source_attribute(attributes, data)
    output.update_attributes(cloudnet_arrays, attributes)
    uuid = _save_cat(output_file, data, cloudnet_arrays, uuid)
    _close_all()
    return uuid
Example 28
def generate_categorize(input_files, output_file):
    """Generates Cloudnet categorize file.

    The measurements are rebinned into a common height / time grid,
    and classified as different types of scatterers such as ice, liquid,
    insects, etc. Next, the radar signal is corrected for atmospheric
    attenuation, and error estimates are computed. Results are saved
    in *output_file*, which is a compressed netCDF4 file.

    Args:
        input_files (dict): dict containing file names for calibrated
            `radar`, `lidar`, `model` and `mwr` files.
        output_file (str): Full path of the output file.

    Raises:
        RuntimeError: Failed to create the categorize file.

    Notes:
        A separate mwr file is not needed when using an RPG cloud radar,
        which measures liquid water path. In that case, the radar file can
        be used as the mwr file as well, i.e. {'mwr': 'radar.nc'}.

    Examples:
        >>> from cloudnetpy.categorize import generate_categorize
        >>> input_files = {'radar': 'radar.nc',
                           'lidar': 'lidar.nc',
                           'model': 'model.nc',
                           'mwr': 'mwr.nc'}
        >>> generate_categorize(input_files, 'output.nc')

    """
    def _interpolate_to_cloudnet_grid():
        model.interpolate_to_common_height(radar.wl_band)
        model.interpolate_to_grid(time, height)
        mwr.rebin_to_grid(time)
        radar.rebin_to_grid(time)
        lidar.rebin_to_grid(time, height)

    def _prepare_output():
        radar.add_meta()
        model.screen_sparse_fields()
        for key in ('category_bits', 'insect_prob', 'is_rain',
                    'is_undetected_melting'):
            radar.append_data(getattr(classification, key), key)
        for key in ('radar_liquid_atten', 'radar_gas_atten'):
            radar.append_data(attenuations[key], key)
        radar.append_data(quality['quality_bits'], 'quality_bits')
        return {
            **radar.data,
            **lidar.data,
            **model.data,
            **model.data_sparse,
            **mwr.data
        }

    def _define_dense_grid():
        return utils.time_grid(), radar.height

    def _close_all():
        for obj in (radar, lidar, model, mwr):
            obj.close()

    radar = Radar(input_files['radar'])
    lidar = Lidar(input_files['lidar'])
    model = Model(input_files['model'], radar.altitude)
    mwr = Mwr(input_files['mwr'])
    time, height = _define_dense_grid()
    _interpolate_to_cloudnet_grid()
    if 'rpg' in radar.type.lower():
        radar.filter_speckle_noise()
    radar.remove_incomplete_pixels()
    model.calc_wet_bulb()
    classification = classify.classify_measurements(radar, lidar, model, mwr)
    attenuations = atmos.get_attenuations(model, mwr, classification)
    radar.correct_atten(attenuations)
    radar.calc_errors(attenuations, classification)
    quality = classify.fetch_quality(radar, lidar, classification,
                                     attenuations)
    output_data = _prepare_output()
    output.update_attributes(output_data, CATEGORIZE_ATTRIBUTES)
    _save_cat(output_file, radar, lidar, model, output_data)
    _close_all()
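
Taken together, these examples form a processing chain: the instrument modules produce Level 1b files, generate_categorize merges them into a categorize file, and the product generators read that file. A hedged end-to-end sketch using the call signatures shown above (file names, paths and site metadata are placeholders, and a pre-processed model file is assumed to exist):

from cloudnetpy.instruments import mira2nc, ceilo2nc, hatpro2nc
from cloudnetpy.categorize import generate_categorize
from cloudnetpy.products import (generate_classification, generate_drizzle,
                                 generate_iwc, generate_lwc)

site_meta = {'name': 'Hyytiala', 'altitude': 174}

# Level 1b: convert raw instrument data into harmonised netCDF files.
mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
ceilo2nc('ceilometer_raw.nc', 'lidar.nc', site_meta)
hatpro2nc('/path/to/lwp/files/', 'mwr.nc', site_meta)

# Level 1c: combine the calibrated inputs into a categorize file.
input_files = {'radar': 'radar.nc', 'lidar': 'lidar.nc',
               'model': 'model.nc', 'mwr': 'mwr.nc'}
generate_categorize(input_files, 'categorize.nc')

# Products derived from the categorize file.
generate_classification('categorize.nc', 'classification.nc')
generate_iwc('categorize.nc', 'iwc.nc')
generate_lwc('categorize.nc', 'lwc.nc')
generate_drizzle('categorize.nc', 'drizzle.nc')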