def rpg2nc(
    path_to_l1_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Converts RPG-FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes a netCDF file.

    Args:
        path_to_l1_files: Folder containing one day of RPG LV1 files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site. Required key
            value pairs are `altitude` (metres above mean sea level) and `name`.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set, all files will be used.
            This might cause unexpected behavior if there are files from several
            days. If date is set as 'YYYY-MM-DD', only files that match the date
            will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = utils.get_sorted_filenames(path_to_l1_files, ".LV1")
    fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
    # Guard before concatenating: with no valid files there is nothing to merge,
    # and building the day record from an empty list would be wasted (or failing)
    # work whose result was discarded anyway.
    if not valid_files:
        return "", []
    one_day_of_data = create_one_day_data_record(fmcw94_objects)
    print_info(one_day_of_data)
    fmcw = Fmcw(one_day_of_data, site_meta)
    fmcw.convert_time_to_fraction_hour()
    fmcw.mask_invalid_ldr()
    fmcw.mask_invalid_width()
    fmcw.sort_timestamps()
    fmcw.remove_duplicate_timestamps()
    general.linear_to_db(fmcw, ("Zh", "antenna_gain"))
    general.add_site_geolocation(fmcw)
    fmcw.add_solar_angles()
    general.add_height(fmcw)
    attributes = output.add_time_attribute(RPG_ATTRIBUTES, fmcw.date)
    output.update_attributes(fmcw.data, attributes)
    uuid = output.save_level1b(fmcw, output_file, uuid)
    return uuid, valid_files
def hatpro2nc(
    path_to_lwp_files: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> Tuple[str, list]:
    """Convert RPG HATPRO microwave radiometer (LWP) data into a Cloudnet
    Level 1b netCDF file.

    Reads one day of RPG HATPRO .LWP binary files, merges the data and
    writes the result into a netCDF file.

    Args:
        path_to_lwp_files: Folder containing one day of RPG HATPRO files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site with keys:

            - `name`: Name of the site (required)
            - `altitude`: Site altitude in [m] (optional).
            - `latitude` (optional).
            - `longitude` (optional).

        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set, all files will be
            used. This might cause unexpected behavior if there are files from
            several days. If date is set as 'YYYY-MM-DD', only files that match
            the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import hatpro2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)

    """
    lwp_files = utils.get_sorted_filenames(path_to_lwp_files, ".LWP")
    objects, valid_files = _get_hatpro_objects(lwp_files, date)
    if not valid_files:
        raise ValidTimeStampError
    day_record = rpg.create_one_day_data_record(objects)
    hatpro = rpg.Hatpro(day_record, site_meta)
    hatpro.sort_timestamps()
    hatpro.convert_time_to_fraction_hour("float64")
    general.add_site_geolocation(hatpro)
    hatpro.remove_duplicate_timestamps()
    attrs = output.add_time_attribute(ATTRIBUTES, hatpro.date)
    output.update_attributes(hatpro.data, attrs)
    uuid = output.save_level1b(hatpro, output_file, uuid)
    return uuid, valid_files
def rpg2nc(path_to_l1_files: str,
           output_file: str,
           site_meta: dict,
           keep_uuid: Optional[bool] = False,
           uuid: Optional[str] = None,
           date: Optional[str] = None) -> Tuple[str, list]:
    """Converts RPG FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

    This function reads one day of RPG Level 1 cloud radar binary files,
    concatenates the data and writes it into netCDF file.

    Args:
        path_to_l1_files: Folder containing one day of RPG LV1 files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site. Required key
            value pairs are `altitude` (metres above mean sea level) and `name`.
        keep_uuid: If True, keeps the UUID of the old file, if that exists.
            Default is False when new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set, all files will be
            used. This might cause unexpected behavior if there are files from
            several days. If date is set as 'YYYY-MM-DD', only files that match
            the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        RuntimeError: Failed to read the binary data.

    Examples:
        >>> from cloudnetpy.instruments import rpg2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

    """
    l1_files = utils.get_sorted_filenames(path_to_l1_files, '.LV1')
    fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
    # Guard before concatenating: when no file matched, building the day record
    # from the empty object list is wasted (or failing) work whose result was
    # discarded anyway.
    if not valid_files:
        return '', []
    one_day_of_data = create_one_day_data_record(fmcw94_objects)
    rpg = Rpg(one_day_of_data, site_meta, 'RPG-FMCW-94')
    rpg.convert_time_to_fraction_hour()
    rpg.mask_invalid_ldr()
    rpg.linear_to_db(('Ze', 'antenna_gain'))
    rpg.add_height()
    attributes = output.add_time_attribute(RPG_ATTRIBUTES, rpg.date)
    output.update_attributes(rpg.data, attributes)
    return save_rpg(rpg, output_file, valid_files, keep_uuid, uuid)
def hatpro2nc(path_to_lwp_files: str,
              output_file: str,
              site_meta: dict,
              keep_uuid: Optional[bool] = False,
              uuid: Optional[str] = None,
              date: Optional[str] = None) -> Tuple[str, list]:
    """Converts RPG HATPRO microwave radiometer data (LWP) into Cloudnet Level 1b
    netCDF file.

    This function reads one day of RPG HATPRO .LWP binary files, concatenates
    the data and writes it into netCDF file.

    Args:
        path_to_lwp_files: Folder containing one day of RPG HATPRO files.
        output_file: Output file name.
        site_meta: Dictionary containing information about the site. Required key
            value pairs are `altitude` (metres above mean sea level) and `name`.
        keep_uuid: If True, keeps the UUID of the old file, if that exists.
            Default is False when new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date in the input files. If not set, all files will be
            used. This might cause unexpected behavior if there are files from
            several days. If date is set as 'YYYY-MM-DD', only files that match
            the date will be used.

    Returns:
        2-element tuple containing

        - UUID of the generated file.
        - Files used in the processing.

    Raises:
        RuntimeError: Failed to read the binary data.

    Examples:
        >>> from cloudnetpy.instruments import hatpro2nc
        >>> site_meta = {'name': 'Hyytiala', 'altitude': 174}
        >>> hatpro2nc('/path/to/files/', 'hatpro.nc', site_meta)

    """
    all_files = utils.get_sorted_filenames(path_to_lwp_files, '.LWP')
    hatpro_objects, valid_files = _get_hatpro_objects(all_files, date)
    # Guard before concatenating: when no file matched, building the day record
    # from the empty object list is wasted (or failing) work whose result was
    # discarded anyway.
    if not valid_files:
        return '', []
    one_day_of_data = rpg.create_one_day_data_record(hatpro_objects)
    hatpro = rpg.Rpg(one_day_of_data, site_meta, 'RPG-HATPRO')
    output.update_attributes(hatpro.data, ATTRIBUTES)
    return rpg.save_rpg(hatpro, output_file, valid_files, keep_uuid, uuid)
def mira2nc(raw_mira: str,
            output_file: str,
            site_meta: dict,
            rebin_data: Optional[bool] = False,
            keep_uuid: Optional[bool] = False,
            uuid: Optional[str] = None,
            date: Optional[str] = None) -> str:
    """Convert METEK MIRA-35 cloud radar data into a Cloudnet Level 1b netCDF
    file.

    Turns raw MIRA file(s) into a much smaller file containing only the
    relevant data, usable in further processing steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can be also a folder
            containing several non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required
            key value pair is `name`.
        rebin_data: If True, rebins data to 30s resolution. Otherwise keeps the
            native resolution. Default is False.
        keep_uuid: If True, keeps the UUID of the old file, if that exists.
            Default is False when new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps from several days or timestamps do not match the
            expected date.

    Examples:
        >>> from cloudnetpy.instruments import mira2nc
        >>> site_meta = {'name': 'Vehmasmaki'}
        >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
        >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {'Zg': 'Ze',
              'VELg': 'v',
              'RMSg': 'width',
              'LDRg': 'ldr',
              'SNRg': 'SNR'}
    if os.path.isdir(raw_mira):
        # Keep a reference to the temp file object so the underlying file
        # survives until it is read again at save time below.
        temp_file = NamedTemporaryFile()
        mmclx_filename = temp_file.name
        sorted_files = utils.get_sorted_filenames(raw_mira, '.mmclx')
        concat_lib.concatenate_files(sorted_files, mmclx_filename,
                                     variables=list(keymap.keys()))
    else:
        mmclx_filename = raw_mira
    radar = Mira(mmclx_filename, site_meta)
    radar.init_data(keymap)
    if date is not None:
        radar.screen_time(date)
        radar.date = date.split('-')
    radar.linear_to_db(('Ze', 'ldr', 'SNR'))
    # Rebinning changes the effective SNR; otherwise the gain stays at unity.
    snr_gain = radar.rebin_fields() if rebin_data else 1
    radar.screen_by_snr(snr_gain)
    radar.mask_invalid_data()
    radar.add_meta()
    radar.add_geolocation()
    radar.add_height()
    radar.close()
    attrs = output.add_time_attribute(ATTRIBUTES, radar.date)
    output.update_attributes(radar.data, attrs)
    fields_from_source = ('nfft', 'prf', 'nave', 'zrg', 'rg0', 'drg')
    return output.save_radar_level1b(mmclx_filename, radar, output_file,
                                     keep_uuid, uuid, fields_from_source)
def mira2nc(
    raw_mira: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Convert METEK MIRA-35 cloud radar data into a Cloudnet Level 1b netCDF
    file.

    Turns raw MIRA file(s) into a much smaller file containing only the
    relevant data, usable in further processing steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can be also a folder
            containing several non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required
            key value pair is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import mira2nc
        >>> site_meta = {'name': 'Vehmasmaki'}
        >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
        >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {
        "Zg": "Zh",
        "VELg": "v",
        "RMSg": "width",
        "LDRg": "ldr",
        "SNRg": "SNR",
        "elv": "elevation",
        "azi": "azimuth_angle",
        "aziv": "azimuth_velocity",
        "nfft": "nfft",
        "nave": "nave",
        "prf": "prf",
        "rg0": "rg0",
    }
    if os.path.isdir(raw_mira):
        # Keep a reference to the temp file object so the underlying file
        # stays alive while it is being read.
        temp_file = NamedTemporaryFile()  # pylint: disable=R1732
        mmclx_filename = temp_file.name
        filenames = utils.get_sorted_filenames(raw_mira, ".mmclx")
        filenames = general.get_files_with_common_range(filenames)
        concat_lib.concatenate_files(filenames, mmclx_filename,
                                     variables=list(keymap.keys()))
    else:
        mmclx_filename = raw_mira
    radar = Mira(mmclx_filename, site_meta)
    radar.init_data(keymap)
    if date is not None:
        radar.screen_by_date(date)
        radar.date = date.split("-")
    radar.sort_timestamps()
    radar.remove_duplicate_timestamps()
    general.linear_to_db(radar, ("Zh", "ldr", "SNR"))
    radar.screen_by_snr()
    radar.mask_invalid_data()
    radar.add_time_and_range()
    general.add_site_geolocation(radar)
    general.add_radar_specific_variables(radar)
    # Profiles with no valid solar angle are dropped from every field.
    valid_ind = radar.add_solar_angles()
    general.screen_time_indices(radar, valid_ind)
    general.add_height(radar)
    radar.close()
    attrs = output.add_time_attribute(ATTRIBUTES, radar.date)
    output.update_attributes(radar.data, attrs)
    return output.save_level1b(radar, output_file, uuid)
def test_get_sorted_filenames(example_files):
    # Files should come back sorted case-insensitively, with full paths.
    data_dir = f"{example_files.dirname}/data"
    expected = [f"{data_dir}/{name}" for name in ("f.LV1", "f.lv1", "g.LV1")]
    assert utils.get_sorted_filenames(data_dir, ".LV1") == expected
def test_get_sorted_filenames(example_files):
    # Files should come back sorted case-insensitively, with full paths.
    data_dir = example_files.dirname + '/data'
    expected = [data_dir + '/' + name for name in ('f.LV1', 'f.lv1', 'g.LV1')]
    assert utils.get_sorted_filenames(data_dir, '.LV1') == expected