def test_does_not_append_with_invalid_file(self):
    temp_file = NamedTemporaryFile()
    non_nc_file = f"{SCRIPT_PATH}/data/vaisala/cl51.DAT"
    files = [self.files[0], self.files[2]]
    concat_lib.concatenate_files(files, temp_file.name, concat_dimension="profile")
    succ = concat_lib.update_nc(temp_file.name, non_nc_file)
    assert succ == 0

def test_does_not_append_to_beginning(self):
    temp_file = NamedTemporaryFile()
    concat_lib.concatenate_files(self.files[1:3], temp_file.name, concat_dimension="profile")
    succ = concat_lib.update_nc(temp_file.name, self.files[0])
    assert succ == 0
    nc = netCDF4.Dataset(temp_file.name)
    time = nc.variables["time"][:]
    assert len(time) == 2 * 12
    for ind, timestamp in enumerate(time[:-1]):
        assert timestamp < time[ind + 1]

def test_date_argument():
    daily_file = NamedTemporaryFile()
    temp_file = NamedTemporaryFile()
    concat_lib.concatenate_files(FILES, daily_file.name, concat_dimension="profile")
    ceilo2nc(daily_file.name, temp_file.name, SITE_META, date="2021-08-30")
    nc = netCDF4.Dataset(temp_file.name)
    assert len(nc.variables["time"]) == 12
    assert np.all(np.diff(nc.variables["time"][:]) > 0)
    assert nc.year == "2021"
    assert nc.month == "08"
    assert nc.day == "30"
    nc.close()

class TestWithRealData(Check):
    files = glob.glob(f"{SCRIPT_PATH}/data/chm15k/*.nc")
    date = "2020-10-22"
    site_meta = SITE_META
    daily_temp_file = NamedTemporaryFile()
    temp_file = NamedTemporaryFile()
    concat_lib.concatenate_files(files, daily_temp_file.name)
    uuid = ceilo2nc(daily_temp_file.name, temp_file.name, site_meta)

    def test_variable_names(self):
        keys = {
            "beta",
            "beta_raw",
            "beta_smooth",
            "calibration_factor",
            "range",
            "height",
            "zenith_angle",
            "time",
            "altitude",
            "latitude",
            "longitude",
            "wavelength",
        }
        assert set(self.nc.variables.keys()) == keys

    def test_common_lidar(self):
        lidar_fun = LidarFun(self.nc, self.site_meta, self.date, self.uuid)
        for name, method in LidarFun.__dict__.items():
            if "test_" in name:
                getattr(lidar_fun, name)()

    def test_variable_values(self):
        assert self.nc.variables["wavelength"][:] == 1064
        assert self.nc.variables["zenith_angle"][:] == 0

    def test_comments(self):
        for key in ("beta", "beta_smooth"):
            assert "SNR threshold applied: 5" in self.nc.variables[key].comment

    def test_global_attributes(self):
        assert self.nc.source == "Lufft CHM15k"
        assert self.nc.title == f'CHM15k ceilometer from {self.site_meta["name"]}'

    def test_date_argument(self):
        temp_file = NamedTemporaryFile()
        ceilo2nc(self.daily_temp_file.name, temp_file.name, self.site_meta, date="2020-10-22")
        nc = netCDF4.Dataset(temp_file.name)
        assert len(nc.variables["time"]) == 20
        assert nc.year == "2020"
        assert nc.month == "10"
        assert nc.day == "22"
        nc.close()
        with pytest.raises(ValidTimeStampError):
            ceilo2nc(
                self.daily_temp_file.name, self.temp_file.name, self.site_meta, date="2020-10-23"
            )

def test_concatenate_files_with_mira():
    files = [
        f"{SCRIPT_PATH}/data/mira/20210102_1400.mmclx",
        f"{SCRIPT_PATH}/data/mira/20210102_0000.mmclx",
    ]
    output_file = NamedTemporaryFile()
    variables = ["microsec", "SNRCorFaCo"]
    concat_lib.concatenate_files(
        files, output_file.name, variables=variables, new_attributes={"kissa": 50}
    )
    nc = netCDF4.Dataset(output_file.name)
    assert len(nc.variables["time"]) == 145 + 146
    assert len(nc.variables["range"]) == 413
    assert nc.data_model == "NETCDF4_CLASSIC"
    for var in ("prf", "microsec"):
        assert var in nc.variables
    for var in ("VELg", "elv"):
        assert var not in nc.variables
    assert nc.kissa == 50
    nc.close()

class TestCl61d(Check):
    site_meta = SITE_META
    date = "2021-08-29"
    daily_file = NamedTemporaryFile()
    concat_lib.concatenate_files(FILES, daily_file.name, concat_dimension="profile")
    temp_file = NamedTemporaryFile()
    uuid = ceilo2nc(daily_file.name, temp_file.name, site_meta, date=date)

    def test_variable_names(self):
        keys = {
            "beta",
            "beta_smooth",
            "calibration_factor",
            "range",
            "height",
            "zenith_angle",
            "time",
            "depolarisation",
            "altitude",
            "latitude",
            "longitude",
            "wavelength",
        }
        assert set(self.nc.variables.keys()) == keys

    def test_common_lidar(self):
        lidar_fun = LidarFun(self.nc, self.site_meta, self.date, self.uuid)
        for name, method in LidarFun.__dict__.items():
            if "test_" in name:
                getattr(lidar_fun, name)()

    def test_variable_values(self):
        assert abs(self.nc.variables["wavelength"][:] - 910.55) < 0.001
        assert self.nc.variables["zenith_angle"][:] == 3.0
        assert ma.max(self.nc.variables["depolarisation"][:]) < 1
        assert ma.min(self.nc.variables["depolarisation"][:]) > -0.1

    def test_comments(self):
        assert "SNR threshold applied: 5" in self.nc.variables["beta"].comment

    def test_global_attributes(self):
        assert self.nc.source == "Vaisala CL61d"
        assert self.nc.title == f'CL61d ceilometer from {self.site_meta["name"]}'

def mira2nc(raw_mira: str,
            output_file: str,
            site_meta: dict,
            rebin_data: Optional[bool] = False,
            keep_uuid: Optional[bool] = False,
            uuid: Optional[str] = None,
            date: Optional[str] = None) -> str:
    """Converts METEK MIRA-35 cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts raw MIRA file(s) into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can also be a folder containing
            several non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key value
            pair is `name`.
        rebin_data: If True, rebins data to 30s resolution. Otherwise keeps the native
            resolution. Default is False.
        keep_uuid: If True, keeps the UUID of the old file, if that exists. Default is
            False when a new UUID is generated.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValueError: Timestamps are from several days or do not match the expected date.

    Examples:
        >>> from cloudnetpy.instruments import mira2nc
        >>> site_meta = {'name': 'Vehmasmaki'}
        >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
        >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    keymap = {'Zg': 'Ze',
              'VELg': 'v',
              'RMSg': 'width',
              'LDRg': 'ldr',
              'SNRg': 'SNR'}
    if os.path.isdir(raw_mira):
        # Concatenate the individual .mmclx files into one temporary daily file.
        temp_file = NamedTemporaryFile()
        mmclx_filename = temp_file.name
        valid_filenames = utils.get_sorted_filenames(raw_mira, '.mmclx')
        concat_lib.concatenate_files(valid_filenames, mmclx_filename,
                                     variables=list(keymap.keys()))
    else:
        mmclx_filename = raw_mira
    mira = Mira(mmclx_filename, site_meta)
    mira.init_data(keymap)
    if date is not None:
        mira.screen_time(date)
        mira.date = date.split('-')
    mira.linear_to_db(('Ze', 'ldr', 'SNR'))
    if rebin_data:
        snr_gain = mira.rebin_fields()
    else:
        snr_gain = 1
    mira.screen_by_snr(snr_gain)
    mira.mask_invalid_data()
    mira.add_meta()
    mira.add_geolocation()
    mira.add_height()
    mira.close()
    attributes = output.add_time_attribute(ATTRIBUTES, mira.date)
    output.update_attributes(mira.data, attributes)
    fields_from_source = ('nfft', 'prf', 'nave', 'zrg', 'rg0', 'drg')
    return output.save_radar_level1b(mmclx_filename, mira, output_file, keep_uuid, uuid,
                                     fields_from_source)

def mira2nc(
    raw_mira: str,
    output_file: str,
    site_meta: dict,
    uuid: Optional[str] = None,
    date: Optional[str] = None,
) -> str:
    """Converts METEK MIRA-35 cloud radar data into Cloudnet Level 1b netCDF file.

    This function converts raw MIRA file(s) into a much smaller file that
    contains only the relevant data and can be used in further processing
    steps.

    Args:
        raw_mira: Filename of a daily MIRA .mmclx file. Can also be a folder containing
            several non-concatenated .mmclx files from one day.
        output_file: Output filename.
        site_meta: Dictionary containing information about the site. Required key value
            pair is `name`.
        uuid: Set specific UUID for the file.
        date: Expected date as YYYY-MM-DD of all profiles in the file.

    Returns:
        UUID of the generated file.

    Raises:
        ValidTimeStampError: No valid timestamps found.

    Examples:
        >>> from cloudnetpy.instruments import mira2nc
        >>> site_meta = {'name': 'Vehmasmaki'}
        >>> mira2nc('raw_radar.mmclx', 'radar.nc', site_meta)
        >>> mira2nc('/one/day/of/mira/mmclx/files/', 'radar.nc', site_meta)

    """
    # Mapping from raw .mmclx variable names to Cloudnet variable names.
    keymap = {
        "Zg": "Zh",
        "VELg": "v",
        "RMSg": "width",
        "LDRg": "ldr",
        "SNRg": "SNR",
        "elv": "elevation",
        "azi": "azimuth_angle",
        "aziv": "azimuth_velocity",
        "nfft": "nfft",
        "nave": "nave",
        "prf": "prf",
        "rg0": "rg0",
    }
    if os.path.isdir(raw_mira):
        # Concatenate the individual .mmclx files into one temporary daily file.
        temp_file = NamedTemporaryFile()  # pylint: disable=R1732
        mmclx_filename = temp_file.name
        valid_filenames = utils.get_sorted_filenames(raw_mira, ".mmclx")
        valid_filenames = general.get_files_with_common_range(valid_filenames)
        variables = list(keymap.keys())
        concat_lib.concatenate_files(valid_filenames, mmclx_filename, variables=variables)
    else:
        mmclx_filename = raw_mira
    mira = Mira(mmclx_filename, site_meta)
    mira.init_data(keymap)
    if date is not None:
        mira.screen_by_date(date)
        mira.date = date.split("-")
    mira.sort_timestamps()
    mira.remove_duplicate_timestamps()
    general.linear_to_db(mira, ("Zh", "ldr", "SNR"))
    mira.screen_by_snr()
    mira.mask_invalid_data()
    mira.add_time_and_range()
    general.add_site_geolocation(mira)
    general.add_radar_specific_variables(mira)
    valid_indices = mira.add_solar_angles()
    general.screen_time_indices(mira, valid_indices)
    general.add_height(mira)
    mira.close()
    attributes = output.add_time_attribute(ATTRIBUTES, mira.date)
    output.update_attributes(mira.data, attributes)
    uuid = output.save_level1b(mira, output_file, uuid)
    return uuid
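

# Usage sketch (illustrative, not part of the module): one way to call mira2nc on a
# directory of raw .mmclx files with an expected date, handling the case where no
# profiles match that date. The input path, output filename, and date below are
# hypothetical; the import of ValidTimeStampError from cloudnetpy.exceptions is an
# assumption based on the exception named in the docstring above.
def _example_mira2nc_usage() -> None:
    from cloudnetpy.exceptions import ValidTimeStampError
    from cloudnetpy.instruments import mira2nc

    site_meta = {"name": "Vehmasmaki"}  # hypothetical site metadata
    try:
        # A folder of non-concatenated .mmclx files from one day is also accepted.
        uuid = mira2nc("/one/day/of/mira/mmclx/files/", "radar.nc", site_meta, date="2021-01-02")
        print(f"Created radar.nc with UUID {uuid}")
    except ValidTimeStampError:
        print("No valid timestamps found for the expected date")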