def test_create_ds_template_context(self, mock_pnu, mock_create_template_dataset):
    """create_ds_template with a context holding live anomaly/metadata dbs
    forwards the product name and metadata-db query to create_template_dataset."""
    suffix = "".join(random.choices(string.ascii_lowercase, k=6))
    context = setup_test_context(
        anomaly_db_url=f"sqlite:///anomaly_{suffix}.db",
        metadata_db_url=f"sqlite:///metadata_{suffix}.db",
        create_dbs=True,
    )
    dims = {"n_w": 271, "n_s": 10}
    builder = HypernetsDSBuilder(
        context=context,
        variables_dict_defs={"def1": "vars1", "def2": "vars2"},
        metadata_defs={"def1": {}, "def2": {}},
    )
    builder.create_ds_template(dims, ds_format="def1", propagate_ds="propagate_ds")

    # product name generated exactly once for the requested format
    mock_pnu.return_value.create_product_name.assert_called_once_with("def1")
    # template built from the matching variables def, with the generated name
    mock_create_template_dataset.assert_called_once_with(
        "vars1",
        dims,
        metadata={"product_name": mock_pnu.return_value.create_product_name.return_value},
        propagate_ds="propagate_ds",
        metadata_db=context.metadata_db,
        metadata_db_query={},
    )
    teardown_test_context(context, remove_dbs=True)
def test_create_ds_template_context_no_db(self, mock_pnu, mock_create_template_dataset):
    """With a context but no databases, create_ds_template passes
    metadata_db_query=None through to create_template_dataset."""
    context = setup_test_context()
    dims = {"n_w": 271, "n_s": 10}
    builder = HypernetsDSBuilder(
        context=context,
        variables_dict_defs={"def1": "vars1", "def2": "vars2"},
        metadata_defs={"def1": {}, "def2": {}},
    )
    builder.create_ds_template(dims, ds_format="def1", propagate_ds="propagate_ds")

    # product name generated exactly once for the requested format
    mock_pnu.return_value.create_product_name.assert_called_once_with("def1")
    # no db available, so the query argument must be None
    mock_create_template_dataset.assert_called_once_with(
        "vars1",
        dims,
        metadata={"product_name": mock_pnu.return_value.create_product_name.return_value},
        propagate_ds="propagate_ds",
        metadata_db=context.metadata_db,
        metadata_db_query=None,
    )
def test_return_ds_format_dim_names(self):
    """return_ds_format_dim_names collects the union of dims across a format's variables."""
    variables_dict_defs = {
        "def1": {
            "var1": {"dim": ["dim1", "dim2"]},
            "var2": {"dim": ["dim2", "dim3", "dim4"]},
        },
        "def2": {
            "var3": {"dim": ["dim10"]},
            "var4": {"dim": ["dim12"]},
        },
    }
    builder = HypernetsDSBuilder(
        variables_dict_defs=variables_dict_defs,
        metadata_defs={"def2": "meta1"},
    )
    self.assertCountEqual(
        builder.return_ds_format_dim_names("def1"),
        ["dim1", "dim2", "dim3", "dim4"],
    )
def test_create_empty_dim_sizes_dict(self):
    """create_empty_dim_sizes_dict maps every dim of a format to None."""
    variables_dict_defs = {
        "def1": {
            "var1": {"dim": ["dim1", "dim2"]},
            "var2": {"dim": ["dim2", "dim3", "dim4"]},
        },
        "def2": {
            "var3": {"dim": ["dim10"]},
            "var4": {"dim": ["dim12"]},
        },
    }
    builder = HypernetsDSBuilder(
        variables_dict_defs=variables_dict_defs,
        metadata_defs={"def2": "meta1"},
    )
    self.assertDictEqual(
        builder.create_empty_dim_sizes_dict("def1"),
        dict.fromkeys(["dim1", "dim2", "dim3", "dim4"]),
    )
def test_create_ds_template_runs_with_default_defs(self, mock_create_template_dataset):
    """create_ds_template works with the builder's bundled format definitions."""
    builder = HypernetsDSBuilder()
    builder.create_ds_template({"n_w": 271, "n_s": 10}, "L_L2A")
    # with default defs we only assert the template factory was invoked
    mock_create_template_dataset.assert_called()
def test_return_ds_formats(self):
    """return_ds_formats lists the format names from variables_dict_defs."""
    builder = HypernetsDSBuilder(
        variables_dict_defs={"def1": "vars1", "def2": "vars2"},
        metadata_defs={"def2": "meta1"},
    )
    self.assertCountEqual(builder.return_ds_formats(), ["def1", "def2"])
def test_create_ds_template_no_context_no_meta(self, mock_create_template_dataset):
    """Requesting a format with no metadata def and no context raises RuntimeWarning."""
    builder = HypernetsDSBuilder(
        variables_dict_defs={"def1": "vars1", "def2": "vars2"},
        metadata_defs={"def2": "meta1"},  # no entry for "def1"
    )
    self.assertRaises(
        RuntimeWarning,
        builder.create_ds_template,
        dim_sizes_dict={"n_w": 271, "n_s": 10},
        ds_format="def1",
        propagate_ds=None,
    )
def run(self):
    """
    Run the hypernets test processing chain: build L0 template datasets from
    the example outdoor-test spectra, then process them through
    L1a (calibration) -> L1b (VNIR/SWIR combination) -> L1c (interpolation)
    -> L2a (surface reflectance).

    :return: None
    """
    server_dir = os.path.join(this_directory_path, "data_io/tests/reader/")
    seq_id = self.context.get_config_value("sequence_id")
    seq_dir = server_dir + seq_id + "/"  # NOTE(review): currently unused — confirm before removing

    # Raw example spectra. The acquisition-time strings are re-read from the
    # same spec files with a string dtype.
    wavs_vis = np.genfromtxt("../examples/outdoor_test/wavs_vis.dat")
    wavs_swir = np.genfromtxt("../examples/outdoor_test/wavs_swir.dat")
    spec_vis = np.genfromtxt("../examples/outdoor_test/spec_vis.dat")
    spec_swir = np.genfromtxt("../examples/outdoor_test/spec_swir.dat")
    times_vis = np.genfromtxt("../examples/outdoor_test/spec_vis.dat", dtype="U20")
    times_swir = np.genfromtxt("../examples/outdoor_test/spec_swir.dat", dtype="U20")

    # Column 1 selects the measurement type: 0 = black, 1 = radiance,
    # 2 = irradiance (as used by the np.where selections below).
    ID_vis = spec_vis[:, 1]
    ID_swir = spec_swir[:, 1]

    hdsb = HypernetsDSBuilder(context=self.context)

    # VNIR L0 templates (scan counts match the example data)
    l0_rad = self._fill_l0_template(hdsb, "L0_RAD", 320, wavs_vis, spec_vis, times_vis, ID_vis, 1)
    l0_irr = self._fill_l0_template(hdsb, "L0_IRR", 40, wavs_vis, spec_vis, times_vis, ID_vis, 2)
    l0_bla = self._fill_l0_template(hdsb, "L0_BLA", 360, wavs_vis, spec_vis, times_vis, ID_vis, 0)
    # SWIR L0 templates
    l0_swir_rad = self._fill_l0_template(hdsb, "L0_RAD", 10, wavs_swir, spec_swir, times_swir, ID_swir, 1)
    l0_swir_irr = self._fill_l0_template(hdsb, "L0_IRR", 10, wavs_swir, spec_swir, times_swir, ID_swir, 2)
    l0_swir_bla = self._fill_l0_template(hdsb, "L0_BLA", 20, wavs_swir, spec_swir, times_swir, ID_swir, 0)

    reader = HypernetsReader(self.context)  # NOTE(review): unused — kept in case construction has side effects
    calcon = CalibrationConverter(self.context)
    cal = Calibrate(self.context, MCsteps=100)
    surf = SurfaceReflectance(self.context, MCsteps=1000)
    # NOTE(review): comb/intp are only created for the land ("l") network but
    # are used unconditionally below — any other network value raises NameError.
    if self.context.get_config_value("network") == "l":
        comb = CombineSWIR(self.context, MCsteps=100)
        intp = Interpolate(self.context, MCsteps=1000)

    # First VNIR pixel is rescaled (presumably a different gain — TODO confirm)
    l0_rad["digital_number"].values[:, 0] = l0_rad["digital_number"].values[:, 0] / 1.25
    l0_irr["digital_number"].values[:, 0] = l0_irr["digital_number"].values[:, 0] / 1.25

    # Calibrate to L1a
    (calibration_data_rad, calibration_data_irr, calibration_data_swir_rad,
     calibration_data_swir_irr) = calcon.read_calib_files()
    self.context.logger.debug("Processing to L1a...")
    print("Processing to L1a radiance...")
    t1 = time.time()
    L1a_rad = cal.calibrate_l1a("radiance", l0_rad, l0_bla, calibration_data_rad)
    L1a_irr = cal.calibrate_l1a("irradiance", l0_irr, l0_bla, calibration_data_irr)
    t2 = time.time()
    print(t2 - t1)
    L1a_swir_rad = cal.calibrate_l1a("radiance", l0_swir_rad, l0_swir_bla,
                                     calibration_data_swir_rad, swir=True)
    L1a_swir_irr = cal.calibrate_l1a("irradiance", l0_swir_irr, l0_swir_bla,
                                     calibration_data_swir_irr, swir=True)
    t3 = time.time()
    print(t3 - t2)
    self.context.logger.debug("Done")

    # Combine VNIR and SWIR to L1b
    self.context.logger.debug("Processing to L1b radiance...")
    print("Processing to L1b radiance...")
    L1b_rad = comb.combine("radiance", L1a_rad, L1a_swir_rad)
    self.context.logger.debug("Done")
    self.context.logger.debug("Processing to L1b irradiance...")
    L1b_irr = comb.combine("irradiance", L1a_irr, L1a_swir_irr)
    self.context.logger.debug("Done")

    # Interpolate irradiance onto radiance scans (L1c), then surface reflectance (L2a)
    self.context.logger.debug("Processing to L1c...")
    L1c = intp.interpolate_l1c(L1b_rad, L1b_irr)
    self.context.logger.debug("Done")
    self.context.logger.debug("Processing to L2a...")
    L2a = surf.process_l2(L1c)
    self.context.logger.debug("Done")

    self.context.logger.info("all done!")
    print("all done!")
    return None

def _fill_l0_template(self, hdsb, ds_format, scan_dim, wavs, spec, times, ids, target_id):
    """
    Create one L0 template dataset and fill it from the raw spectra arrays.

    Replaces six copy-pasted blocks that differed only in scan count, spectral
    range (VNIR/SWIR) and measurement-type id.

    :param hdsb: HypernetsDSBuilder used to create the template
    :param ds_format: L0 format string ("L0_RAD", "L0_IRR" or "L0_BLA")
    :param scan_dim: number of scans in the template
    :param wavs: wavelength array (VNIR or SWIR)
    :param spec: numeric spectra array; cols 2/3 time/integration, 5+ digital numbers
    :param times: same file read as strings, for the acquisition-time column
    :param ids: measurement-type id per row (column 1 of spec)
    :param target_id: id value to select (0 black, 1 radiance, 2 irradiance)
    :return: filled L0 template dataset
    """
    dim_sizes_dict = {"wavelength": len(wavs), "scan": scan_dim}
    l0 = hdsb.create_ds_template(dim_sizes_dict=dim_sizes_dict, ds_format=ds_format)
    l0["wavelength"] = wavs
    l0["scan"] = np.linspace(1, scan_dim, scan_dim)
    sel = np.where(ids == target_id)
    l0["digital_number"].values = spec[sel, 5::][0].T
    l0["acquisition_time"].values = spec[sel, 2][0]
    l0["integration_time"].values = spec[sel, 3][0]
    # two series of equal length (first half series 1, second half series 2)
    l0["series_id"].values = np.concatenate(
        (np.ones(int(scan_dim / 2)), 2 * np.ones(int(scan_dim / 2))))
    # Overwrite acquisition_time with UTC timestamps parsed from the string
    # column; the acquisition date of the outdoor test is hard-coded.
    for i in range(scan_dim):
        time_str = times[sel, 2][0][i]
        acquisition_time = datetime.strptime(
            "20200821T" + time_str + "UTC", '%Y%m%dT%H:%M:%S.%f%Z')
        acquisition_time = acquisition_time.replace(tzinfo=timezone.utc)
        l0["acquisition_time"][i] = datetime.timestamp(acquisition_time)
    return l0
def __init__(self, context):
    """Keep a reference to the processing context and create the
    dataset builder bound to it."""
    self.hdsb = HypernetsDSBuilder(context=context)
    self.context = context
class DataTemplates:
    """
    Factory for HYPERNETS xarray dataset templates at each processing level
    (CAL, L0, L1A-L1D, L2A), propagating metadata from lower-level datasets
    via HypernetsDSBuilder.create_ds_template.
    """

    # Per-scan scalar variables that are averaged when collapsing scans to series.
    _SERIES_VARIABLES = [
        "acquisition_time",
        "viewing_azimuth_angle",
        "viewing_zenith_angle",
        "solar_azimuth_angle",
        "solar_zenith_angle",
    ]

    def __init__(self, context):
        """Store the processing context and the dataset builder bound to it."""
        self.context = context
        self.hdsb = HypernetsDSBuilder(context=context)

    def calibration_dataset(self, wavs, nonlinearcals, wavcoef, caldates,
                            nonlineardates, wavdates):
        """
        Make the calibration (CAL) template dataset, with coordinates set from
        the supplied calibration arrays.

        :param wavs: wavelength values
        :param nonlinearcals: non-linearity coefficient array (sizes the dim)
        :param wavcoef: wavelength-coefficient array (sizes the dim)
        :param caldates: calibration dates
        :param nonlineardates: non-linearity calibration dates
        :param wavdates: wavelength calibration dates
        :return: CAL template dataset
        """
        cal_dim_sizes_dict = {
            "wavelength": len(wavs),
            "nonlinearcoef": len(nonlinearcals),
            "wavcoef": len(wavcoef),
            "calibrationdates": len(caldates),
            "nonlineardates": len(nonlineardates),
            "wavdates": len(wavdates),
        }
        dataset_cal = self.hdsb.create_ds_template(cal_dim_sizes_dict, ds_format="CAL")
        dataset_cal = dataset_cal.assign_coords(wavelength=wavs)
        dataset_cal = dataset_cal.assign_coords(nonlinearcoef=range(len(nonlinearcals)))
        dataset_cal = dataset_cal.assign_coords(wavcoef=range(len(wavcoef)))
        dataset_cal = dataset_cal.assign_coords(calibrationdates=caldates)
        dataset_cal = dataset_cal.assign_coords(nonlineardates=nonlineardates)
        dataset_cal = dataset_cal.assign_coords(wavdates=wavdates)
        return dataset_cal

    def l0_template_dataset(self, wvl, scanDim, fileformat, swir=False):
        """
        Make an L0 template dataset with wavelength and scan coordinates.

        :param wvl: wavelength values
        :param scanDim: number of scans
        :param fileformat: L0 format string
        :param swir: whether the data is from the SWIR sensor
        :return: L0 template dataset
        """
        dim_sizes_dict = {"wavelength": len(wvl), "scan": scanDim}
        dataset_l0 = self.hdsb.create_ds_template(
            dim_sizes_dict=dim_sizes_dict, ds_format=fileformat, swir=swir)
        # BUG FIX: xarray's assign_coords returns a new dataset; the original
        # code discarded the return values, so the coords were never set.
        dataset_l0 = dataset_l0.assign_coords(wavelength=wvl)
        dataset_l0 = dataset_l0.assign_coords(scan=np.linspace(1, scanDim, scanDim))
        return dataset_l0

    def l1a_template_from_l0_dataset(self, measurandstring, dataset_l0, swir=False):
        """
        Make the L1A template from an L0 dataset, propagating its metadata.

        :param measurandstring: "radiance" or "irradiance"
        :param dataset_l0: source L0 dataset
        :param swir: whether the data is from the SWIR sensor
        :return: L1A template dataset
        :raises ValueError: if measurandstring is not a recognised measurand
        """
        l1a_dim_sizes_dict = {
            "wavelength": len(dataset_l0["wavelength"]),
            "scan": len(dataset_l0["scan"]),
        }
        if measurandstring == "radiance":
            dataset_l1a = self.hdsb.create_ds_template(
                l1a_dim_sizes_dict, ds_format="L_L1A_RAD",
                propagate_ds=dataset_l0, ds=dataset_l0, swir=swir)
        elif measurandstring == "irradiance":
            dataset_l1a = self.hdsb.create_ds_template(
                l1a_dim_sizes_dict, "L_L1A_IRR",
                propagate_ds=dataset_l0, ds=dataset_l0, swir=swir)
        else:
            # previously an unrecognised measurand fell through to a NameError
            raise ValueError("measurandstring must be 'radiance' or 'irradiance'")
        dataset_l1a = dataset_l1a.assign_coords(wavelength=dataset_l0.wavelength)
        return dataset_l1a

    def l1c_int_template_from_l1a_dataset_water(self, dataset_l1a):
        """
        Make the W_L1C template sized by the upwelling scans
        (viewing zenith angle <= 90 degrees) of an L1A dataset.

        :param dataset_l1a: source L1A dataset
        :return: W_L1C template dataset
        """
        upscan = [
            i for i, vza in enumerate(dataset_l1a['viewing_zenith_angle'].values)
            if vza <= 90
        ]
        l1c_dim_sizes_dict = {
            "wavelength": len(dataset_l1a["wavelength"]),
            "scan": len(upscan),
        }
        dataset_l1c = self.hdsb.create_ds_template(
            l1c_dim_sizes_dict, "W_L1C", propagate_ds=dataset_l1a, ds=dataset_l1a)
        dataset_l1c = dataset_l1c.assign_coords(wavelength=dataset_l1a.wavelength)
        # todo check whether here some additional keywords need to propagated (see land version).
        return dataset_l1c

    def _average_scans_to_series(self, dataset_scans, dataset_series, series_id):
        """
        Average the per-scan geometry/time variables of dataset_scans into the
        per-series variables of dataset_series, excluding scans flagged as
        outliers in the quality flag.
        """
        for variablestring in self._SERIES_VARIABLES:
            temp_arr = np.empty(len(series_id))
            for i in range(len(series_id)):
                ids = np.where(
                    (dataset_scans['series_id'] == series_id[i]) & np.invert(
                        DatasetUtil.unpack_flags(
                            dataset_scans["quality_flag"])["outliers"]))
                temp_arr[i] = np.mean(dataset_scans[variablestring].values[ids])
            dataset_series[variablestring].values = temp_arr

    def l1b_template_from_l1a_dataset_water(self, measurandstring, dataset_l1a):
        """
        Make the water-network L1B template from an L1A dataset, collapsing
        scans to series and averaging geometry/time variables per series.

        :param measurandstring: "radiance" or "irradiance"
        :param dataset_l1a: source L1A dataset
        :return: W_L1B template dataset
        :raises ValueError: if measurandstring is not a recognised measurand
        """
        print(np.unique(dataset_l1a['series_id']))
        l1b_dim_sizes_dict = {
            "wavelength": len(dataset_l1a["wavelength"]),
            "series": len(np.unique(dataset_l1a['series_id'])),
        }
        if measurandstring == "radiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "W_L1B_RAD", propagate_ds=dataset_l1a, ds=dataset_l1a)
        elif measurandstring == "irradiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "W_L1B_IRR", propagate_ds=dataset_l1a, ds=dataset_l1a)
        else:
            raise ValueError("measurandstring must be 'radiance' or 'irradiance'")
        dataset_l1b = dataset_l1b.assign_coords(wavelength=dataset_l1a.wavelength)
        series_id = np.unique(dataset_l1a['series_id'])
        dataset_l1b["series_id"].values = series_id
        self._average_scans_to_series(dataset_l1a, dataset_l1b, series_id)
        return dataset_l1b

    def l1b_template_from_l1a_dataset_land(self, measurandstring, dataset_l1a):
        """
        Make the land-network L1B template from an L1A dataset, collapsing
        scans to series and averaging geometry/time variables per series.

        :param measurandstring: "radiance" or "irradiance"
        :param dataset_l1a: source L1A dataset
        :return: L_L1B template dataset
        :raises ValueError: if measurandstring is not a recognised measurand
        """
        l1b_dim_sizes_dict = {
            "wavelength": len(dataset_l1a["wavelength"]),
            "series": len(np.unique(dataset_l1a['series_id'])),
        }
        if measurandstring == "radiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "L_L1B_RAD", propagate_ds=dataset_l1a, ds=dataset_l1a)
        elif measurandstring == "irradiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "L_L1B_IRR", propagate_ds=dataset_l1a, ds=dataset_l1a)
        else:
            raise ValueError("measurandstring must be 'radiance' or 'irradiance'")
        dataset_l1b = dataset_l1b.assign_coords(wavelength=dataset_l1a.wavelength)
        series_id = np.unique(dataset_l1a['series_id'])
        dataset_l1b["series_id"].values = series_id
        self._average_scans_to_series(dataset_l1a, dataset_l1b, series_id)
        return dataset_l1b

    def l1b_template_from_combine(self, measurementstring, dataset, dataset_SWIR):
        """
        Make the L1B template for a combined VNIR+SWIR dataset: VNIR
        wavelengths up to combine_lim_wav, SWIR wavelengths above it.

        :param measurementstring: "radiance" or "irradiance"
        :param dataset: VNIR dataset (also the propagation source)
        :param dataset_SWIR: SWIR dataset
        :return: L_L1B template dataset
        :raises ValueError: if measurementstring is not a recognised measurand
        """
        wavs_vis = dataset["wavelength"].values
        wavs_swir = dataset_SWIR["wavelength"].values
        lim = self.context.get_config_value("combine_lim_wav")
        wavs = np.append(wavs_vis[np.where(wavs_vis <= lim)],
                         wavs_swir[np.where(wavs_swir > lim)])
        l1b_dim_sizes_dict = {
            "wavelength": len(wavs),
            "series": len(dataset['series']),
        }
        # BUG FIX: the original compared strings with "is" (identity, not
        # equality), which is unreliable and a SyntaxWarning in modern Python.
        if measurementstring == "radiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "L_L1B_RAD", propagate_ds=dataset, ds=dataset)
        elif measurementstring == "irradiance":
            dataset_l1b = self.hdsb.create_ds_template(
                l1b_dim_sizes_dict, "L_L1B_IRR", propagate_ds=dataset, ds=dataset)
        else:
            raise ValueError("measurementstring must be 'radiance' or 'irradiance'")
        dataset_l1b = dataset_l1b.assign_coords(wavelength=wavs)
        return dataset_l1b

    def l1c_from_l1b_dataset(self, dataset_l1b):
        """
        Make the L1C template from an L1B dataset, propagating its metadata.
        Land network: sized per series; water network: sized per unique scan.

        :param dataset_l1b: source L1B dataset
        :return: L1C template dataset
        :raises ValueError: for an unrecognised network
        """
        network = self.context.get_config_value("network").lower()
        if network == "l":
            l1c_dim_sizes_dict = {
                "wavelength": len(dataset_l1b["wavelength"]),
                "series": len(dataset_l1b['series']),
            }
            dataset_l1c = self.hdsb.create_ds_template(
                l1c_dim_sizes_dict, "L_L1C", propagate_ds=dataset_l1b, ds=dataset_l1b)
        elif network == "w":
            l1c_dim_sizes_dict = {
                "wavelength": len(dataset_l1b["wavelength"]),
                "scan": len(np.unique(dataset_l1b['scan'])),
            }
            dataset_l1c = self.hdsb.create_ds_template(
                l1c_dim_sizes_dict, "W_L1C", propagate_ds=dataset_l1b, ds=dataset_l1b)
        else:
            # previously an unknown network fell through to a NameError
            raise ValueError("unknown network: %s" % network)
        dataset_l1c = dataset_l1c.assign_coords(wavelength=dataset_l1b.wavelength)
        return dataset_l1c

    def l1ctemp_dataset(self, dataset_l1b, dataset_l1b_irr):
        """
        Make an L1C template sized by the irradiance dataset's series,
        propagating metadata from the (radiance) L1B dataset.

        :param dataset_l1b: radiance L1B dataset (propagation source)
        :param dataset_l1b_irr: irradiance L1B dataset (sizes series/scan dim)
        :return: L1C template dataset
        :raises ValueError: for an unrecognised network
        """
        network = self.context.get_config_value("network").lower()
        if network == "l":
            l1c_dim_sizes_dict = {
                "wavelength": len(dataset_l1b["wavelength"]),
                "series": len(dataset_l1b_irr['series']),
            }
            dataset_l1c = self.hdsb.create_ds_template(
                l1c_dim_sizes_dict, "L_L1C", propagate_ds=dataset_l1b, ds=dataset_l1b)
        elif network == "w":
            l1c_dim_sizes_dict = {
                "wavelength": len(dataset_l1b["wavelength"]),
                "scan": len(np.unique(dataset_l1b_irr['series'])),
            }
            dataset_l1c = self.hdsb.create_ds_template(
                l1c_dim_sizes_dict, "W_L1C", propagate_ds=dataset_l1b, ds=dataset_l1b)
        else:
            raise ValueError("unknown network: %s" % network)
        dataset_l1c = dataset_l1c.assign_coords(wavelength=dataset_l1b.wavelength)
        return dataset_l1c

    def l1d_from_l1c_dataset(self, datasetl1c):
        """
        Make the W_L1D template from an L1C dataset (water network only).
        Returns None for the land network, which has no L1D level.

        :param datasetl1c: source L1C dataset
        :return: W_L1D template dataset, or None for land
        :raises ValueError: for an unrecognised network
        """
        network = self.context.get_config_value("network").lower()
        if network == "l":
            # Land has no L1D product. (The original fell through to a
            # NameError on the return statement in this case.)
            print("No L1D level for land")
            return None
        elif network == "w":
            l1d_dim_sizes_dict = {
                "wavelength": len(datasetl1c["wavelength"]),
                "scan": len(datasetl1c["scan"]),
            }
            dataset_l1d = self.hdsb.create_ds_template(
                l1d_dim_sizes_dict, "W_L1D", propagate_ds=datasetl1c, ds=datasetl1c)
            dataset_l1d = dataset_l1d.assign_coords(wavelength=datasetl1c.wavelength)
            return dataset_l1d
        raise ValueError("unknown network: %s" % network)

    def l2_from_l1d_dataset(self, datasetl1d):
        """
        Make the W_L2A template from an L1D dataset, collapsing scans to
        series (water network only). Note that here scans are kept when
        quality_flag == 1, unlike the outlier-flag test used at L1B.

        :param datasetl1d: source L1D dataset
        :return: W_L2A template dataset
        :raises ValueError: if the network is not the water network
        """
        if self.context.get_config_value("network").lower() != "w":
            raise ValueError("l2_from_l1d_dataset is only defined for the water network")
        l2a_dim_sizes_dict = {
            "wavelength": len(datasetl1d["wavelength"]),
            "series": len(np.unique(datasetl1d['series_id'])),
        }
        dataset_l2a = self.hdsb.create_ds_template(
            l2a_dim_sizes_dict, "W_L2A", propagate_ds=datasetl1d, ds=datasetl1d)
        dataset_l2a = dataset_l2a.assign_coords(wavelength=datasetl1d.wavelength)
        series_id = np.unique(datasetl1d['series_id'])
        dataset_l2a["series_id"].values = series_id
        for variablestring in self._SERIES_VARIABLES:
            temp_arr = np.empty(len(series_id))
            for i in range(len(series_id)):
                ids = np.where((datasetl1d['series_id'] == series_id[i]) &
                               (datasetl1d['quality_flag'] == 1))
                temp_arr[i] = np.mean(datasetl1d[variablestring].values[ids])
            dataset_l2a[variablestring].values = temp_arr
        return dataset_l2a

    def l2_from_l1c_dataset(self, datasetl1c):
        """
        Make the L_L2A template from an L1C dataset (land network only).

        :param datasetl1c: source L1C dataset
        :return: L_L2A template dataset
        :raises ValueError: if the network is not the land network
        """
        if self.context.get_config_value("network").lower() != "l":
            raise ValueError("l2_from_l1c_dataset is only defined for the land network")
        l2a_dim_sizes_dict = {
            "wavelength": len(datasetl1c["wavelength"]),
            "series": len(datasetl1c['series_id']),
        }
        dataset_l2a = self.hdsb.create_ds_template(
            l2a_dim_sizes_dict, "L_L2A", propagate_ds=datasetl1c, ds=datasetl1c)
        dataset_l2a = dataset_l2a.assign_coords(wavelength=datasetl1c.wavelength)
        return dataset_l2a
def create_test_ds(ds_format):
    """
    Returns sample ds with random data

    :type ds_format: str
    :param ds_format: format string of dataset

    :return: test ds
    :rtype: xarray.Dataset
    """
    context = setup_test_context()
    dsb = HypernetsDSBuilder(context=context)

    dim_sizes_dict = dsb.create_empty_dim_sizes_dict(ds_format)
    variable_names = dsb.return_ds_format_variable_names(ds_format)

    # pick the dim sizes matching the network ("W_*" formats are water network)
    dim_values = TEST_DS_DIM_SIZES_W if ds_format[0] == "W" else TEST_DS_DIM_SIZES_L
    for k in dim_sizes_dict.keys():
        dim_sizes_dict[k] = dim_values[k]

    ds = dsb.create_ds_template(dim_sizes_dict, ds_format)

    for variable_name in variable_names:
        if variable_name == "wavelength":
            # 3 nm steps in the VNIR, 10 nm steps in the SWIR
            wavelength_data = np.concatenate(
                (np.arange(400, 1000, 3), np.arange(1000, 1700 + 10, 10)))
            ds = ds.assign_coords(
                coords={
                    "wavelength": ds.wavelength.copy(data=wavelength_data).variable
                })
        elif variable_name == "bandwidth":
            ds[variable_name].data = np.random.normal(
                1.0, 0.5, len(ds[variable_name].data))
        elif variable_name == "acquisition_time":
            ds[variable_name].data = np.arange(
                10000, 10000 + len(ds[variable_name].data), dtype=int)
        # geometry data
        elif "angle" in variable_name:
            ds[variable_name].data = np.linspace(30, 60, len(ds[variable_name].data))
        # geometry data
        elif "acceleration" in variable_name:
            ds[variable_name].data = np.random.normal(
                1.0, 0.5, ds[variable_name].data.shape)
        # observation data
        # BUG FIX: the original used "if u: ... if cov: ... else: ..." so the
        # else branch overwrote the uncertainty ("u_*") data just assigned;
        # uncertainty and covariance variables both get normal-distributed data.
        elif "reflectance" in variable_name:
            if variable_name[0] == "u" or variable_name[:3] == "cov":
                ds[variable_name].data = np.random.normal(
                    1.0, 0.5, ds[variable_name].data.shape)
            else:
                ds[variable_name].data = np.round(
                    np.random.rand(*ds[variable_name].data.shape), 3)
        elif "radiance" in variable_name:
            if variable_name[0] == "u" or variable_name[:3] == "cov":
                ds[variable_name].data = np.random.normal(
                    1.0, 0.5, ds[variable_name].data.shape)
            else:
                ds[variable_name].data = np.round(
                    np.random.rand(*ds[variable_name].data.shape) * 100, 3)
        elif "digital_number" in variable_name:
            if variable_name[0] == "u" or variable_name[:3] == "cov":
                ds[variable_name].data = np.random.normal(
                    1.0, 0.5, ds[variable_name].data.shape)
            else:
                ds[variable_name].data = (
                    np.random.rand(*ds[variable_name].data.shape) * 200).astype(int)
        # any other variable (e.g. quality_flag) keeps its template fill value

    return ds