def orchestrate(dir: str) -> None:
    """
    Run the full orchestration workflow for a hard-coded ROI and time range.

    Reads the data stores from ``<dir>/frascati_data_stores.yml``, writes the
    generated configuration to ``<dir>/config.yml`` and runs the PriorEngine
    once per time step.

    :param dir: Directory containing 'frascati_data_stores.yml'; the
        generated 'config.yml' is written there as well.
    :return: None
    """
    # NOTE(review): the parameter name 'dir' shadows the builtin; it is kept
    # unchanged for backward compatibility with existing callers.
    roi = 'POLYGON ((48.0 11.3, 48.2 11.3, 48.1 11.1, 48.0 11.0, 48.0 11.3))'
    start_time_as_string = '2017-01-01'
    end_time_as_string = '2017-01-10'
    data_access_component = DataAccessComponent()
    data_access_component.read_data_stores(dir + '/frascati_data_stores.yml')
    priors_sm_dir = data_access_component.get_data_urls(
        roi, start_time_as_string, end_time_as_string, 'SoilMoisture')[0]
    priors_veg_dir = data_access_component.get_data_urls(
        roi, start_time_as_string, end_time_as_string, 'Vegetation')[0]
    config = Orchestrator._get_config(priors_sm_dir=priors_sm_dir,
                                      priors_veg_dir=priors_veg_dir,
                                      roi=roi,
                                      start_time=start_time_as_string,
                                      end_time=end_time_as_string)
    config_file_name = dir + '/config.yml'
    # Fix: the original passed a bare open(...) to yaml.dump and leaked the
    # file handle; close it deterministically with a context manager.
    with open(config_file_name, 'w+') as config_stream:
        yaml.dump(config, config_stream)
    config_as_dict = AttributeDict(**config)
    # todo get SAR pre-processed data
    # todo get MODIS data
    # todo get S2 pre-processed data
    parameters = config_as_dict['Inference']['parameters']
    start_time = datetime.strptime(config_as_dict['General']['start_time'],
                                   "%Y-%m-%d")
    end_time = datetime.strptime(config_as_dict['General']['end_time'],
                                 "%Y-%m-%d")
    time_interval = config_as_dict['General']['time_interval']
    time_interval_unit = config_as_dict['General']['time_interval_unit']
    current_time = start_time
    prior_file_dicts = []
    # One PriorEngine run per time step; results are collected per step.
    while current_time < end_time:
        time_string = current_time.strftime("%Y-%m-%d")
        prior_engine = PriorEngine(datestr=time_string,
                                   variables=parameters,
                                   config=config_file_name)
        prior_file_dicts.append(prior_engine.get_priors())
        current_time = Orchestrator._increase_time_step(
            current_time, time_interval, time_interval_unit)
def _get_prior_files(self):
    """
    Collect the prior-file dicts produced by the PriorEngine, one per
    time step.

    NOTE(review): this method references several names that are not defined
    in its scope (current_time, end_time, variables, config_file_name,
    time_interval, time_interval_unit) — presumably these were meant to be
    instance attributes or parameters; confirm against the enclosing class
    before using this method.

    :return: list of prior-file dicts, one entry per time step
    """
    prior_file_dicts = []
    while current_time < end_time:
        time_string = current_time.strftime("%Y-%m-%d")
        prior_engine = PriorEngine(datestr=time_string,
                                   variables=variables,
                                   config=(config_file_name))
        prior_file_dicts.append(prior_engine.get_priors())
        current_time = utils.increase_time_step(current_time,
                                                time_interval,
                                                time_interval_unit)
    return prior_file_dicts
def get_priors_from_config_file(start_time: str, end_time: str,
                                priors_directory: str,
                                parameter_list: List[str],
                                config_file: str):
    """
    Run the PriorEngine once per day from start_time to end_time (inclusive).

    :param start_time: first date to process, formatted '%Y-%m-%d'
    :param end_time: last date to process, formatted '%Y-%m-%d'
    :param priors_directory: directory created to receive the prior output
    :param parameter_list: names of the variables to retrieve priors for
    :param config_file: path to the prior-engine configuration file
    """
    current = datetime.datetime.strptime(start_time, '%Y-%m-%d')
    last = datetime.datetime.strptime(end_time, '%Y-%m-%d')
    create_dir(priors_directory)
    one_day = datetime.timedelta(days=1)
    while current <= last:
        engine = PriorEngine(config=config_file,
                             datestr=current.strftime('%Y-%m-%d'),
                             variables=parameter_list)
        engine.get_priors()
        current += one_day
def get_priors_old(temp_dir: str, roi: str, start_time: str, end_time: str,
                   time_interval: str, priors_directory: str,
                   parameter_list: List[str]):
    """
    Create a prior-engine configuration file, then run the PriorEngine for
    every day from start_time to end_time (inclusive).

    :param temp_dir: directory in which the configuration file is created
    :param roi: region of interest
    :param start_time: first date to process, formatted '%Y-%m-%d'
    :param end_time: last date to process, formatted '%Y-%m-%d'
    :param time_interval: time interval written into the configuration file
    :param priors_directory: directory created to receive the prior output
    :param parameter_list: names of the variables to retrieve priors for
    """
    config_file = create_config_file(temp_dir, roi, start_time, end_time,
                                     time_interval, priors_directory,
                                     parameter_list)
    current = datetime.datetime.strptime(start_time, '%Y-%m-%d')
    last = datetime.datetime.strptime(end_time, '%Y-%m-%d')
    create_dir(priors_directory)
    step = datetime.timedelta(days=1)
    while current <= last:
        engine = PriorEngine(config=config_file,
                             datestr=current.strftime('%Y-%m-%d'),
                             variables=parameter_list)
        engine.get_priors()
        current += step
def test_prior_engine():
    """
    Check that PriorEngine returns the expected prior entries for
    'sm', 'lai' and 'cab' on 2017-03-01.
    """
    prior_engine = PriorEngine(datestr="2017-03-01",
                               variables=['sm', 'lai', 'cab'],
                               config=CONFIG_FILE)
    priors = prior_engine.get_priors()
    # Fix: the original 'assert 3, len(priors.keys())' asserted the constant
    # 3 (always true) and used the length only as the failure message;
    # compare the two values instead.
    assert 3 == len(priors.keys())
    assert 'cab' in priors.keys()
    assert 1 == len(priors['cab'])
    assert 'database' in priors['cab']
    assert ('./test/output_dir/Priors_cab_060_global.vrt' ==
            priors['cab']['database'])
    assert 'lai' in priors.keys()
    assert 1 == len(priors['lai'])
    assert 'database' in priors['lai']
    assert ('./test/output_dir/Priors_lai_060_global.vrt' ==
            priors['lai']['database'])
    assert 'sm' in priors.keys()
    assert 'climatology' in priors['sm']
    assert 'coarse' in priors['sm']
    assert ('./test/output_dir/sm_prior_climatology_20170301.vrt' ==
            priors['sm']['climatology'])
def process_prior(
        self, parameters: List[str], time: Union[str, datetime],
        state_grid: np.array, inv_cov: bool = True
) -> (np.array, scipy.sparse.coo_matrix, np.array):
    """
    Build the prior mean state vector and per-pixel (co)variance matrices
    for the given parameters at the given time.

    Priors are retrieved via the PriorEngine configured by
    self._prior_engine_config_file; each prior's dataset is reprojected onto
    self._reference_dataset. Band 1 supplies the mean values and band 2 the
    uncertainties, which are squared onto the matrix diagonal.

    :param parameters: variable names to retrieve priors for
    :param time: date as a '%Y-%m-%d' string, or a datetime (converted)
    :param state_grid: boolean mask selecting the pixels of interest
        (assumed boolean — it is used as a fancy index and summed for the
        pixel count; TODO confirm)
    :param inv_cov: if True, non-zero matrix entries are inverted before
        returning (i.e. inverse variances are returned)
    :return: tuple of (mean state vector, block-diagonal matrix built from
        the per-pixel matrices, the per-pixel matrices themselves)
    """
    if type(time) is datetime:
        time = datetime.strftime(time, "%Y-%m-%d")
    prior_engine = PriorEngine(datestr=time, variables=parameters,
                               config=self._prior_engine_config_file)
    priors = prior_engine.get_priors()
    num_pixels = state_grid.sum()
    num_params = len(parameters)
    # State vector interleaves parameters per pixel: entry i::num_params
    # holds parameter i for every selected pixel.
    state_vector_shape = num_pixels * num_params
    matrix_shape = (num_pixels, num_params, num_params)
    mean_state_vector = np.zeros(shape=state_vector_shape, dtype=np.float32)
    matrix = np.zeros(shape=matrix_shape, dtype=np.float32)
    for i, parameter in enumerate(parameters):
        # First value of the per-parameter prior dict is taken as the
        # dataset reference — presumably a VRT path; TODO confirm ordering.
        vrt_dataset = list(priors[parameter].values())[0]
        reprojected_vrt_dataset = reproject_image(vrt_dataset,
                                                  self._reference_dataset)
        data = reprojected_vrt_dataset.GetRasterBand(
            1).ReadAsArray()[state_grid]
        # Clamp to 0.01 to avoid zero/negative prior means.
        data[data < 0.01] = 0.01
        mean_state_vector[i::num_params] = data
        # NOTE(review): the NaN checks run once per parameter iteration and
        # may shrink state_grid mid-loop — confirm this is intended.
        mean_state_vector, state_grid, matrix = \
            _check_state_vector_for_nan(mean_state_vector, state_grid,
                                        matrix, num_params)
        matrix_data = reprojected_vrt_dataset.GetRasterBand(
            2).ReadAsArray()[state_grid]
        matrix_data[matrix_data < 0.01] = 0.01
        # Band 2 is an uncertainty; square it to a variance on the diagonal.
        matrix[:, i, i] = matrix_data**2
        mean_state_vector, state_grid, matrix = \
            _check_matrix_for_nan(mean_state_vector, state_grid, matrix,
                                  num_params)
    if inv_cov:
        # Invert only non-zero entries to avoid division by zero.
        matrix[matrix != 0] = 1. / matrix[matrix != 0]
    return mean_state_vector, block_diag(matrix), matrix
# Script fragment: determine the required priors from the forward-model
# configuration and run the PriorEngine once per day over the period.
# NOTE(review): this chunk references names defined earlier in the script
# (parameters, start, end, configuration_file, script_progress_logger,
# output_root_dir) — it is not self-contained.
required_priors = []
for model in parameters['Inference']['forward_models']:
    if model['data_type'] == 'Sentinel-2':
        # If several Sentinel-2 models are configured, only the last one's
        # required_priors list is kept — confirm this is intended.
        required_priors = model['required_priors']
start_time = datetime.datetime.strptime(start, '%Y-%m-%d')
end_time = datetime.datetime.strptime(end, '%Y-%m-%d')
# execute the Prior engine for the requested times
time = start_time
num_days = (end_time - start_time).days + 1
i = 0
while time <= end_time:
    print(time)
    PE = PriorEngine(config=configuration_file,
                     datestr=time.strftime('%Y-%m-%d'),
                     variables=required_priors)
    # Progress is logged as a 'from-to' percentage window for this day.
    script_progress_logger.info(
        f'{int((i/num_days) * 100)}-{int(((i+1)/num_days) * 100)}')
    priors = PE.get_priors()
    time = time + datetime.timedelta(days=1)
    i += 1
# create output_dir (if not already exist)
if not os.path.exists(output_root_dir):
    os.makedirs(output_root_dir)
# put the files for the 'vegetation priors' into the proper directory
# NOTE(review): the condition tests for 'General' but the body reads
# 'output_directory' — verify the intended key.
if 'General' in parameters['Prior']:
    directory = parameters['Prior']['output_directory']
    # HACK: shell copy via os.system with unquoted interpolated paths;
    # consider glob + shutil.copy instead.
    os.system("cp " + directory + "/*.vrt " + output_root_dir + "/")
prior_file.url, os.path.join(s1_priors_dir, prior_file.url.split('/')[-1])) else: priors_to_be_retrieved.append(prior) else: priors_to_be_retrieved.append(prior) if len(priors_to_be_retrieved) == 0: script_progress_logger.info('50-100') else: # execute the Prior engine for the requested times time = start_time num_days = (end_time - start_time).days + 1 i = 0 while time <= end_time: print(time) PE = PriorEngine(config=configuration_file, datestr=time.strftime('%Y-%m-%d'), variables=priors_to_be_retrieved) script_progress_logger.info( f'{int(50+((i/num_days) * 50))}-{int(50+(((i+1)/num_days) * 50))}') priors = PE.get_priors() time = time + datetime.timedelta(days=1) i += 1 # put the files into the proper directory if 'General' in parameters['Prior']: directory = parameters['Prior']['output_directory'] os.system("cp " + directory + "/*.vrt " + s1_priors_dir + "/") script_progress_logger.info('100-100')
'lr': lr }, time_start=t1, time_stop=t2, tstate=tstate, landcover='abc.nc', luts={'roughness': 'nix.lut'}) # run the system aux_data_constraints = [] aux_data_provider = dummy_aux_data_provider.DummyAuxDataProvider( ) # config always used when new instances are created some_aux_data = aux_data_provider.read_aux_data(aux_data_constraints) prior_engine = PriorEngine(config=config, priors={'sm': { 'type': 'climatology' }}) priors = prior_engine.get_priors() #prior_1 = prior_engine.create_prior(some_aux_data) # this command is expected to be executed internally whenever a prior is created #prior_engine.save_prior(prior_1) # TODO prior1 is used nowhere! #prior_id = 'My prior' #prior_2 = prior_engine.get_prior(prior_id) brdf_archive = dummy_brdf_archive.DummyBRDFArchive() brdf_archive.has_brdf_descriptor(config, 'somepara') brdf_descriptor = brdf_archive.get_brdf_descriptor('a', 'b') coarse_res_data_constraints = [] coarse_res_provider = dummy_coarse_res_data_provider.DummyCoarseResDataProvider( )