def predict_using_model(patch_dir, model_file, method, window_size):
    '''
    Defines a workflow that will perform the prediction step on a given EOPatch.

    For a given EOPatch, use the specified model to apply the prediction step.

    Parameters:
        - patch_dir: the directory that contains the patch
        - model_file: the path to the model file.
        - method: the local normalization method, one of 'min', 'median' or 'mean'.
          This should be the same as the one used to train the model.
        - window_size: the window_size used in the local normalization step.
          Should be the same as that used to train the model.

    Returns:
        Nothing. Updates the EOPatch on disk.
    '''
    path = str(patch_dir)

    # Tasks: load the patch, normalize it locally, run the model, save the result in place
    save = SaveTask(path=path, overwrite_permission=OverwritePermission.OVERWRITE_PATCH)
    load_task = LoadTask(path=path)
    local_norm = LocalNormalization()
    detect_plastics = DetectPlastics(model_file=model_file)

    workflow = LinearWorkflow(load_task, local_norm, detect_plastics, save)
    workflow.execute({
        local_norm: {
            'method': method,
            'window_size': window_size
        }
    })
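# Example usage (a minimal sketch): the patch directory, model path and
# normalization settings below are hypothetical and must match the values
# used when the model was trained.
predict_using_model(
    patch_dir='data/feature_0/',            # assumed EOPatch location
    model_file='models/plastic_model.pkl',  # assumed model path
    method='min',
    window_size=10,
)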
def test_get_tasks(self):
    in_task = InputTask()
    inc_task = Inc()
    pow_task = Pow()

    task_names = ['InputTask', 'Inc', 'Pow']
    workflow_tasks = [in_task, inc_task, pow_task]
    eow = LinearWorkflow(*workflow_tasks)

    returned_tasks = eow.get_tasks()

    # check if tasks are present
    for task_name in task_names:
        self.assertIn(task_name, returned_tasks.keys())

    # check if tasks still work
    arguments_dict = {
        in_task: {'val': 2},
        inc_task: {'d': 2},
        pow_task: {'n': 3}
    }

    res_workflow = eow.execute(arguments_dict)
    res_workflow_value = [res_workflow[key] for key in res_workflow.keys()][0]

    for idx, task in enumerate(workflow_tasks):
        if idx == 0:
            res_tasks_value = task.execute(**arguments_dict[task])
        else:
            res_tasks_value = task.execute(res_tasks_value, **arguments_dict[task])

    self.assertEqual(res_workflow_value, res_tasks_value)
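# A minimal sketch (an assumption, not the actual test fixtures) of what the
# helper tasks used in these tests could look like, inferred from the expected
# results: InputTask returns its input value, Inc adds d (default 1) and Pow
# raises its input to the n-th power.
from eolearn.core import EOTask


class InputTask(EOTask):
    def execute(self, *, val=None):
        return val


class Inc(EOTask):
    def execute(self, x, *, d=1):
        return x + d


class Pow(EOTask):
    def execute(self, x, *, n=1):
        return x ** n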
def test_get_tasks(self):
    in_task = InputTask()
    inc_task = Inc()

    task_names = ['InputTask', 'Inc', 'Inc_1', 'Inc_2']
    eow = LinearWorkflow(in_task, inc_task, inc_task, inc_task)

    returned_tasks = eow.get_tasks()

    # check if tasks are present
    self.assertEqual(sorted(task_names), sorted(returned_tasks))

    # check if tasks still work
    arguments_dict = {in_task: {'val': 2}, inc_task: {'d': 2}}

    res_workflow = eow.execute(arguments_dict)
    res_workflow_value = list(res_workflow.values())

    res_tasks_values = []
    for idx, task in enumerate(returned_tasks.values()):
        res_tasks_values = [
            task.execute(*res_tasks_values, **arguments_dict.get(task, {}))
        ]

    self.assertEqual(res_workflow_value, res_tasks_values)
def test_linear_workflow(self):
    in_task = InputTask()
    in_task_name = 'My input task'
    inc_task = Inc()
    pow_task = Pow()
    eow = LinearWorkflow((in_task, in_task_name), inc_task, inc_task, pow_task)

    res = eow.execute({
        in_task: {'val': 2},
        inc_task: {'d': 2},  # Note that this will assign value only to one instance of Inc task
        pow_task: {'n': 3}
    })
    self.assertEqual(res[pow_task], (2 + 2 + 1) ** 3)

    task_map = eow.get_tasks()
    self.assertTrue(
        in_task_name in task_map,
        "A task with name '{}' should be amongst tasks".format(in_task_name)
    )
    self.assertEqual(
        task_map[in_task_name], in_task,
        "A task with name '{}' should map into {}".format(in_task_name, in_task)
    )
def process_feature(feature, feature_index):
    '''A function to download a given target pixel and its surroundings as an EOPatch.

    Parameters:
        feature (GeoSeries): A row from the GeoDataFrame produced by load_fetures_from_file
        feature_index (int): The integer used in saving the EOPatch to disk.

    Returns:
        The resulting EOPatch.
    '''
    save = SaveTask(path=f'{base_dir}/feature_{feature_index}/',
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    train_test_workflow = LinearWorkflow(
        input_task, true_color, add_l2a, ndvi, ndwi, add_fdi,
        cloud_detection, water_detection, combine_mask, save
    )

    feature_result = train_test_workflow.execute({
        input_task: {
            'bbox': BBox(bounds.iloc[feature_index], bbox_list[0].crs),
            'time_interval': [feature.date_start, feature.date_end]
        },
        combine_mask: {
            'use_water': False  # (target.reduced_label != 'Timber')
        },
        add_fdi: {
            'band_layer': USE_BANDS,
            'band_names': band_names
        }
    })
    patch = feature_result.eopatch()
    return patch
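# Example usage (a minimal sketch): iterate over the rows of a hypothetical
# GeoDataFrame 'features_gdf' (with date_start/date_end columns) and build one
# EOPatch per row. The globals referenced inside process_feature (base_dir,
# input_task, bounds, band_names, ...) must already be defined.
for idx, feature in features_gdf.iterrows():
    process_feature(feature, idx)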
def test_linear_workflow(self):
    in_task = InputTask()
    inc_task = Inc()
    pow_task = Pow()
    eow = LinearWorkflow((in_task, 'task name'), inc_task, inc_task, pow_task)

    res = eow.execute({
        in_task: {'val': 2},
        inc_task: {'d': 2},  # Note that this will assign value only to one instance of Inc task
        pow_task: {'n': 3}
    })
    self.assertEqual(res[pow_task], (2 + 2 + 1) ** 3)
def test_linear_workflow(self):
    in_task = InputTask()
    inc_task = Inc()
    pow_task = Pow()
    eow = LinearWorkflow(in_task, inc_task, pow_task)

    res = eow.execute({
        in_task: {'val': 2},
        inc_task: {'d': 2},
        pow_task: {'n': 3}
    })
    self.assertEqual(res[pow_task], (2 + 2) ** 3)
def get_and_process_patch(bounds, time_range, base_dir, index):
    '''
    Defines a workflow that will download and process a specific EOPatch.

    The pipeline has the following steps:
        - Download data
        - Calculate NDVI
        - Calculate NDWI
        - Calculate FDI
        - Add cloud mask
        - Add water mask
        - Combine all masks
        - Perform local normalization
        - Save the results.

    Parameters:
        - bounds: The bounding box of the EOPatch we wish to process
        - time_range: An array of [start_time, end_time]. Any satellite pass
          in that range will be processed.
        - base_dir: the directory to save the patches to
        - index: An index to label this patch

    Returns:
        The EOPatch for this region and time range.
    '''
    save = SaveTask(path=f'{base_dir}/feature_{index}/',
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    add_fdi = CalcFDI()
    water_detection = WaterDetector()
    combine_mask = CombineMask()
    local_norm = LocalNormalization()

    fetch_workflow = LinearWorkflow(
        input_task, true_color, add_l2a,
        ndvi_task(), ndwi_task(), add_fdi,
        cloud_classifier_task(), water_detection,
        combine_mask, local_norm, save
    )

    feature_result = fetch_workflow.execute({
        input_task: {
            'bbox': BBox(bounds, CRS.WGS84),
            'time_interval': time_range
        },
        combine_mask: {
            'use_water': False
        },
        local_norm: {
            'method': 'min',
            'window_size': 10,
        }
    })
    patch = feature_result.eopatch()
    return patch
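# Example usage (a minimal sketch with hypothetical bounds and dates): download
# and process a single patch covering the given WGS84 bounding box.
patch = get_and_process_patch(
    bounds=(4.50, 51.90, 4.60, 52.00),        # (min_x, min_y, max_x, max_y), hypothetical
    time_range=['2020-06-01', '2020-06-30'],  # hypothetical date range
    base_dir='./patches',
    index=0,
)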
def get_elevation(self, bounds):
    INSTANCE_ID = '4aaea2ec-3a2c-4e1c-8a51-851e220d0273'
    roi = BBox(bbox=bounds, crs=CRS.WGS84)
    layer = 'MAPZEN_DEM'
    time_interval = ('2019-01-01', '2019-06-01')

    add_dem = DEMWCSInput(layer=layer, instance_id=INSTANCE_ID)
    input_task = S2L1CWCSInput(layer=layer, resx='30m', resy='30m', instance_id=INSTANCE_ID)

    workflow = LinearWorkflow(input_task, add_dem)
    result = workflow.execute({
        input_task: {
            'bbox': roi,
            'time_interval': time_interval
        }
    })
    eopatch = list(result.values())[0]
    return eopatch
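# Example usage (a minimal sketch): 'downloader' is assumed to be an instance
# of the class that defines get_elevation; the bounding box is hypothetical.
bounds = (14.0, 45.8, 14.2, 46.0)  # (min_x, min_y, max_x, max_y) in WGS84
dem_patch = downloader.get_elevation(bounds)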
                          count_val_sh, save)

for idx, bbox in enumerate(bbox_list[patchIDs]):
    # define additional parameters of the workflow
    extra_param = {
        add_data: {'bbox': bbox, 'time_interval': time_interval},
        save: {'eopatch_folder': 'eopatch_{}'.format(idx)}
    }
    workflow.execute(extra_param)

print('Download finished')

#%% Check the IS_VALID npy array
#isvalid = np.load('./eopatches_large/eopatch_0/mask/IS_VALID.npy')
#print(np.sum(isvalid))
#print(np.size(isvalid) - np.count_nonzero(isvalid))

#%% See the structure of a selected EOPatch
EOPatch.load('./eopatches_large/eopatch_3/')

#%% Section 5
                                     ValidDataCoveragePredicate(cloud_coverage_threshold))

water_detection = WaterDetector()

# Define the EOWorkflow
workflow = LinearWorkflow(
    download_task,
    calculate_ndwi,
    add_nominal_water,
    add_valid_mask,
    add_coverage,
    remove_cloudy_scenes,
    water_detection
)

# Run the workflow
time_interval = [input_json["startDate"], input_json["endDate"]]
result = workflow.execute({
    download_task: {
        'bbox': dam_bbox,
        'time_interval': time_interval
    },
})

eopatch = list(result.values())[-1]

output = []
for i in range(len(eopatch.scalar['WATER_LEVEL'])):
    numpyData = {
        "measurement_date": eopatch.timestamp[i].strftime('%d/%m/%Y'),
        "bbox": eopatch.bbox.geometry.bounds,
        "crs": eopatch.bbox.crs.epsg,
    eopatch.add_feature(FeatureType.SCALAR, 'WATER_LEVEL', water_levels[..., np.newaxis])
    return eopatch

water_det = WaterDetector()

workflow = LinearWorkflow(
    input_task,
    add_ndwi,
    cloud_det,
    add_nominal_water,
    add_valmask,
    add_coverage,
    remove_cloudy_scenes,
    water_det
)

time_interval = ['2015-01-01', '2018-08-31']
result = workflow.execute({
    input_task: {
        'bbox': dam_bbox,
        'time_interval': time_interval
    },
})
patch = list(result.values())[-1]

from skimage.filters import sobel
from skimage.morphology import disk
from skimage.morphology import erosion, dilation, opening, closing, white_tophat

def plot_rgb_w_water(eopatch, idx):
    ratio = np.abs(eopatch.bbox.max_x - eopatch.bbox.min_x) / np.abs(eopatch.bbox.max_y - eopatch.bbox.min_y)
    fig, ax = plt.subplots(figsize=(ratio * 10, 10))
# ---------------------------------------------------------------------------------------------------------- #
# eo-learn request
input_task = SentinelHubInputTask(
    data_collection=DataCollection.SENTINEL2_L1C,
    additional_data=[(FeatureType.DATA, 'CLP'), (FeatureType.MASK, 'dataMask')],
    time_difference=datetime.timedelta(seconds=1),
    resolution=60,
    config=config
)

timelapse = LinearWorkflow(input_task)

try:
    result = timelapse.execute({
        input_task: {
            'bbox': full_bbox,
            'time_interval': time_interval
        }
    })
except:
    continue

# parse cloud images and register useful blocks
blocks_per_timestamp = {}
l = result.eopatch()
for i in range(len(l.data['CLP'])):
    print('Parsing image {} of {}.'.format(i + 1, len(l.data['CLP'])))
    sys.stdout.flush()

    # get cloud mask and mask of valid pixels
    cloud_mask = np.asarray(l.data['CLP'][i], dtype=np.float32) / 255.0
    mask = np.asarray(l.mask['dataMask'][i], dtype=np.int32)