Code example #1
def process_feature(feature, feature_index):
    '''A function to download a given target pixel and its surroundings as an EOPatch

            Parameters:
                    feature (GeoSeries): A row from the GeoDataFrame produced by load_features_from_file
                    feature_index (int): The integer used in saving the EOPatch to disk.

            Returns:
                    The downloaded EOPatch.
    '''

    save = SaveTask(path=f'{base_dir}/feature_{feature_index}/',
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)
    train_test_workflow = LinearWorkflow(input_task, true_color, add_l2a, ndvi, ndwi, add_fdi,
                                         cloud_detection, water_detection, combine_mask, save)

    feature_result = train_test_workflow.execute({
        input_task: {
            'bbox': BBox(bounds.iloc[feature_index], bbox_list[0].crs),
            'time_interval': [feature.date_start, feature.date_end]
        },
        combine_mask: {
            'use_water': False  # (target.reduced_label != 'Timber')
        },
        add_fdi: {
            'band_layer': USE_BANDS,
            'band_names': band_names
        }
    })
    patch = feature_result.eopatch()
    return patch
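For orientation, a driver loop over the features might look like the following sketch. The helper load_features_from_file is only referenced in the docstring above; its signature, and the features_path variable, are assumptions rather than code from the source.

# Hypothetical usage; load_features_from_file and features_path are assumed, not shown above
features = load_features_from_file(features_path)  # GeoDataFrame with date_start/date_end columns
for idx, (_, row) in enumerate(features.iterrows()):
    patch = process_feature(row, idx)  # saved under base_dir/feature_{idx}/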
Code example #2
    def test_linear_workflow(self):
        in_task = InputTask()
        in_task_name = 'My input task'
        inc_task = Inc()
        pow_task = Pow()
        eow = LinearWorkflow((in_task, in_task_name), inc_task, inc_task,
                             pow_task)
        res = eow.execute({
            in_task: {
                'val': 2
            },
            inc_task: {
                'd': 2
            },  # Note that this will assign value only to one instance of Inc task
            pow_task: {
                'n': 3
            }
        })
        self.assertEqual(res[pow_task], (2 + 2 + 1)**3)

        task_map = eow.get_tasks()
        self.assertTrue(
            in_task_name in task_map,
            "A task with name '{}' should be amongst tasks".format(
                in_task_name))
        self.assertEqual(
            task_map[in_task_name], in_task,
            "A task with name '{}' should map into {}".format(
                in_task_name, in_task))
Code example #3
    def test_get_tasks(self):
        in_task = InputTask()
        inc_task = Inc()
        pow_task = Pow()

        task_names = ['InputTask', 'Inc', 'Pow']
        workflow_tasks = [in_task, inc_task, pow_task]
        eow = LinearWorkflow(*workflow_tasks)

        returned_tasks = eow.get_tasks()

        # check if tasks are present
        for task_name in task_names:
            self.assertIn(task_name, returned_tasks.keys())

        # check if tasks still work
        arguments_dict = {
            in_task: {'val': 2},
            inc_task: {'d': 2},
            pow_task: {'n': 3}
        }

        res_workflow = eow.execute(arguments_dict)
        res_workflow_value = list(res_workflow.values())[0]

        for idx, task in enumerate(workflow_tasks):
            if idx == 0:
                res_tasks_value = task.execute(**arguments_dict[task])
            else:
                res_tasks_value = task.execute(res_tasks_value, **arguments_dict[task])

        self.assertEqual(res_workflow_value, res_tasks_value)
Code example #4
    def test_get_tasks(self):
        in_task = InputTask()
        inc_task = Inc()

        task_names = ['InputTask', 'Inc', 'Inc_1', 'Inc_2']
        eow = LinearWorkflow(in_task, inc_task, inc_task, inc_task)

        returned_tasks = eow.get_tasks()

        # check if tasks are present
        self.assertEqual(sorted(task_names), sorted(returned_tasks))

        # check if tasks still work
        arguments_dict = {in_task: {'val': 2}, inc_task: {'d': 2}}

        res_workflow = eow.execute(arguments_dict)
        res_workflow_value = list(res_workflow.values())

        res_tasks_values = []
        for task in returned_tasks.values():
            res_tasks_values = [
                task.execute(*res_tasks_values, **arguments_dict.get(task, {}))
            ]

        self.assertEqual(res_workflow_value, res_tasks_values)
Code example #5
def predict_using_model(patch_dir, model_file, method, window_size):
    ''' Defines a workflow that will perform the prediction step on a given EOPatch.

        For a given EOPatch, use the specified model to apply the prediction step.

        Parameters:

            - patch_dir: the directory that contains the patch
            - model_file: the path to the model file.
            - method: The local normalization method, one of 'min', 'median' or 'mean'. This should be the same as the one used to train the model.
            - window_size: The window_size used in the local normalization step. Should be the same as that used to train the model.

        Returns:
            Nothing. Updates the EOPatch on disk.
    '''

    path = str(patch_dir)
    save = SaveTask(path=path,
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)
    load_task = LoadTask(path=path)
    local_norm = LocalNormalization()

    detect_plastics = DetectPlastics(model_file=model_file)
    workflow = LinearWorkflow(load_task, local_norm, detect_plastics, save)
    workflow.execute(
        {local_norm: {
            'method': method,
            'window_size': window_size
        }})
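A plausible driver over a directory of saved patches; the layout (one EOPatch per subdirectory of base_dir) and the model filename are assumptions for illustration.

from pathlib import Path

# Hypothetical batch prediction; base_dir layout and the model path are assumed
for patch_dir in sorted(Path(base_dir).iterdir()):
    predict_using_model(patch_dir, 'plastic_model.joblib', method='min', window_size=10)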
Code example #6
    def test_linear_workflow(self):
        in_task = InputTask()
        inc_task = Inc()
        pow_task = Pow()
        eow = LinearWorkflow((in_task, 'task name'), inc_task, inc_task, pow_task)
        res = eow.execute({
            in_task: {'val': 2},
            inc_task: {'d': 2},  # Note that this will assign value only to one instance of Inc task
            pow_task: {'n': 3}
        })
        self.assertEqual(res[pow_task], (2 + 2 + 1) ** 3)
Code example #7
    def test_linear_workflow(self):
        in_task = InputTask()
        inc_task = Inc()
        pow_task = Pow()
        eow = LinearWorkflow(in_task, inc_task, pow_task)
        res = eow.execute({
            in_task: {'val': 2},
            inc_task: {'d': 2},
            pow_task: {'n': 3}
        })
        self.assertEqual(res[pow_task], (2 + 2) ** 3)
Code example #8
def get_and_process_patch(bounds, time_range, base_dir, index):
    ''' Defines a workflow that will download and process a specific EOPatch.

        The pipeline has the following steps:
            - Download data
            - Calculate NDVI
            - Calculate NDWI
            - Calculate FDI
            - Add cloud mask
            - Add water mask
            - Combine all masks
            - Perform local normalization
            - Save the results.

        Parameters:
            - bounds: The bounding box of the EOPatch we wish to process
            - time_range: An array of [start_time, end_time]. Any satellite pass in that range will be processed.
            - base_dir: the directory to save the patches to
            - index: An index to label this patch
        
        Returns:
            The EOPatch for this region and time range.
    '''
    save = SaveTask(path=f'{base_dir}/feature_{index}/',
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    add_fdi = CalcFDI()
    water_detection = WaterDetector()
    combine_mask = CombineMask()
    local_norm = LocalNormalization()

    fetch_workflow = LinearWorkflow(input_task, true_color, add_l2a,
                                    ndvi_task(), ndwi_task(), add_fdi,
                                    cloud_classifier_task(), water_detection,
                                    combine_mask, local_norm, save)

    feature_result = fetch_workflow.execute({
        input_task: {
            'bbox': BBox(bounds, CRS.WGS84),
            'time_interval': time_range
        },
        combine_mask: {
            'use_water': False
        },
        local_norm: {
            'method': 'min',
            'window_size': 10,
        }
    })
    patch = feature_result.eopatch()
    return patch
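An illustrative single call; the coordinates and dates are placeholders, not values from the source.

# Placeholder bounds (min_lon, min_lat, max_lon, max_lat) and time range, for illustration only
patch = get_and_process_patch(
    bounds=(13.35, 45.50, 13.45, 45.60),
    time_range=['2020-06-01', '2020-06-30'],
    base_dir='./patches',
    index=0,
)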
Code example #9
def get_tiffs_to_eopatches_workflow(config: TiffsToEopatchConfig,
                                    delete_tiffs: bool = False) -> EOWorkflow:
    """ Set up workflow to ingest tiff files into EOPatches """

    # Set up credentials in sh config
    sh_config = set_sh_config(config)

    import_bands = [(ImportFromTiff(
        (FeatureType.DATA, band),
        folder=f's3://{config.bucket_name}/{config.tiffs_folder}',
        config=sh_config), f'Import band {band}')
                    for band in config.band_names]
    import_clp = (ImportFromTiff(
        (FeatureType.DATA, config.clp_name),
        folder=f's3://{config.bucket_name}/{config.tiffs_folder}',
        config=sh_config), f'Import {config.clp_name}')

    import_mask = (ImportFromTiff(
        (FeatureType.MASK, config.mask_name),
        folder=f's3://{config.bucket_name}/{config.tiffs_folder}',
        config=sh_config), f'Import {config.mask_name}')

    rearrange_bands = (RearrangeBands(), 'Swap time and band axis')
    add_timestamps = (AddTimestampsUpdateTime(
        f's3://{config.bucket_name}/{config.tiffs_folder}'), 'Load timestamps')

    merge_bands = (MergeFeatureTask(
        input_features={FeatureType.DATA: config.band_names},
        output_feature=(FeatureType.DATA, config.data_name)),
                   'Merge band features')

    remove_bands = (RemoveFeature(
        features={FeatureType.DATA: config.band_names}), 'Remove bands')

    rename_mask = (RenameFeature((FeatureType.MASK, config.mask_name,
                                  config.is_data_mask)), 'Rename is data mask')

    calculate_clm = (CloudMasking(), 'Get CLM mask from CLP')

    save_task = (SaveTask(
        path=f's3://{config.bucket_name}/{config.eopatches_folder}',
        config=sh_config,
        overwrite_permission=OverwritePermission.OVERWRITE_FEATURES),
                 'Save EOPatch')

    filenames = [f'{band}.tif' for band in config.band_names] + \
                [f'{config.mask_name}.tif', f'{config.clp_name}.tif', 'userdata.json']
    delete_files = (DeleteFiles(path=config.tiffs_folder,
                                filenames=filenames), 'Delete batch files')

    workflow = [
        *import_bands, import_clp, import_mask, rearrange_bands,
        add_timestamps, merge_bands, remove_bands, rename_mask, calculate_clm,
        save_task
    ]

    if delete_tiffs:
        workflow.append(delete_files)

    return LinearWorkflow(*workflow)
Code example #10
    def _load_with_index(self):
        """
        Split image to a number of EOPatches(lazy load data) with given splitter,
        and index each EOPatch using two dimension list.

        :param feature: Feature to be loaded
        :type feature: (FeatureType, feature_name) or FeatureType
        """
        add_data = ImportFromGeogenius(feature=self.feature,
                                       geogenius_image=self.geogenius_image)
        tile_rows, tile_columns = self._get_tile_rows_columns()
        self.patch_index = [[0] * tile_columns for _ in range(tile_rows)]
        index_feature = IndexTask(patch_index=self.patch_index)
        workflow = LinearWorkflow(add_data, index_feature)
        execution_args = []
        bbox_list = np.array(self.splitter.get_pixel_bbox_list())
        for idx, bbox in enumerate(bbox_list):
            row = idx % tile_rows
            column = idx // tile_rows
            execution_args.append({
                add_data: {
                    'pixelbox': bbox
                },
                index_feature: {
                    "row": row,
                    "column": column
                }
            })
        executor = EOExecutor(workflow, execution_args)
        executor.run(workers=1, multiprocess=False)
        return self.patch_index
Code example #11
def get_gsaa_to_eopatch_workflow(config: GsaaToEopatchConfig) -> EOWorkflow:
    # set up AWS credentials
    sh_config = set_sh_config(config)

    # load patch
    load_task = LoadTask(path=f's3://{config.bucket_name}/{config.eopatches_folder}', config=sh_config)
    # add original vectors to patch
    vec2vec = DB2Vector(database=config.database,
                        user=config.user, password=config.password,
                        host=config.host, port=config.port, crs=config.crs,
                        vector_output_feature=config.vector_feature)
    # get extent mask from vector
    vec2ras = VectorToRaster(config.vector_feature,
                             config.extent_feature,
                             values=1, raster_shape=(config.width, config.height),
                             no_data_value=config.no_data_value,
                             buffer=config.buffer_poly, write_to_existing=False)
    # get boundary mask from extent mask
    ras2bound = Extent2Boundary(config.extent_feature,
                                config.boundary_feature,
                                structure=disk(config.disk_radius))
    # get distance from extent mask
    ras2dist = Extent2Distance(config.extent_feature,
                               config.distance_feature,
                               normalize=True)
    # save new features
    save_task = SaveTask(path=f's3://{config.bucket_name}/{config.eopatches_folder}',
                         features=[config.vector_feature,
                                   config.extent_feature,
                                   config.boundary_feature,
                                   config.distance_feature],
                         overwrite_permission=OverwritePermission.OVERWRITE_FEATURES, config=sh_config)

    return LinearWorkflow(load_task, vec2vec, vec2ras, ras2bound, ras2dist, save_task)
Code example #12
File: post_processing.py Project: bedr/orangegis
def get_exec_args(workflow: LinearWorkflow, eopatch_list: List[str], config: PostProcessConfig) -> List[dict]:
    """ Utility function to get execution arguments """
    exec_args = []
    tasks = workflow.get_tasks()

    load_bbox = LoadTask(path=f's3://{config.bucket_name}/{config.eopatches_folder}', features=[FeatureType.BBOX])

    for name in tqdm(eopatch_list):
        single_exec_dict = {}

        try:
            eop = load_bbox.execute(eopatch_folder=name)

            for task_name, task in tasks.items():
                if isinstance(task, ExportToTiff):
                    single_exec_dict[task] = dict(filename=f'{name}-{eop.bbox.crs.epsg}.tiff')

                if isinstance(task, (LoadTask, SaveTask)):
                    single_exec_dict[task] = dict(eopatch_folder=name)

            exec_args.append(single_exec_dict)

        except ResourceNotFound as exc:
            print(f'{name} - {exc}')

    return exec_args
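Taken together with get_post_processing_workflow from example #15 below (same post_processing.py), the execution arguments feed straight into an EOExecutor. A minimal sketch, assuming config and eopatch_list are already defined:

# Sketch: pair the workflow factory with its execution arguments
workflow = get_post_processing_workflow(config)
exec_args = get_exec_args(workflow, eopatch_list, config)
executor = EOExecutor(workflow, exec_args)
executor.run(workers=4, multiprocess=True)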
Code example #13
def create_workflow(resolution, land_cover_data, output_bucket):
    """
    Helper function for creating the EO-Learn workflow
    """

    # Maximum allowed cloud cover of original ESA tiles
    maxcc = 0.2

    # Task to get S2 L2A images
    input_task = S2L2AWCSInput(
        layer='TRUE_COLOR',
        resx='{}m'.format(resolution), # resolution x
        resy='{}m'.format(resolution), # resolution y
        maxcc=maxcc, # maximum allowed cloud cover of original ESA tiles
    )

    # Task to rasterize ground-truth from Corine Land Cover 2018
    rasterization_task = VectorToRaster(land_cover_data,
                                        (FeatureType.MASK_TIMELESS, 'LAND_COVER'),
                                        values_column='LABEL_ID',
                                        raster_shape=(FeatureType.MASK, 'IS_DATA'),
                                        raster_dtype=np.uint8)

    # Task to compute pixelwise median values over the time-series
    get_median_pixel_task = MedianPixel((FeatureType.DATA, 'TRUE_COLOR'),
                                        feature_out=(FeatureType.DATA_TIMELESS, 'MEDIAN_PIXEL'))

    save_task = SaveToGcp((FeatureType.DATA_TIMELESS, 'MEDIAN_PIXEL'),
                          (FeatureType.MASK_TIMELESS, 'LAND_COVER'),
                          output_bucket)

    # Putting workflow together
    workflow = LinearWorkflow(input_task, rasterization_task, get_median_pixel_task, save_task)

    return workflow, input_task, save_task
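Returning the input and save tasks alongside the workflow lets the caller supply per-execution arguments. A sketch with placeholder values, assuming bbox, land_cover_data, and output_bucket are defined elsewhere:

# Sketch: execute the returned workflow for one bbox at 10 m resolution
workflow, input_task, save_task = create_workflow(10, land_cover_data, output_bucket)
result = workflow.execute({
    input_task: {'bbox': bbox, 'time_interval': ('2019-04-01', '2019-10-01')},
})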
Code example #14
    def test_exceptions(self):

        for params in [(None, ), (InputTask(), 'a string'),
                       (InputTask(), ('something', InputTask())),
                       ((InputTask(), 'name', 'something else'), ),
                       (('task', 'name'), )]:
            with self.assertRaises(ValueError):
                LinearWorkflow(*params)
Code example #15
File: post_processing.py Project: bedr/orangegis
def get_post_processing_workflow(config: PostProcessConfig) -> LinearWorkflow:
    sh_config = set_sh_config(config)

    load_task = LoadTask(path=f's3://{config.bucket_name}/{config.eopatches_folder}',
                         features=[config.feature_extent,
                                   config.feature_boundary,
                                   (FeatureType.MASK, 'CLM'),
                                   (FeatureType.MASK, 'IS_DATA'),
                                   FeatureType.TIMESTAMP,
                                   FeatureType.META_INFO,
                                   FeatureType.BBOX],
                         config=sh_config), 'Load EOPatch'

    merge_extent_tasks = [(TemporalMerging(feature=config.feature_extent,
                                           feature_merged=(FeatureType.DATA_TIMELESS,
                                                           f'{config.feature_extent[1]}_{month}'),
                                           woy_start=woy_start, woy_end=woy_end,
                                           percentile=config.percentile,
                                           max_cloud_coverage=config.max_cloud_coverage), f'Merge EXTENT for {month}')
                          for month, (woy_start, woy_end) in config.time_intervals.items()]

    merge_boundary_tasks = [(TemporalMerging(feature=config.feature_boundary,
                                             feature_merged=(FeatureType.DATA_TIMELESS,
                                                             f'{config.feature_boundary[1]}_{month}'),
                                             woy_start=woy_start, woy_end=woy_end,
                                             percentile=config.percentile,
                                             max_cloud_coverage=config.max_cloud_coverage),
                             f'Merge BOUNDARY for {month}')
                            for month, (woy_start, woy_end) in config.time_intervals.items()]

    combine_tasks = [(CombineUpsample(
        feature_extent=(FeatureType.DATA_TIMELESS, f'{config.feature_extent[1]}_{month}'),
        feature_boundary=(FeatureType.DATA_TIMELESS, f'{config.feature_boundary[1]}_{month}'),
        feature_output=(FeatureType.DATA_TIMELESS, f'PREDICTED_{config.model_version}_{month}'),
        scale_factor=config.scale_factor, disk_size=config.disk_size), f'Combine masks for {month}')
        for month in config.time_intervals]

    save_task = SaveTask(path=f's3://{config.bucket_name}/{config.eopatches_folder}',
                         features=[(FeatureType.DATA_TIMELESS, f'{config.feature_extent[1]}_{month}')
                                   for month in config.time_intervals] +
                                  [(FeatureType.DATA_TIMELESS, f'{config.feature_boundary[1]}_{month}')
                                   for month in config.time_intervals] +
                                  [(FeatureType.DATA_TIMELESS, f'PREDICTED_{config.model_version}_{month}')
                                   for month in config.time_intervals],
                         overwrite_permission=OverwritePermission.OVERWRITE_FEATURES,
                         config=sh_config), 'Save Task'

    export_tasks = [(ExportToTiff(feature=(FeatureType.DATA_TIMELESS, f'PREDICTED_{config.model_version}_{month}'),
                                  folder=f's3://{config.bucket_name}/{config.tiffs_folder}/{month}/',
                                  image_dtype=np.float32), f'Export tiffs for {month}')
                    for month in config.time_intervals]

    workflow = LinearWorkflow(load_task, *merge_extent_tasks, *merge_boundary_tasks,
                              *combine_tasks, save_task, *export_tasks)

    return workflow
Code example #16
File: views.py Project: OchiengHosea/FloodMonitor
    def get_elevation(self, bounds):
        INSTANCE_ID = '4aaea2ec-3a2c-4e1c-8a51-851e220d0273'
        roi = BBox(bbox=bounds, crs=CRS.WGS84)
        layer = 'MAPZEN_DEM'
        time_interval = ('2019-01-01', '2019-06-01')
        add_dem = DEMWCSInput(layer=layer, instance_id=INSTANCE_ID)
        input_task = S2L1CWCSInput(layer=layer,
                                   resx='30m',
                                   resy='30m',
                                   instance_id=INSTANCE_ID)
        workflow = LinearWorkflow(input_task, add_dem)
        result = workflow.execute(
            {input_task: {
                'bbox': roi,
                'time_interval': time_interval
            }})
        eopatch = list(result.values())[0]

        return eopatch
Code example #17
def get_create_and_add_lpis_workflow(country, year, out_path):
    """
    Creates an EOWorkflow that:
    1. creates an empty patch
    2. add LPIS vector data fetched from Geopedia 
    3. calculate the ratio between area of all fields and area of EOPatch
    4. save newly created EOPatch to disk
    """
    print(f'Preparing EOWorkflow for {country} and year {year}:')

    # 1. create empty patch
    create = CreatePatch()
    print('   1. Creating empty EOPatch')

    # 2. Add LPIS vector data
    layer_id = GEOPEDIA_LPIS_LAYERS[f'{country}_LPIS_{year}']
    ftr_name = f'LPIS_{year}'
    year_filter = (
        GEOPEDIA_LPIS_YEAR_NAME[country],
        year) if GEOPEDIA_LPIS_YEAR_NAME[country] is not None else None

    add_lpis = AddGeopediaVectorFeature(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        layer=layer_id,
        year_filter=year_filter,
        drop_duplicates=True)
    print(f'   2. Adding LPIS vector data to feature {ftr_name}')
    print(f'      -> Geopedia layer ID: {layer_id}')
    if year_filter is not None:
        print(f'      -> with year filter applied: {year_filter}')

    # 3. Add Area Ratio
    area_ratio = AddAreaRatio(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        (FeatureType.SCALAR_TIMELESS, 'FIELD_AREA_RATIO'))
    print('   3. Calculating the field area ratio')

    # 4. Save EOPatch to disk
    out_dir = str(out_path + '/' + country + '/' + str(year))
    save = SaveToDisk(out_dir,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)
    print(f'   4. Saving EOPatches to: {out_dir}')

    workflow = LinearWorkflow(create,
                              add_lpis,
                              area_ratio,
                              save,
                              task_names={
                                  create: 'create',
                                  add_lpis: 'add_lpis',
                                  area_ratio: 'area_ratio',
                                  save: 'save'
                              })

    return workflow
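Because explicit task_names are passed, the save task can be fetched by name via get_tasks() when executing. A sketch with a placeholder country and year:

# Sketch: run the LPIS workflow for one new patch ('Denmark'/2018 are placeholders)
workflow = get_create_and_add_lpis_workflow('Denmark', 2018, './lpis')
save_task = workflow.get_tasks()['save']
result = workflow.execute({save_task: {'eopatch_folder': 'patch_0'}})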
Code example #18
    def save_patch(self,
                   save_folder,
                   feature=None,
                   overwrite_permission=OverwritePermission.OVERWRITE_PATCH,
                   compress_level=0):
        """
        Save indexed EOPatches to a folder.

        :param save_folder: folder to save eopatches
        :type save_folder: str
        :param feature: Feature to be exported
        :type feature: (FeatureType, feature_name) or FeatureType
        :param overwrite_permission: Permission to overwrite an existing EOPatch.
            Permissions are in the following hierarchy:
            - `ADD_ONLY` - Only new features can be added, anything that is already saved cannot be changed.
            - `OVERWRITE_FEATURES` - Overwrite only data for features which have to be saved. The remaining content of
             saved EOPatch will stay unchanged.
            - `OVERWRITE_PATCH` - Overwrite entire content of saved EOPatch and replace it with the new content.
        :type overwrite_permission: OverwritePermission
        :param compress_level: The level of data compression, specified as an integer from 0 (no compression)
            to 9 (highest compression).
        :type compress_level: int
        """
        if not feature:
            feature = self.feature
        if not self._is_loaded():
            self._load_with_index(feature=feature)
        tile_rows, tile_columns = self._get_tile_rows_columns()
        self._assure_folder_exist(save_folder)
        save_task = SaveToDisk(save_folder,
                               features=[feature, FeatureType.BBOX],
                               overwrite_permission=overwrite_permission,
                               compress_level=compress_level)
        workflow = LinearWorkflow(save_task)
        execution_args = []
        for row in range(tile_rows):
            for column in range(tile_columns):
                execution_args.append({
                    save_task: {
                        'eopatch_folder':
                        'patch_{row}_{column}'.format(row=row, column=column),
                        'eopatch':
                        self.patch_index[row][column]
                    }
                })
        executor = EOExecutor(workflow, execution_args)
        executor.run(workers=1, multiprocess=False)
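A hedged usage sketch; the owning class and its constructor are not shown in this snippet, so the loader object below is assumed.

# Hypothetical caller: 'loader' is an instance of the (unshown) class defining save_patch
loader.save_patch('./eopatches', feature=(FeatureType.DATA, 'BANDS'), compress_level=2)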
Code example #19
File: test_eoexecutor.py Project: ntag1618/eo-learn
    def test_keyboardInterrupt(self):
        exception_task = KeyboardExceptionTask()
        workflow = LinearWorkflow(exception_task)
        execution_args = []
        for _ in range(10):
            execution_args.append({exception_task: {'arg1': 1}})

        run_args = [{
            'workers': 1
        }, {
            'workers': 3,
            'multiprocess': True
        }, {
            'workers': 3,
            'multiprocess': False
        }]
        for arg in run_args:
            self.assertRaises(KeyboardInterrupt,
                              EOExecutor(workflow, execution_args).run, **arg)
Code example #20
def download_patches(path, shp, bbox_list, indexes):
    add_data = S2L1CWCSInput(
        layer='BANDS-S2-L1C',
        feature=(FeatureType.DATA, 'BANDS'),  # save under name 'BANDS'
        resx='10m',  # resolution x
        resy='10m',  # resolution y
        maxcc=0.8,  # maximum allowed cloud cover of original ESA tiles
    )
    path_out = path + '/Slovenia/'
    if not os.path.isdir(path_out):
        os.makedirs(path_out)
    save = SaveTask(path_out,
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    workflow = LinearWorkflow(add_data, save)

    time_interval = ['2017-01-01',
                     '2017-12-31']  # time interval for the SH request

    execution_args = []
    for idx, bbox in zip(indexes, bbox_list[indexes]):
        execution_args.append({
            add_data: {
                'bbox': bbox,
                'time_interval': time_interval
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(idx)
            }
        })

    start_time = time.time()
    executor = EOExecutor(workflow, execution_args, save_logs=True)
    executor.run(workers=1, multiprocess=False)
    running = '{} Running time: {}\n'.format(dt.datetime.now(),
                                             time.time() - start_time)
    print(running)
    with open('timing.txt', 'a') as file:
        file.write(running)
Code example #21
def load_LPIS(country, year, path, no_patches):
    patch_location = path + '/{}/'.format(country)
    load = LoadFromDisk(patch_location)
    save_path_location = patch_location
    if not os.path.isdir(save_path_location):
        os.makedirs(save_path_location)
    save = SaveToDisk(save_path_location,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    # workflow_data = get_create_and_add_lpis_workflow(country, year, save_path_location)

    name_of_feature = 'LPIS_{}'.format(year)

    groups_to_number, crops_to_number = create_mapping(country)

    layer_id = GEOPEDIA_LPIS_LAYERS[f'{country}_LPIS_{year}']
    ftr_name = f'LPIS_{year}'
    year_filter = (
        GEOPEDIA_LPIS_YEAR_NAME[country],
        year) if GEOPEDIA_LPIS_YEAR_NAME[country] is not None else None
    add_lpis = AddGeopediaVectorFeature(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        layer=layer_id,
        year_filter=year_filter,
        drop_duplicates=True)
    area_ratio = AddAreaRatio(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        (FeatureType.SCALAR_TIMELESS, 'FIELD_AREA_RATIO'))
    fixlpis = FixLPIS(feature=name_of_feature, country=country)

    rasterize = VectorToRaster(vector_input=(FeatureType.VECTOR_TIMELESS,
                                             name_of_feature),
                               raster_feature=(FeatureType.MASK_TIMELESS,
                                               name_of_feature),
                               values=None,
                               values_column='GROUP',
                               raster_shape=(FeatureType.DATA, 'BANDS'),
                               raster_dtype=np.int16,
                               no_data_value=np.nan)

    add_group = AddGroup(crops_to_number, name_of_feature)
    remove_dtf = RemoveFeature(FeatureType.VECTOR_TIMELESS, name_of_feature)

    exclude = WorkflowExclude(area_ratio, fixlpis, add_group, rasterize,
                              remove_dtf)

    workflow = LinearWorkflow(load, add_lpis, exclude, save)

    execution_args = []
    for i in range(no_patches):
        execution_args.append({
            load: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            }
        })
    # Choose how many processes/threads to run here; workers=None uses the maximum number of processors

    executor = EOExecutor(workflow,
                          execution_args,
                          save_logs=True,
                          logs_folder='ExecutionLogs')
    # executor.run(workers=None, multiprocess=True)
    executor.run()
Code example #22
    geopedia_data = AddGeopediaFeature(
        (FeatureType.MASK_TIMELESS, 'TREE_COVER'),
        layer='ttl2275',
        theme='QP',
        raster_value=raster_value)
    # task to compute median values
    get_median_pixel = MedianPixel(
        (FeatureType.DATA, 'TRUE-COLOR-S2-L2A'),
        feature_out=(FeatureType.DATA_TIMELESS, 'MEDIAN_PIXEL'))
    # task to save to disk
    save = SaveTask(op.join(output_path, 'eopatch'),
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH,
                    compress_level=2)

    # initialize workflow
    workflow = LinearWorkflow(input_task, geopedia_data, get_median_pixel,
                              save)

    # use a function to run this workflow on a single bbox
    def execute_workflow(index):
        bbox = bbox_splitter.bbox_list[index]
        info = bbox_splitter.info_list[index]

        patch_name = 'eopatch_{0}_row-{1}_col-{2}'.format(
            index, info['index_x'], info['index_y'])

        results = workflow.execute({
            input_task: {
                'bbox': bbox,
                'time_interval': time_interval
            },
            save: {
Code example #23
    'IS_VALID',  # name of existing mask
    'VALID_COUNT'  # name of output scalar
)

# TASK FOR SAVING TO OUTPUT (if needed)
path_out = './eopatches_large/'
if not os.path.isdir(path_out):
    os.makedirs(path_out)
save = SaveToDisk(path_out,
                  overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

#%% Section 4
# Execution of workflow: filling patches with data
print('Starting download')

workflow = LinearWorkflow(add_data, add_clm, ndvi, ndwi, norm, add_sh_valmask,
                          count_val_sh, save)

for idx, bbox in enumerate(bbox_list[patchIDs]):

    # define additional parameters of the workflow
    extra_param = {
        add_data: {
            'bbox': bbox,
            'time_interval': time_interval
        },
        save: {
            'eopatch_folder': 'eopatch_{}'.format(idx)
        }
    }

    workflow.execute(extra_param)
Code example #24
            raster_dtype=np.uint8)

        add_valid_mask = AddValidDataMaskTask(
            predicate=calculate_valid_data_mask)
        add_coverage = AddValidDataCoverage()

        cloud_coverage_threshold = 0.05
        remove_cloudy_scenes = SimpleFilterTask(
            (FeatureType.MASK, 'VALID_DATA'),
            ValidDataCoveragePredicate(cloud_coverage_threshold))

        water_detection = WaterDetector()

        # Define the EOWorkflow
        workflow = LinearWorkflow(download_task, calculate_ndwi,
                                  add_nominal_water, add_valid_mask,
                                  add_coverage, remove_cloudy_scenes,
                                  water_detection)

        # Run the workflow
        time_interval = [input_json["startDate"], input_json["endDate"]]

        result = workflow.execute({
            download_task: {
                'bbox': dam_bbox,
                'time_interval': time_interval
            },
        })

        eopatch = list(result.values())[-1]

        output = []
Code example #25
            np.count_nonzero(mask) /
            np.count_nonzero(eopatch.mask_timeless['NOMINAL_WATER'])
            for mask in water_masks
        ])

        eopatch.add_feature(FeatureType.MASK, 'WATER_MASK', water_masks)
        eopatch.add_feature(FeatureType.SCALAR, 'WATER_LEVEL',
                            water_levels[..., np.newaxis])

        return eopatch


water_det = WaterDetector()

workflow = LinearWorkflow(input_task, add_ndwi, cloud_det, add_nominal_water,
                          add_valmask, add_coverage, remove_cloudy_scenes,
                          water_det)

time_interval = ['2015-01-01', '2018-08-31']
result = workflow.execute({
    input_task: {
        'bbox': dam_bbox,
        'time_interval': time_interval
    },
})

patch = list(result.values())[-1]

from skimage.filters import sobel
from skimage.morphology import disk
from skimage.morphology import erosion, dilation, opening, closing, white_tophat
Code example #26
    # Fill EOPatches with data from geogenius platform:
    # Define ImportFromGeogenius task
    add_data = ImportFromGeogenius(feature=(FeatureType.DATA, 'BANDS'),
                                   geogenius_image=img)
    # Define Save EOPatch Task
    path_out = get_current_folder("eopatches")
    if not os.path.isdir(path_out):
        os.makedirs(path_out)
    save = SaveToDisk(path_out,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    # patch = add_data.execute(bbox=bbox_list[patchIDs][0])
    # save.execute(patch, eopatch_folder="1")

    # Define workflow
    workflow = LinearWorkflow(add_data, save)

    # Execute the workflow
    # define additional parameters of the workflow
    execution_args = []
    for idx, bbox in enumerate(bbox_list[patchIDs]):
        execution_args.append({
            add_data: {
                'bbox': bbox
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(idx)
            }
        })

    executor = EOExecutor(workflow, execution_args, save_logs=True)
Code example #27
    ],
                     crs=CRS.WGS84)

    # ---------------------------------------------------------------------------------------------------------- #
    # Cloud-based image filtering to find viable blocks                                                          #
    # ---------------------------------------------------------------------------------------------------------- #

    # eo-learn request
    input_task = SentinelHubInputTask(
        data_collection=DataCollection.SENTINEL2_L1C,
        additional_data=[(FeatureType.DATA, 'CLP'),
                         (FeatureType.MASK, 'dataMask')],
        time_difference=datetime.timedelta(seconds=1),
        resolution=60,
        config=config)
    timelapse = LinearWorkflow(input_task)

    try:
        result = timelapse.execute(
            {input_task: {
                'bbox': full_bbox,
                'time_interval': time_interval
            }})
    except Exception:
        continue

    # parse cloud images and register useful blocks
    blocks_per_timestamp = {}
    eopatch = result.eopatch()
    for i in range(len(eopatch.data['CLP'])):
        print('Parsing image {} of {}.'.format(i + 1, len(eopatch.data['CLP'])))
Code example #28
    path = '/home/beno/Documents/test/Slovenia/'
    size_small = (337, 333)
    size_big = (505, 500)

    load = LoadTask(path, lazy_loading=True)
    save_path_location = path
    if not os.path.isdir(save_path_location):
        os.makedirs(save_path_location)
    save = SaveTask(save_path_location,
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    dem = SentinelHubDemTask((FeatureType.DATA_TIMELESS, 'DEM'), size=size_big)
    grad = AddGradientTask((FeatureType.DATA_TIMELESS, 'DEM'),
                           (FeatureType.DATA_TIMELESS, 'INCLINATION'))

    workflow = LinearWorkflow(load, dem, grad, save)

    no_patches = 1061

    execution_args = []
    for i in range(2, no_patches + 2):
        execution_args.append({
            load: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            },
        })
    executor = EOExecutor(workflow,
Code example #29
def download_data(path_save,
                  coords_top,
                  coords_bot,
                  patch_n,
                  s_date,
                  e_date,
                  debug=False):
    # before moving onto actual tasks, check setup
    check_sentinel_cfg()

    [lat_left_top, lon_left_top] = coords_top
    [lat_right_bot, lon_right_bot] = coords_bot
    # TASK FOR BAND DATA
    # add a request for B(B02), G(B03), R(B04), NIR (B08), SWIR1(B11), SWIR2(B12)
    # from default layer 'ALL_BANDS' at 10m resolution
    # Here we also do a simple filter of cloudy scenes. A detailed cloud cover
    # detection is performed in the next step
    custom_script = "return [B02, B03, B04, B08, B11, B12];"
    add_data = S2L1CWCSInput(
        layer="BANDS-S2-L1C",
        feature=(FeatureType.DATA, "BANDS"),  # save under name 'BANDS'
        # custom url for 6 specific bands
        custom_url_params={CustomUrlParam.EVALSCRIPT: custom_script},
        resx="10m",  # resolution x
        resy="10m",  # resolution y
        maxcc=0.1,  # maximum allowed cloud cover of original ESA tiles
    )

    # TASK FOR CLOUD INFO
    # cloud detection is performed at 80m resolution
    # and the resulting cloud probability map and mask
    # are scaled to EOPatch's resolution
    cloud_classifier = get_s2_pixel_cloud_detector(average_over=2,
                                                   dilation_size=1,
                                                   all_bands=False)
    add_clm = AddCloudMaskTask(
        cloud_classifier,
        "BANDS-S2CLOUDLESS",
        cm_size_y="80m",
        cm_size_x="80m",
        cmask_feature="CLM",  # cloud mask name
        cprobs_feature="CLP",  # cloud prob. map name
    )

    # TASKS FOR CALCULATING NEW FEATURES
    # NDVI: (B08 - B04)/(B08 + B04)
    # NDWI: (B03 - B08)/(B03 + B08)
    # NORM: sqrt(B02^2 + B03^2 + B04^2 + B08^2 + B11^2 + B12^2)
    ndvi = NormalizedDifferenceIndex("NDVI", "BANDS/3", "BANDS/2")
    ndwi = NormalizedDifferenceIndex("NDWI", "BANDS/1", "BANDS/3")
    norm = EuclideanNorm("NORM", "BANDS")

    # TASK FOR VALID MASK
    # validate pixels using SentinelHub's cloud detection mask and region of acquisition
    add_sh_valmask = AddValidDataMaskTask(
        SentinelHubValidData(),
        "IS_VALID"  # name of output mask
    )

    # TASK FOR COUNTING VALID PIXELS
    # count number of valid observations per pixel using valid data mask
    count_val_sh = CountValid(
        "IS_VALID",
        "VALID_COUNT"  # name of existing mask  # name of output scalar
    )

    # TASK FOR SAVING TO OUTPUT (if needed)
    path_save = Path(path_save)
    path_save.mkdir(exist_ok=True)
    # if not os.path.isdir(path_save):
    #     os.makedirs(path_save)
    save = SaveToDisk(path_save,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    # Define the workflow
    workflow = LinearWorkflow(add_data, add_clm, ndvi, ndwi, norm,
                              add_sh_valmask, count_val_sh, save)
    # Execute the workflow

    # time interval for the SH request
    # TODO: need to check if specified time interval is valid
    time_interval = [s_date, e_date]

    # define additional parameters of the workflow
    execution_args = []

    path_EOPatch = path_save / f"eopatch_{patch_n}"

    execution_args.append({
        add_data: {
            "bbox":
            BBox(
                ((lon_left_top, lat_left_top), (lon_right_bot, lat_right_bot)),
                crs=CRS.WGS84,
            ),
            "time_interval":
            time_interval,
        },
        save: {
            "eopatch_folder": path_EOPatch.stem
        },
    })

    executor = EOExecutor(workflow, execution_args, save_logs=True)
    if debug:
        print("Downloading Satellite data ...")

    executor.run(workers=2, multiprocess=False)
    if executor.get_failed_executions():
        raise RuntimeError("EOExecutor failed in finishing tasks!")

    if debug:
        executor.make_report()
    if debug:
        print("Satellite data is downloaded")
    return path_EOPatch
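An illustrative call; the coordinates and dates are placeholders, not values from the source.

# Placeholder inputs for illustration; coords are [lat, lon] pairs per the unpacking above
patch_path = download_data(
    './data',
    coords_top=[46.05, 14.45],   # top-left corner
    coords_bot=[45.95, 14.55],   # bottom-right corner
    patch_n=0,
    s_date='2019-06-01',
    e_date='2019-06-30',
    debug=True,
)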
Code example #30
         band_names.index('B08')])

    ndbi = NormalizedDifferenceIndexTask(
        (FeatureType.DATA, 'BANDS'), (FeatureType.DATA, 'NDBI'),
        [band_names.index('B11'),
         band_names.index('B08')])

    #SaveTask
    save = SaveTask(SAVE_PATH,
                    overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    #Workflow
    workflow = LinearWorkflow(
        add_data,
        ndvi,
        ndwi,
        ndbi,
        save,
    )

    time_interval = {
        '18/19': ['2018-11-01', '2019-05-01'],
        '19/20': ['2019-11-01', '2020-05-01'],
    }

    downloaded = os.listdir(SAVE_PATH)
    downloaded = list(map(lambda x: int(x.split('_')[1]), downloaded))
    execution_args = []

    for id, row in gdf.loc[~gdf.index.isin(downloaded), :].iterrows():
        bbox = row.geometry.bounds