def load_LPIS(country, year, path, no_patches):
    patch_location = path + '/{}/'.format(country)
    load = LoadFromDisk(patch_location)
    save_path_location = patch_location
    if not os.path.isdir(save_path_location):
        os.makedirs(save_path_location)
    save = SaveToDisk(save_path_location,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    # workflow_data = get_create_and_add_lpis_workflow(country, year, save_path_location)

    name_of_feature = 'LPIS_{}'.format(year)

    groups_to_number, crops_to_number = create_mapping(country)

    layer_id = GEOPEDIA_LPIS_LAYERS[f'{country}_LPIS_{year}']
    ftr_name = f'LPIS_{year}'
    year_filter = (
        GEOPEDIA_LPIS_YEAR_NAME[country],
        year) if GEOPEDIA_LPIS_YEAR_NAME[country] is not None else None
    add_lpis = AddGeopediaVectorFeature(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        layer=layer_id,
        year_filter=year_filter,
        drop_duplicates=True)
    area_ratio = AddAreaRatio(
        (FeatureType.VECTOR_TIMELESS, ftr_name),
        (FeatureType.SCALAR_TIMELESS, 'FIELD_AREA_RATIO'))
    fixlpis = FixLPIS(feature=name_of_feature, country=country)

    rasterize = VectorToRaster(vector_input=(FeatureType.VECTOR_TIMELESS,
                                             name_of_feature),
                               raster_feature=(FeatureType.MASK_TIMELESS,
                                               name_of_feature),
                               values=None,
                               values_column='GROUP',
                               raster_shape=(FeatureType.DATA, 'BANDS'),
                               raster_dtype=np.int16,
                               no_data_value=0)  # NaN cannot be stored in an int16 raster; 0 marks no-data

    add_group = AddGroup(crops_to_number, name_of_feature)
    remove_dtf = RemoveFeature(FeatureType.VECTOR_TIMELESS, name_of_feature)

    exclude = WorkflowExclude(area_ratio, fixlpis, add_group, rasterize,
                              remove_dtf)

    workflow = LinearWorkflow(load, add_lpis, exclude, save)

    execution_args = []
    for i in range(no_patches):
        execution_args.append({
            load: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(i)
            }
        })
    # Choose how many processes/threads to run; workers=None uses all available processors.

    executor = EOExecutor(workflow,
                          execution_args,
                          save_logs=True,
                          logs_folder='ExecutionLogs')
    # executor.run(workers=None, multiprocess=True)
    executor.run()
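

# Illustrative call (a minimal sketch; the country, year and path values are assumptions
# borrowed from the __main__ blocks further down, not part of the original function):
# load_LPIS(country='Slovenija', year=2017, path='/home/beno/Documents/test', no_patches=1)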
Example #2

    """
    Predicate that defines if a frame from EOPatch's time-series is valid or not. Frame is valid, if the
    valid data fraction is above the specified threshold.
    """
    def __init__(self, threshold):
        self.threshold = threshold

    def __call__(self, array):
        coverage = np.sum(array.astype(np.uint8)) / np.prod(array.shape)
        return coverage > self.threshold
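

# Illustrative check (a minimal sketch, not part of the original example): a fully valid
# boolean frame has coverage 1.0 and therefore passes a 0.8 threshold.
_example_frame = np.ones((10, 10, 1), dtype=bool)   # hypothetical all-valid mask frame
assert ValidDataFractionPredicate(0.8)(_example_frame)  # coverage 1.0 > 0.8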


#%%

# TASK TO LOAD EXISTING EOPATCHES
load = LoadFromDisk(path_out)

# TASK FOR CONCATENATION
concatenate = ConcatenateData('FEATURES', ['BANDS', 'NDVI', 'NDWI', 'NORM'])

# TASK FOR FILTERING OUT TOO CLOUDY SCENES
# keep frames with > 80 % valid coverage
valid_data_predicate = ValidDataFractionPredicate(0.8)
filter_task = SimpleFilterTask((FeatureType.MASK, 'IS_VALID'),
                               valid_data_predicate)

# TASK FOR LINEAR INTERPOLATION
# linear interpolation of full time-series and date resampling
resampled_range = ('2017-01-01', '2017-12-31', 16)
linear_interp = LinearInterpolation(
    'FEATURES',  # name of the feature to interpolate
    resample_range=resampled_range)  # resample the time-series to the uniform date range defined above


if __name__ == '__main__':

    # no_patches = 1085
    no_patches = 1

    path = '/home/beno/Documents/test'
    # path = 'E:/Data/PerceptiveSentinel'

    patch_location = path + '/Slovenija/'
    load = LoadFromDisk(patch_location)

    save_path_location = path + '/Slovenija/'
    if not os.path.isdir(save_path_location):
        os.makedirs(save_path_location)

    save = SaveToDisk(save_path_location,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    addStreamNDVI = AddStreamTemporalFeaturesTask(data_feature='NDVI')
    addStreamSAVI = AddStreamTemporalFeaturesTask(data_feature='SAVI')
    addStreamEVI = AddStreamTemporalFeaturesTask(data_feature='EVI')
    addStreamARVI = AddStreamTemporalFeaturesTask(data_feature='ARVI')
    addStreamSIPI = AddStreamTemporalFeaturesTask(data_feature='SIPI')
    addStreamNDWI = AddStreamTemporalFeaturesTask(data_feature='NDWI')
    no_patches = 7

    path = '/home/beno/Documents/test'
    # path = 'E:/Data/PerceptiveSentinel'

    patch_location = path + '/Slovenia/'
    load = LoadFromDisk(patch_location, lazy_loading=True)

    save_path_location = path + '/Slovenia/'
    if not os.path.isdir(save_path_location):
        os.makedirs(save_path_location)
    save = SaveToDisk(save_path_location,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    execution_args = []
    for patch_id in range(no_patches):
        execution_args.append({
            load: {
                'eopatch_folder': 'eopatch_{}'.format(patch_id)
            },
            save: {
                'eopatch_folder': 'eopatch_{}'.format(patch_id)
            }
        })
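
    # A typical continuation (an illustrative sketch only, mirroring the first example; it
    # assumes a LinearWorkflow named 'workflow' built from the stream-feature tasks above):
    # workflow = LinearWorkflow(load, addStreamNDVI, addStreamSAVI, addStreamEVI,
    #                           addStreamARVI, addStreamSIPI, addStreamNDWI, save)
    # executor = EOExecutor(workflow, execution_args, save_logs=True)
    # executor.run(workers=None, multiprocess=True)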
Example #5

def predict_raster_patch(path_EOPatch, patch_n, scale, debug=False):
    path_EOPatch = Path(path_EOPatch)
    model_path = path_module / "model.pkl"  # 'path_module' is assumed to be a module-level Path defined elsewhere
    model = joblib.load(model_path)

    # TASK TO LOAD EXISTING EOPATCHES
    load = LoadFromDisk(path_EOPatch.parent)

    # TASK FOR CONCATENATION
    concatenate = ConcatenateData("FEATURES",
                                  ["BANDS", "NDVI", "NDWI", "NORM"])

    # TASK FOR FILTERING OUT TOO CLOUDY SCENES
    # keep frames with > 80 % valid coverage
    valid_data_predicate = ValidDataFractionPredicate(0.8)
    filter_task = SimpleFilterTask((FeatureType.MASK, 'IS_VALID'),
                                   valid_data_predicate)

    save = SaveToDisk(path_EOPatch.parent,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    workflow = LinearWorkflow(
        load,
        concatenate,
        filter_task,
        save,
    )

    execution_args = []
    for idx in range(0, 1):
        execution_args.append({
            load: {
                "eopatch_folder": path_EOPatch.stem
            },
            save: {
                "eopatch_folder": path_EOPatch.stem
            },
        })
    if debug:
        print("Saving the features ...")
    executor = EOExecutor(workflow, execution_args, save_logs=False)
    executor.run(workers=5, multiprocess=False)

    if debug:
        executor.make_report()

    # load from disk to determine number of valid pictures
    eopatch = EOPatch.load(path_EOPatch, lazy_loading=True)
    n_pics = eopatch.data["BANDS"].shape[0]

    print(f'Number of valid pictures detected: {n_pics}')

    list_path_raster = []
    for pic_n in range(n_pics):

        # TASK TO LOAD EXISTING EOPATCHES
        load = LoadFromDisk(path_EOPatch.parent)

        # TASK FOR PREDICTION
        predict = PredictPatch(model, (FeatureType.DATA, "FEATURES"), "LBL",
                               pic_n, "SCR")

        # TASK FOR SAVING
        save = SaveToDisk(
            str(path_EOPatch.parent),
            overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

        # TASK TO EXPORT TIFF
        export_tiff = ExportToTiff((FeatureType.MASK_TIMELESS, "LBL"))
        tiff_location = path_EOPatch.parent / "predicted_tiff"

        if not os.path.isdir(tiff_location):
            os.makedirs(tiff_location)

        workflow = LinearWorkflow(load, predict, export_tiff, save)

        # create a list of execution arguments for each patch
        execution_args = []
        path_predict = tiff_location / f"prediction-eopatch_{patch_n}-pic_{pic_n}.tiff"
        for i in range(0, 1):
            execution_args.append({
                load: {
                    "eopatch_folder": path_EOPatch.stem
                },
                export_tiff: {
                    "filename": path_predict
                },
                save: {
                    "eopatch_folder": path_EOPatch.stem
                },
            })

        # create the executor; workers are set in the run() call below
        executor = EOExecutor(workflow, execution_args)

        # uncomment the line below to save the logs in the current directory and produce a report
        # executor = EOExecutor(workflow, execution_args, save_logs=True)
        if debug:
            print("Predicting the land cover ...")
        executor.run(workers=5, multiprocess=False)
        if debug:
            executor.make_report()

        # PATH = path_out / "predicted_tiff" / f"patch{patch_n}"
        path_merged = tiff_location / f"merged_prediction-eopatch_{patch_n}-pic_{pic_n}.tiff"
        if path_merged.exists():
            path_merged.unlink()
        cmd = f"gdal_merge.py -o {path_merged} -co compress=LZW {path_predict}"
        os.system(cmd)

        # save path
        list_path_raster.append(path_merged)

        # Reference colormap things
        lulc_cmap = mpl.colors.ListedColormap([entry.color for entry in LULC])
        lulc_norm = mpl.colors.BoundaryNorm(np.arange(-0.5, 3, 1), lulc_cmap.N)

        size = 20
        fig, ax = plt.subplots(figsize=(2 * size * 1, 1 * size * scale),
                               nrows=1,
                               ncols=2)
        eopatch = EOPatch.load(path_EOPatch, lazy_loading=True)
        im = ax[0].imshow(eopatch.mask_timeless["LBL"].squeeze(),
                          cmap=lulc_cmap,
                          norm=lulc_norm)
        ax[0].set_xticks([])
        ax[0].set_yticks([])
        ax[0].set_aspect("auto")

        fig.subplots_adjust(wspace=0, hspace=0)
        eopatch = EOPatch.load(path_EOPatch, lazy_loading=True)
        ax[1].imshow(
            np.clip(
                eopatch.data["BANDS"][pic_n, :, :, :][..., [2, 1, 0]] * 3.5,
                0, 1))
        ax[1].set_xticks([])
        ax[1].set_yticks([])
        ax[1].set_aspect("auto")
        del eopatch

        if debug:
            print("saving the predicted image ...")
        plt.savefig(path_EOPatch.parent /
                    f"predicted_vs_real_{patch_n}-{pic_n}.png")

    return list_path_raster
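
# Illustrative call (a minimal sketch; the path, patch number and scale values are
# assumptions for demonstration, not taken from the original script):
# raster_paths = predict_raster_patch('/home/beno/Documents/test/Slovenia/eopatch_0',
#                                     patch_n=0, scale=1, debug=True)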
Example #6

ndwi = NormalizedDifferenceIndex('NDWI', 'BANDS/1', 'BANDS/3')
norm = EuclideanNorm('NORM', 'BANDS')

# TASK FOR VALID MASK
# validate pixels using SentinelHub's cloud detection mask and region of acquisition
add_sh_valmask = AddValidDataMaskTask(
    SentinelHubValidData(),
    'IS_VALID'  # name of output mask
)

# TASK FOR COUNTING VALID PIXELS
# count number of valid observations per pixel using valid data mask
count_val_sh = CountValid(
    'IS_VALID',  # name of existing mask
    'VALID_COUNT'  # name of output scalar
)

# TASK FOR SAVING TO OUTPUT (if needed)
path_out = './eopatches_small/' if use_smaller_patches else './eopatches_large/'
if not os.path.isdir(path_out):
    os.makedirs(path_out)
save = SaveToDisk(path_out,
                  overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

# TASK TO LOAD EXISTING EOPATCHES
load = LoadFromDisk(path_out)

# Define the workflow
workflow = LinearWorkflow(add_data, add_clm, ndvi, ndwi, norm, add_sh_valmask,
                          count_val_sh, save)


def get_add_l2a_data_workflow(data):
    """
    Creates a workflow that:
    1. loads existing EOPatch
    2. adds sen2cor scene classification map
    3. adds L2A data (all 12 bands)
    4. adds s2cloudless cloud masks
    5. determines `L2A_VALID` - map of valid observations (t,h,w,1) based on L2A SCL map
        * pixels are marked as valid if they're tagged as
        `[DARK_AREA_PIXELS, VEGETATION, NOT_VEGETATED, WATER, UNCLASSIFIED]`
        * performs a morphological opening with a disk of radius 11 on `L2A_VALID`
    6. determines `L1C_VALID` - map of valid observations (t,h,w,1) based on the s2cloudless cloud map
        * pixels are marked as valid if they're not tagged as cloud
    7. saves the EOPatch to disk

    A minimal usage sketch is shown after the function definition.
    """
    # 1. loads existing EOPatch
    load = LoadFromDisk(str(data))

    # 2. add L2A
    add_l2a = S2L2AWCSInput(layer='BANDS-S2-L2A',
                            resx='10m',
                            resy='10m',
                            maxcc=0.8,
                            time_difference=timedelta(hours=2),
                            raise_download_errors=False)

    # 3. add sen2cor's scene classification map and snow probability map
    add_scl = AddSen2CorClassificationFeature(sen2cor_classification='SCL',
                                              layer='TRUE-COLOR-S2-L2A',
                                              image_format=MimeType.TIFF_d32f,
                                              raise_download_errors=False)

    # 4. add s2cloudless cloud mask
    cloud_classifier = get_s2_pixel_cloud_detector(average_over=2,
                                                   dilation_size=1,
                                                   all_bands=False)
    add_clm = AddCloudMaskTask(cloud_classifier,
                               'BANDS-S2CLOUDLESS',
                               cm_size_y='160m',
                               cm_size_x='160m',
                               cmask_feature='CLM')

    # create valid data masks
    scl_valid_classes = [2, 4, 5, 6, 7]

    # 5. and 6. add L2A and L1C valid data masks
    add_l1c_valmask = AddValidDataMaskTask(SentinelHubValidData(), 'L1C_VALID')
    add_l2a_valmask = AddValidDataMaskTask(
        Sen2CorValidData(scl_valid_classes, 6, 22), 'L2A_VALID')
    add_valmask = AddValidDataMaskTask(MergeMasks('L1C_VALID', 'L2A_VALID'),
                                       'VALID_DATA')

    # keep only frames with valid data fraction over 70%
    valid_data_predicate = ValidDataFractionPredicate(0.7)
    filter_task = SimpleFilterTask((FeatureType.MASK, 'VALID_DATA'),
                                   valid_data_predicate)

    # save
    save = SaveToDisk(str(data),
                      compress_level=1,
                      overwrite_permission=OverwritePermission.OVERWRITE_PATCH)

    workflow = LinearWorkflow(load,
                              add_l2a,
                              add_scl,
                              add_clm,
                              add_l1c_valmask,
                              add_l2a_valmask,
                              add_valmask,
                              filter_task,
                              save,
                              task_names={
                                  load: 'load',
                                  add_l2a: 'add_L2A',
                                  add_scl: 'add_SCL',
                                  add_clm: 'add_clm',
                                  add_l1c_valmask: 'add_L1C_valmask',
                                  add_l2a_valmask: 'add_L2A_valmask',
                                  add_valmask: 'add_valmask',
                                  filter_task: 'filter_task',
                                  save: 'save'
                              })

    return workflow
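

# Illustrative use of the returned workflow (a minimal sketch, mirroring the executor pattern
# of the earlier examples; the data path and eopatch folder name are assumptions, and looking
# up tasks by name via get_tasks() is assumed to be available on the workflow object):
# workflow = get_add_l2a_data_workflow('/home/beno/Documents/test/Slovenia')
# tasks = workflow.get_tasks()  # {'load': <LoadFromDisk>, ..., 'save': <SaveToDisk>}
# execution_args = [{tasks['load']: {'eopatch_folder': 'eopatch_0'},
#                    tasks['save']: {'eopatch_folder': 'eopatch_0'}}]
# executor = EOExecutor(workflow, execution_args, save_logs=True)
# executor.run(workers=None)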