Example #1
def calculate_distance_medial_axis(input_shp, out_shp, process_num=4, enlarge_m=20):
    print('calculating polygon width based on medial axis')

    code_dir = os.path.expanduser('~/codes/PycharmProjects/ChangeDet_DL/thawSlumpChangeDet')
    sys.path.insert(0, code_dir)

    # After testing, we found that cal_retreat_rate outputs wrong results when polygons are very narrow and irregular,
    # so enlarge the polygons with a buffer first.

    polygons = vector_gpd.read_polygons_gpd(input_shp)
    # for poly in polygons:
    #     if poly.geom_type == 'MultiPolygon':
    #         print(poly.geom_type,poly)
    # cal_retreat_rate only uses the polygon exterior; fill holes before buffering
    # polygon_large = [ vector_gpd.fill_holes_in_a_polygon(item) for item in polygons]
    polygon_large = polygons
    # buffer
    polygon_large = [item.buffer(enlarge_m) for item in polygon_large]

    wkt = map_projection.get_raster_or_vector_srs_info_wkt(input_shp)
    # save_large_shp = io_function.get_name_by_adding_tail(input_shp,'larger')
    save_pd = pd.DataFrame({'Polygon':polygon_large})
    vector_gpd.save_polygons_to_files(save_pd,'Polygon',wkt,out_shp)

    # calculate width based on expanding areas
    import cal_retreat_rate
    if cal_retreat_rate.cal_expand_area_distance(out_shp, proc_num=process_num,save_medial_axis=True):
        os.system('rm save_medial_axis_radius*.txt out_polygon_vertices_*.txt')
        return out_shp
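A minimal call sketch for the function above, assuming the helper modules (vector_gpd, map_projection, io_function, cal_retreat_rate) from the author's repositories are importable; the shapefile paths are hypothetical:

in_shp = 'thaw_slump_polygons.shp'               # hypothetical input polygons
buffered_shp = 'thaw_slump_polygons_larger.shp'  # hypothetical output path
result = calculate_distance_medial_axis(in_shp, buffered_shp, process_num=4, enlarge_m=20)
print(result)  # output shapefile path on success, None otherwise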
Example #2
def remove_narrow_parts_of_polygons_shp_NOmultiPolygon(input_shp,out_shp,rm_narrow_thr):
    # read polygons as shapely objects
    shapefile = gpd.read_file(input_shp)

    attribute_names = None
    new_polygon_list = []
    polygon_attributes_list = []  # 2d list

    for idx, row in shapefile.iterrows():
        if idx==0:
            attribute_names = row.keys().to_list()[:-1]  # the last one is 'geometry'
        print('removing narrow parts of %dth polygon (total: %d)'%(idx+1,len(shapefile.geometry.values)))
        shapely_polygon = row['geometry']
        if shapely_polygon.is_valid is False:
            shapely_polygon = shapely_polygon.buffer(0.000001)
            basic.outputlogMessage('warning, %dth polygon is invalid, fixing it with a buffer operation'%idx)
        out_geometry = remove_narrow_parts_of_a_polygon(shapely_polygon, rm_narrow_thr)
        # if out_polygon.is_empty is True:
        #     print(idx, out_polygon)
        if out_geometry.is_empty is True:
            basic.outputlogMessage('Warning, remove %dth (0 index) polygon in %s because it is empty after removing narrow parts'%
                                   (idx, os.path.basename(input_shp)))
            # continue, don't save
            # shapefile.drop(idx, inplace=True),
        else:
            out_polygon_list = MultiPolygon_to_polygons(idx, out_geometry)
            if len(out_polygon_list) < 1:
                continue
            new_polygon_list.extend(out_polygon_list)
            attributes = [row[key] for key in attribute_names]
            # copy the attributes (not area, perimeter, etc.) once for each output polygon;
            # use a throwaway loop variable so the row index 'idx' is not shadowed
            for _ in range(len(out_polygon_list)):
                polygon_attributes_list.append(attributes)

    if len(new_polygon_list) < 1:
        basic.outputlogMessage('Warning, no polygons in %s'%input_shp)
        return False

    save_polyons_attributes = {}
    for idx, attribute in enumerate(attribute_names):
        # print(idx, attribute)
        values = [item[idx] for item in polygon_attributes_list]
        save_polyons_attributes[attribute] = values

    save_polyons_attributes["Polygons"] = new_polygon_list
    polygon_df = pd.DataFrame(save_polyons_attributes)

    basic.outputlogMessage('After removing the narrow parts, obtaining %d polygons'%len(new_polygon_list))
    print(out_shp, isinstance(out_shp,list))
    basic.outputlogMessage('will be saved to %s'%out_shp)
    wkt_string = map_projection.get_raster_or_vector_srs_info_wkt(input_shp)
    return save_polygons_to_files(polygon_df, 'Polygons', wkt_string, out_shp)
Example #3
def test_get_dem_tif_ext_polygons():
    work_dir = os.path.expanduser(
        '~/Data/Arctic/canada_arctic/DEM/WR_dem_diff/dem_tifs')
    os.chdir(work_dir)

    tifs = io_function.get_file_list_by_ext('.tif',
                                            work_dir,
                                            bsub_folder=False)
    polygons = dem_mosaic_crop.get_dem_tif_ext_polygons(tifs)

    data = {'poly': polygons}
    pddata = pd.DataFrame(data)
    wkt_str = map_projection.get_raster_or_vector_srs_info_wkt(tifs[0])
    save_path = 'tif_extent.shp'
    vector_gpd.save_polygons_to_files(pddata, 'poly', wkt_str, save_path)
Example #4
def post_processing_subsidence(in_shp):
    polygons = vector_gpd.read_polygons_gpd(in_shp)

    # get shapeinfo
    # poly_shapeinfo_list = []
    save_polyons = []
    for poly in polygons:
        # get INarea, INperimete, WIDTH, HEIGHT, ratio_w_h, hole_count
        # shapeinfo = vector_gpd.calculate_polygon_shape_info(poly)     # error:  'MultiPolygon' object has no attribute 'interiors'
        # poly_shapeinfo_list.append(shapeinfo)
        # if shapeinfo['INarea'] < 40:    # remove the one with area smaller than 40 m^2
        if poly.area < 90:    # remove polygons with an area smaller than 90 m^2
            continue
        save_polyons.append(poly)

    save_pd = pd.DataFrame({'Polygon': save_polyons})
    wkt = map_projection.get_raster_or_vector_srs_info_wkt(in_shp)
    save_shp = io_function.get_name_by_adding_tail(in_shp,'post')
    vector_gpd.save_polygons_to_files(save_pd,'Polygon',wkt,save_shp)
Example #5
def merge_based_on_adjacent_matrix(in_shp):

    # Note: this approach did not work well; many boxes did not merge together.

    process_num = 8
    polygons = vector_gpd.read_polygons_gpd(in_shp)

    print('start building adjacent_matrix')
    adjacent_matrix = vector_gpd.build_adjacent_map_of_polygons(
        polygons, process_num=process_num)
    print('finish building adjacent_matrix')
    if adjacent_matrix is False:
        return False
    merged_polygons = vector_features.merge_touched_polygons(
        polygons, adjacent_matrix)
    print('finish merging touched polygons, get %d ones' %
          (len(merged_polygons)))

    # save
    wkt = map_projection.get_raster_or_vector_srs_info_wkt(in_shp)
    merged_pd = pd.DataFrame({'Polygon': merged_polygons})
    merged_shp = io_function.get_name_by_adding_tail(in_shp, 'merged')
    vector_gpd.save_polygons_to_files(merged_pd, 'Polygon', wkt, merged_shp)
Example #6
def remove_merge_polygon_in_one_shp(in_shp, org_raster, attribute_name, attribute_range, min_area, max_area, process_num=1):
    # attribute_range: [min, max]; either bound can be None for one-sided filtering

    lower = attribute_range[0]
    upper = attribute_range[1]

    save_shp = io_function.get_name_by_adding_tail(in_shp, 'post')
    if os.path.isfile(save_shp):
        basic.outputlogMessage('%s exists, skip'%save_shp)
        return save_shp

    shp_pre = io_function.get_name_no_ext(in_shp)
    # read polygons and labels from the segmentation algorithm; note: some polygons may share the same label
    polygons, attr_value_list = vector_gpd.read_polygons_attributes_list(in_shp,attribute_name)
    print('Read %d polygons'%len(polygons))
    if attr_value_list is None:
        raise ValueError('%s not in %s, need to remove it and then re-create'%(attribute_name,in_shp))


    remain_polyons = []
    rm_min_area_count = 0
    rm_att_value_count = 0
    for poly, att_value in zip(polygons, attr_value_list):
        if poly.area < min_area:
            rm_min_area_count += 1
            continue
        if lower is None:
            if att_value >= upper:
                rm_att_value_count += 1
                continue
        elif upper is None:
            if att_value <= lower:
                rm_att_value_count += 1
                continue
        else:
            # out of range, remove
            if att_value < lower or att_value > upper:
                rm_att_value_count += 1
                continue
        remain_polyons.append(poly)

    print('remove %d polygons based on min_area, %d polygons based on attribute_range, remain %d ones'%(rm_min_area_count, rm_att_value_count,len(remain_polyons)))

    if len(remain_polyons) > 1:
        # we should only merge polygons with a similar reduction, but polygons with a mean reduction above the threshold were already removed
        # merge touching polygons
        print(timeTools.get_now_time_str(), 'start building adjacent_matrix')
        # adjacent_matrix = vector_features.build_adjacent_map_of_polygons(remain_polyons)
        machine_name = os.uname()[1]
        # if 'login' in machine_name or 'shas' in machine_name or 'sgpu' in machine_name:
        #     print('Warning, some problem of parallel running in build_adjacent_map_of_polygons on curc, but ok in my laptop and uist, change process_num = 1')
        #     process_num = 1
        adjacent_matrix = vector_gpd.build_adjacent_map_of_polygons(remain_polyons, process_num=process_num)
        print(timeTools.get_now_time_str(), 'finish building adjacent_matrix')

        if adjacent_matrix is False:
            return False
        merged_polygons = vector_features.merge_touched_polygons(remain_polyons,adjacent_matrix)
        print(timeTools.get_now_time_str(), 'finish merging touched polygons, get %d ones'%(len(merged_polygons)))

        # remove large ones
        remain_polyons = []
        rm_max_area_count = 0
        for poly in merged_polygons:
            if poly.area > max_area:
                rm_max_area_count += 1
                continue
            remain_polyons.append(poly)

        print('remove %d polygons based on max_area, remain %d'%(rm_max_area_count, len(remain_polyons)))

    wkt = map_projection.get_raster_or_vector_srs_info_wkt(in_shp)

    polyons_noMulti = [ vector_gpd.MultiPolygon_to_polygons(idx,poly) for idx,poly in enumerate(remain_polyons) ]
    remain_polyons = []
    for polys in polyons_noMulti:
        polys = [poly for poly in polys if poly.area > min_area]    # remove tiny polygon before buffer
        remain_polyons.extend(polys)
    print('convert MultiPolygon to polygons, remove some small polygons, remain %d' % (len(remain_polyons)))


    # based on the merged polygons, calculate the mean dem diff, relative dem_diff
    buffer_surrounding = 20  # meters
    surrounding_polygons = vector_gpd.get_surrounding_polygons(remain_polyons,buffer_surrounding)
    surrounding_shp = io_function.get_name_by_adding_tail(in_shp, 'surrounding')
    surr_pd = pd.DataFrame({'Polygon': surrounding_polygons})
    vector_gpd.save_polygons_to_files(surr_pd, 'Polygon', wkt, surrounding_shp)
    raster_statistic.zonal_stats_multiRasters(surrounding_shp, org_raster, stats=['mean', 'std', 'count'], prefix='demD',process_num=process_num)


    # calculate attributes of the remaining ones: area, dem_diff: mean, std
    merged_pd = pd.DataFrame({'Polygon': remain_polyons})
    merged_shp = io_function.get_name_by_adding_tail(in_shp, 'merged')
    vector_gpd.save_polygons_to_files(merged_pd, 'Polygon', wkt, merged_shp)
    raster_statistic.zonal_stats_multiRasters(merged_shp, org_raster, stats=['mean','std','count'], prefix='demD', process_num=process_num)

    # calculate the relative dem diff
    surr_dem_diff_list = vector_gpd.read_attribute_values_list(surrounding_shp,'demD_mean')
    merge_poly_dem_diff_list = vector_gpd.read_attribute_values_list(merged_shp,'demD_mean')
    if len(surr_dem_diff_list) != len(merge_poly_dem_diff_list):
        raise ValueError('The number of surr_dem_diff_list and merge_poly_dem_diff_list is different')
    relative_dem_diff_list = [  mer - sur for sur, mer in zip(surr_dem_diff_list, merge_poly_dem_diff_list) ]

    merge_poly_demD_std_list = vector_gpd.read_attribute_values_list(merged_shp,'demD_std')
    merge_poly_demD_count_list = vector_gpd.read_attribute_values_list(merged_shp,'demD_count')

    # remove large ones
    save_polyons = []
    save_demD_mean_list = []
    save_demD_std_list = []
    save_demD_count_list = []
    save_rel_diff_list = []
    save_surr_demD_list = []
    rm_rel_dem_diff_count = 0
    rm_min_area_count = 0
    for idx in range(len(remain_polyons)):
        # relative dem diff: remove polygons whose relative change is not below the upper bound
        if relative_dem_diff_list[idx] > upper:
            rm_rel_dem_diff_count += 1
            continue

        # when convert MultiPolygon to Polygon, may create some small polygons
        if remain_polyons[idx].area < min_area:
            rm_min_area_count += 1
            continue


        save_polyons.append(remain_polyons[idx])
        save_demD_mean_list.append(merge_poly_dem_diff_list[idx])
        save_demD_std_list.append(merge_poly_demD_std_list[idx])
        save_demD_count_list.append(merge_poly_demD_count_list[idx])
        save_rel_diff_list.append(relative_dem_diff_list[idx])
        save_surr_demD_list.append(surr_dem_diff_list[idx])

    print('remove %d polygons based on relative rel_demD and %d based on min_area, remain %d' % (rm_rel_dem_diff_count, rm_min_area_count, len(save_polyons)))

    poly_ids = [ item+1  for item in range(len(save_polyons)) ]
    poly_areas = [poly.area for poly in save_polyons]

    save_pd = pd.DataFrame({'poly_id':poly_ids, 'poly_area':poly_areas,'demD_mean':save_demD_mean_list, 'demD_std':save_demD_std_list,
                             'demD_count':save_demD_count_list, 'surr_demD':save_surr_demD_list, 'rel_demD':save_rel_diff_list ,'Polygon': save_polyons})

    vector_gpd.save_polygons_to_files(save_pd, 'Polygon', wkt, save_shp)

    # add date difference if it is available
    date_diff_base = os.path.basename(org_raster).replace('DEM_diff','date_diff')
    date_diff_tif = os.path.join(os.path.dirname(org_raster), date_diff_base)
    if os.path.isfile(date_diff_tif):
        raster_statistic.zonal_stats_multiRasters(save_shp, date_diff_tif, stats=['mean', 'std'], prefix='dateD',
                                              process_num=process_num)

    return save_shp
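A hedged call sketch for remove_merge_polygon_in_one_shp, illustrating the one-sided attribute_range handling (a None lower bound means only the upper bound is enforced); the file names and the threshold value are hypothetical:

# keep polygons whose 'demD_mean' is below -0.5 m (elevation loss), then merge and re-filter
post_shp = remove_merge_polygon_in_one_shp('dem_diff_segments.shp', 'dem_diff.tif',
                                           'demD_mean', [None, -0.5],
                                           min_area=40, max_area=100000000, process_num=4)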
Example #7
def main(options, args):

    time0 = time.time()
    image_dir = args[0]
    geojson_list = io_function.get_file_list_by_ext('.geojson',
                                                    image_dir,
                                                    bsub_folder=False)
    # remove some scenes, or maybe we should set bsub_folder=False
    # geojson_list = [item for item in geojson_list if 'incomplete_scenes' not in item ]  # remove those in "incomplete_scenes"
    # geojson_list = [item for item in geojson_list if 'scenes_high_cloud_cover' not in item ]  # remove those in "scenes_high_cloud_cover"

    if len(geojson_list) < 1:
        raise ValueError('There are no geojson files in %s' % image_dir)

    basic.outputlogMessage('Image Dir: %s' % image_dir)
    basic.outputlogMessage("Number of geojson files: %d" % len(geojson_list))

    grid_polygon_shp = args[
        1]  # the polygon should be in projection Cartesian coordinate system (e.g., UTM )
    basic.outputlogMessage('Image grid polygon shapefile: %s' %
                           grid_polygon_shp)
    process_num = options.process_num
    basic.outputlogMessage(
        'The number of processes for creating the mosaic is: %d' % process_num)

    # read grid polygons
    grid_polygons = vector_gpd.read_polygons_gpd(grid_polygon_shp)
    grid_ids = vector_gpd.read_attribute_values_list(grid_polygon_shp, 'id')
    if grid_ids is None:
        basic.outputlogMessage(
            'Warning, field: id is not in %s, will create default ID for each grid'
            % grid_polygon_shp)
        grid_ids = [id + 1 for id in range(len(grid_polygons))]

    shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(
        grid_polygon_shp).strip()
    # print(shp_prj)
    grid_polygons_latlon = grid_polygons
    if shp_prj != '+proj=longlat +datum=WGS84 +no_defs':
        # read polygons and reproject to 4326 projection
        grid_polygons_latlon = vector_gpd.read_shape_gpd_to_NewPrj(
            grid_polygon_shp, 'EPSG:4326')
    # else:
    #     raise ValueError(' %s should be in projection of Cartesian coordinate system'%grid_polygon_shp)

    shp_prj_wkt = map_projection.get_raster_or_vector_srs_info_wkt(
        grid_polygon_shp)

    max_sr = options.max_sr
    min_sr = options.min_sr

    original_img_copy_dir = options.original_img_copy_dir
    b_to_rgb_8bit = options.to_rgb
    basic.outputlogMessage('Convert to 8bit RGB images: %s' %
                           str(b_to_rgb_8bit))

    # group planet image based on acquisition date
    b_group_date = options.group_date
    basic.outputlogMessage('Group Planet image based on acquisition date: %s' %
                           str(b_group_date))
    if b_group_date:
        # diff_days as 0, group images acquired at the same date
        geojson_groups = group_planet_images_date(geojson_list, diff_days=0)

        # sort by acquisition date (the dict key) in ascending order: operator.itemgetter(0)
        geojson_groups = dict(
            sorted(geojson_groups.items(), key=operator.itemgetter(0)))

        save_group_txt = 'geojson_groups_input_folder.txt'
        basic.outputlogMessage(
            'images are divided into %d groups, save to %s' %
            (len(geojson_groups.keys()), save_group_txt))
        io_function.save_dict_to_txt_json(save_group_txt, geojson_groups)
    else:
        geojson_groups = {'all': geojson_list}

    # create mosaic of each grid
    cloud_cover_thr = options.cloud_cover
    cloud_cover_thr = cloud_cover_thr * 100  # for Planet image, it is percentage
    out_res = options.out_res
    cur_dir = os.getcwd()
    resampling_method = options.merged_method

    for key in geojson_groups.keys():

        # # test
        # if key != '20200701':
        #     continue

        geojson_list = geojson_groups[key]
        save_dir = os.path.basename(cur_dir) + '_mosaic_' + str(
            out_res) + '_' + key
        # print(save_dir)
        if process_num == 1:
            for id, polygon, poly_latlon in zip(grid_ids, grid_polygons,
                                                grid_polygons_latlon):
                # if id != 34:
                #     continue
                create_moasic_of_each_grid_polygon(
                    id,
                    polygon,
                    poly_latlon,
                    out_res,
                    cloud_cover_thr,
                    geojson_list,
                    save_dir,
                    new_prj_wkt=shp_prj_wkt,
                    new_prj_proj4=shp_prj,
                    sr_min=min_sr,
                    sr_max=max_sr,
                    to_rgb=b_to_rgb_8bit,
                    save_org_dir=original_img_copy_dir,
                    resampling_method=resampling_method)
        elif process_num > 1:
            theadPool = Pool(process_num)  # multi processes

            parameters_list = [
                (id, polygon, poly_latlon, out_res, cloud_cover_thr,
                 geojson_list, save_dir, shp_prj_wkt, shp_prj, min_sr, max_sr,
                 b_to_rgb_8bit, 0, original_img_copy_dir)
                for id, polygon, poly_latlon in zip(grid_ids, grid_polygons,
                                                    grid_polygons_latlon)
            ]
            results = theadPool.starmap(create_moasic_of_each_grid_polygon,
                                        parameters_list)  # need python3
            theadPool.close()
        else:
            raise ValueError('incorrect process number: %d' % process_num)

    cost_time_sec = time.time() - time0
    basic.outputlogMessage(
        'Done, total time cost %.2f seconds (%.2f minutes or %.2f hours)' %
        (cost_time_sec, cost_time_sec / 60, cost_time_sec / 3600))

    pass
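main() above expects an options object with process_num, max_sr, min_sr, original_img_copy_dir, to_rgb, group_date, cloud_cover, out_res, and merged_method, plus two positional arguments. A sketch of an optparse setup that could supply them; the flag strings and default values are assumptions, only the destination names come from the code above:

# sketch only: flag names and defaults are hypothetical, dest names match main()
from optparse import OptionParser

parser = OptionParser(usage='usage: %prog [options] image_dir grid_polygon_shp')
parser.add_option('--process_num', dest='process_num', type='int', default=1)
parser.add_option('--max_sr', dest='max_sr', type='float', default=None)
parser.add_option('--min_sr', dest='min_sr', type='float', default=None)
parser.add_option('--original_img_copy_dir', dest='original_img_copy_dir', default=None)
parser.add_option('--to_rgb', dest='to_rgb', action='store_true', default=False)
parser.add_option('--group_date', dest='group_date', action='store_true', default=False)
parser.add_option('--cloud_cover', dest='cloud_cover', type='float', default=0.3)
parser.add_option('--out_res', dest='out_res', type='float', default=3.0)
parser.add_option('--merged_method', dest='merged_method', default='min')

(options, args) = parser.parse_args()
main(options, args)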
Example #8
def extract_headwall_from_slope(idx, total, slope_tif, work_dir, save_dir,slope_threshold, min_area, max_area,max_axis_width,max_box_WH,process_num):
    '''

    :param idx: tif index
    :param total: total slope file count
    :param slope_tif: slope file
    :param work_dir:
    :param save_dir:
    :param slope_threshold:
    :param min_area:
    :param max_area:
    :param max_axis_width: max width based on medial axis
    :param max_box_WH:  max width or height based on minimum_rotated_rectangle
    :param process_num:
    :return:
    '''

    headwall_shp = os.path.splitext(os.path.basename(io_function.get_name_by_adding_tail(slope_tif,'headwall')))[0] + '.shp'
    save_headwall_shp = os.path.join(save_dir,headwall_shp)
    if os.path.isfile(save_headwall_shp):
        print('%s exists, skip'%save_headwall_shp)
        return save_headwall_shp


    print('(%d/%d) extracting headwall from %s'%(idx,total,slope_tif))

    wkt = map_projection.get_raster_or_vector_srs_info_wkt(slope_tif)
    # binary slope
    slope_bin_path = os.path.join(work_dir, os.path.basename(io_function.get_name_by_adding_tail(slope_tif, 'bin')))
    slope_bin_shp = slope_tif_to_slope_shapefile(slope_tif,slope_bin_path,slope_threshold)


    # only keep small, but not too small
    rm_area_shp = io_function.get_name_by_adding_tail(slope_bin_shp, 'rmArea')
    if os.path.isfile(rm_area_shp):
        print('%s exists, skip removing based on area'%rm_area_shp)
    else:
        if remove_based_on_area(slope_bin_shp,min_area,max_area, wkt,rm_area_shp) is False:
            return False

    # add some shape info
    rm_shapeinfo_shp = io_function.get_name_by_adding_tail(slope_bin_shp, 'rmShape')
    if os.path.isfile(rm_shapeinfo_shp):
        print('%s exists, skip removing based on shape'%rm_shapeinfo_shp)
    else:
        if remove_based_on_shapeinfo(rm_area_shp, rm_shapeinfo_shp, max_box_WH) is False:
            return False

    rm_medialAxis_shp = io_function.get_name_by_adding_tail(slope_bin_shp, 'rmMedialAxis')
    if os.path.isfile(rm_medialAxis_shp):
        print('%s exists, skip removing based on Medial Axis' % rm_medialAxis_shp)
    else:
        remove_based_medialAxis(rm_shapeinfo_shp, rm_medialAxis_shp,process_num,max_axis_width)

    # copy the results.
    io_function.copy_shape_file(rm_medialAxis_shp,save_headwall_shp)

    # add slope of the surrounding area? the surrounding should be flat. NO.


    return save_headwall_shp
Example #9
def get_dem_subscidence_polygons(in_shp, dem_diff_tif, dem_diff_thread_m=-0.5, min_area=40, max_area=100000000, process_num=1,
                                 b_rm_files=False):

    save_shp = io_function.get_name_by_adding_tail(in_shp, 'post')
    if os.path.isfile(save_shp):
        basic.outputlogMessage('%s exists, skip'%save_shp)
        return save_shp


    demD_height, demD_width, demD_band_num, demD_date_type = raster_io.get_height_width_bandnum_dtype(dem_diff_tif)
    # print(demD_date_type)

    # # read mean elevation difference
    # attributes_path = os.path.join(os.path.dirname(in_shp), shp_pre + '_attributes.txt')
    #
    # # for each seg lable [mean, std, pixel count], if dem_diff_tif is float 32, then in meters, if int16, then in centimeter
    # poly_attributes = io_function.read_dict_from_txt_json(attributes_path)

    # if int16, then it's in centimeter
    if demD_date_type == 'int16':
        dem_diff_thread_m = dem_diff_thread_m*100

    # merge polygons touch each others
    wkt = map_projection.get_raster_or_vector_srs_info_wkt(in_shp)
    merged_shp = io_function.get_name_by_adding_tail(in_shp, 'merged')
    if filter_merge_polygons(in_shp,merged_shp,wkt, min_area,max_area,dem_diff_tif,dem_diff_thread_m,process_num) is None:
        return None

    # the merging step removes some big polygons and converts MultiPolygons to Polygons, so we need to update remain_polyons
    remain_polyons = vector_gpd.read_polygons_gpd(merged_shp)

    # check MultiPolygons again.
    polyons_noMulti = [vector_gpd.MultiPolygon_to_polygons(idx, poly) for idx, poly in enumerate(remain_polyons)]
    remain_polyons = []
    for polys in polyons_noMulti:
        polys = [poly for poly in polys if poly.area > min_area]  # remove tiny polygon before buffer
        remain_polyons.extend(polys)
    print('convert MultiPolygon to polygons and remove small ones, remain %d' % (len(remain_polyons)))

    # based on the merged polygons, surrounding polygons
    buffer_surrounding = 20  # meters
    surrounding_shp = io_function.get_name_by_adding_tail(in_shp, 'surrounding')
    get_surrounding_polygons(remain_polyons, surrounding_shp, wkt, dem_diff_tif, buffer_surrounding, process_num)

    rm_reldemD_shp = io_function.get_name_by_adding_tail(in_shp, 'rmreldemD')
    if remove_polygons_based_relative_dem_diff(remain_polyons, merged_shp, surrounding_shp, wkt, rm_reldemD_shp, min_area,dem_diff_thread_m) is None:
        return None

    rm_shapeinfo_shp = io_function.get_name_by_adding_tail(in_shp, 'rmshapeinfo')
    area_limit = 10000
    circularity_limit = 0.1
    holes_count = 20
    if remove_polygons_based_shapeinfo(rm_reldemD_shp, rm_shapeinfo_shp, area_limit, circularity_limit, holes_count) is None:
        return None

    # remove based on slope
    # use the slope derived from the ArcticDEM mosaic
    slope_tif_list = io_function.get_file_list_by_ext('.tif',dem_common.arcticDEM_tile_slope_dir,bsub_folder=False)
    basic.outputlogMessage('Find %d slope files in %s'%(len(slope_tif_list), dem_common.arcticDEM_tile_slope_dir))
    rm_slope_shp = io_function.get_name_by_adding_tail(in_shp, 'rmslope')
    max_slope = 20
    if remove_based_slope(rm_shapeinfo_shp, rm_slope_shp,slope_tif_list, max_slope,process_num) is False:
        return None

    # copy
    io_function.copy_shape_file(rm_slope_shp,save_shp)

    # add date difference if it is available
    date_diff_base = os.path.basename(dem_diff_tif).replace('DEM_diff','date_diff')
    date_diff_tif = os.path.join(os.path.dirname(dem_diff_tif) , date_diff_base)
    if os.path.isfile(date_diff_tif):
        raster_statistic.zonal_stats_multiRasters(save_shp, date_diff_tif,tile_min_overlap=tile_min_overlap,
                                                  stats=['mean', 'std'], prefix='dateD',process_num=process_num)

    return save_shp
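A hedged usage sketch for get_dem_subscidence_polygons, assuming the DEM-difference raster and the segmentation polygons already exist; the paths are hypothetical and the helper modules come from the author's repositories:

in_shp = 'WR_dem_diff_segments.shp'     # hypothetical segmentation polygons
dem_diff_tif = 'WR_DEM_diff_sub_1.tif'  # hypothetical DEM-difference raster
post_shp = get_dem_subscidence_polygons(in_shp, dem_diff_tif, dem_diff_thread_m=-0.5,
                                        min_area=40, max_area=100000000, process_num=4)
print(post_shp)  # path of the '_post' shapefile, or None if a step failed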