def add_raster_info_insidePolygons(polygons_shp, raster_file, raster_name):
    """
    calculate statistics of a raster inside each polygon and save them as new attributes

    Args:
        polygons_shp: input shapefile containing the polygons
        raster_file: the file path of the raster (e.g., flow accumulation or DEM)
        raster_name: name used as the prefix of the new attribute fields

    Returns: True if successful, False Otherwise

    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    # all_touched: bool, optional
    #     Whether to include every raster cell touched by a geometry, or only
    #     those having a center point within the polygon.
    #     defaults to `False`
    # Since the dem usually is coarser, so we set all_touched = True
    all_touched = True

    # #DEM
    stats_list = ['min', 'max', 'mean', 'median', 'std']  # ['min', 'max', 'mean', 'count','median','std']
    if operation_obj.add_fields_from_raster(polygons_shp, raster_file, raster_name,
                                            band=1, stats_list=stats_list,
                                            all_touched=all_touched) is False:
        return False

    return True
def remove_polygons(shapefile, field_name, threshold, bsmaller, output):
    '''
    remove polygons based on the value of an attribute field (e.g. ratio_p_a).
    :param shapefile: input shapefile
    :param field_name: the attribute field used for filtering
    :param threshold: threshold value
    :param bsmaller: if True, remove polygons whose value is smaller than the threshold
    :param output: path of the output shapefile
    :return: True if successful, False otherwise
    '''
    operation_obj = shape_opeation()
    if operation_obj.remove_shape_baseon_field_value(shapefile, output, field_name,
                                                     threshold, smaller=bsmaller) is False:
        return False
    # bug fix: the original fell off the end and returned None on success
    return True
def remove_small_round_polygons(input_shp,output_shp,area_thr,ratio_thr): """ remove the polygons that is not gully, that is the polygon is too small or not narrow. # too small or not narrow :param input_shp: input shape file :param output_shp: output shape file :return: True if successful, False otherwise """ #remove the too small polygon operation_obj = shape_opeation() output_rm_small = io_function.get_name_by_adding_tail(input_shp,'rmSmall') # area_thr = parameters.get_minimum_gully_area() if operation_obj.remove_shape_baseon_field_value(input_shp,output_rm_small,'INarea',area_thr,smaller=True) is False: return False # remove the not narrow polygon # it seems that this can not represent how narrow the polygon is, because they are irregular polygons # whatever, it can remove some flat, and not long polygons. if you want to omit this, just set the maximum_ratio_width_height = 1 output_rm_Rwh=io_function.get_name_by_adding_tail(input_shp,'rmRwh') ratio_thr = parameters.get_maximum_ratio_width_height() if operation_obj.remove_shape_baseon_field_value(output_rm_small, output_rm_Rwh, 'ratio_w_h', ratio_thr, smaller=False) is False: return False # remove the not narrow polygon based on ratio_p_a ratio_thr = parameters.get_minimum_ratio_perimeter_area() if operation_obj.remove_shape_baseon_field_value(output_rm_Rwh, output_shp, 'ratio_p_a', ratio_thr, smaller=True) is False: return False return True
def remove_lines_based_on_polygons(shp_line, output_mainline, shp_polygon):
    '''
    remove lines if they don't overlap any polygons
    :param shp_line: input shapefile of lines
    :param output_mainline: output shapefile of the kept lines
    :param shp_polygon: shapefile of polygons
    :return: True if successful, False otherwise
    :raises ValueError: if the two shapefiles have different projections
    '''
    if check_same_projection(shp_line, shp_polygon) is False:
        # bug fix: the original format string was never given its arguments
        raise ValueError("%s and %s don't have the same projection" % (shp_line, shp_polygon))

    inte_lines_list = vector_features.get_intersection_of_line_polygon(shp_line, shp_polygon)
    # a line whose intersection with every polygon is empty overlaps no polygon -> remove it
    b_remove = [item.is_empty for item in inte_lines_list]

    # note that, after remove, the number of main lines are greater than the number of polygons in "shp_polygon"
    # This is because, in Beiluhe, some mapped thaw slumps close to each other were merged to one

    operation_obj = shape_opeation()
    if operation_obj.remove_shapes_by_list(shp_line, output_mainline, b_remove) is False:
        return False
    return True
def calculate_gully_information(gullies_shp):
    """
    get Oriented minimum bounding box for the gully polygon shapefile,
    and update the shape information based on oriented minimum bounding box to the gullies_shp
    :param gullies_shp: input shapefile contains the gully polygons
    :return: True if successful, False Otherwise
    """
    operation_obj = shape_opeation()
    output_shapeinfo = io_function.get_name_by_adding_tail(gullies_shp, 'shapeInfo')
    if os.path.isfile(output_shapeinfo) is False:
        operation_obj.get_polygon_shape_info(gullies_shp, output_shapeinfo)
    else:
        basic.outputlogMessage('warning, %s already exist, skip calculate shape feature' % output_shapeinfo)
    # put all feature to one shapefile
    # parameter 3 the same as parameter 1 to overwrite the input file
    # note: the area in here, is the area of the oriented minimum bounding box, not the area of polygon
    operation_obj.add_fields_shape(gullies_shp, output_shapeinfo, gullies_shp)

    # add width/height (suppose height greater than width)
    width_height_list = operation_obj.get_shape_records_value(gullies_shp, attributes=['WIDTH', 'HEIGHT'])
    # bug fix: check for failure (consistent with the perimeter/area check below);
    # iterating over a returned False would crash
    if width_height_list is False:
        return False
    ratio = []
    for width_height in width_height_list:
        # ratio is always the smaller dimension over the larger one (<= 1)
        if width_height[0] > width_height[1]:
            r_value = width_height[1] / width_height[0]
        else:
            r_value = width_height[0] / width_height[1]
        ratio.append(r_value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, ratio, 'ratio_w_h')

    # add perimeter/area
    perimeter_area_list = operation_obj.get_shape_records_value(gullies_shp, attributes=['INperimete', 'INarea'])
    if perimeter_area_list is False:
        return False
    ratio_p_a = []
    for perimeter_area in perimeter_area_list:
        r_value = (perimeter_area[0]) ** 2 / perimeter_area[1]
        ratio_p_a.append(r_value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, ratio_p_a, 'ratio_p_a')

    # add circularity (4*pi*area/perimeter**2) which is similar to ratio_p_a
    circularity = []
    for perimeter_area in perimeter_area_list:
        value = (4 * math.pi * perimeter_area[1] / perimeter_area[0] ** 2)
        circularity.append(value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, circularity, 'circularity')

    return True
def add_raster_info_from_bufferArea(polygons_shp, raster_file, raster_name, b_buffer_size):
    """
    compute raster statistics (e.g. elevation) over a buffer area around each polygon
    and attach them to the original polygons.

    Args:
        polygons_shp: input shapefile
        raster_file: raster file, should have the same projection of shapefile
        raster_name: the name of raster, should less than four letters, will be used as part of the attribute name
        b_buffer_size: the size of buffer area in meters

    Returns: True if successful, False Otherwise

    """
    # both inputs must exist before doing any work
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    if io_function.is_file_exist(raster_file) is False:
        return False

    shp_op = shape_opeation()

    ## calculate the topography information from the buffer area
    basic.outputlogMessage("info: calculate the raster information from the buffer area")
    buffered_shp = io_function.get_name_by_adding_tail(polygons_shp, 'buffer')
    if vector_features.get_buffer_polygons(polygons_shp, buffered_shp, b_buffer_size) is False:
        raise IOError("error, failed in producing the buffer_polygon_shp")

    # the statistics are computed on the buffered polygons; keep the original path
    # around so the new fields can be copied back afterwards
    original_shp = polygons_shp

    # all_touched=True: count every raster cell touched by a geometry (not only
    # cells whose center falls inside), since the DEM is usually coarser
    stats_list = ['min', 'max', 'mean', 'std']  # ['min', 'max', 'mean', 'count','median','std']
    if shp_op.add_fields_from_raster(buffered_shp, raster_file, raster_name, band=1,
                                     stats_list=stats_list, all_touched=True) is False:
        return False

    # copy the information back to the original shape file
    shp_op.add_fields_shape(original_shp, buffered_shp, original_shp)

    return True
def evaluation_result(result_shp, val_shp):
    """
    evaluate the result based on IoU
    :param result_shp: result shape file contains detected polygons
    :param val_shp: shape file contains validation polygons
    :return: True if successful, False otherwise
    """
    basic.outputlogMessage("evaluation result")
    IoUs = vector_features.calculate_IoU_scores(result_shp, val_shp)
    if IoUs is False:
        return False

    #save IoU to result shapefile
    operation_obj = shape_opeation()
    operation_obj.add_one_field_records_to_shapefile(result_shp, IoUs, 'IoU')

    iou_threshold = parameters.get_IOU_threshold()
    true_pos_count = 0
    false_pos_count = 0
    val_polygon_count = operation_obj.get_shapes_count(val_shp)
    # calculate precision, recall, F1 score
    for iou in IoUs:
        if iou > iou_threshold:
            true_pos_count += 1
        else:
            false_pos_count += 1

    false_neg_count = val_polygon_count - true_pos_count
    if false_neg_count < 0:
        basic.outputlogMessage('warning, false negative count is smaller than 0, recall can not be trusted')

    # bug fix: guard against zero denominators (no detections / no ground truths)
    detected_count = true_pos_count + false_pos_count
    precision = float(true_pos_count) / detected_count if detected_count > 0 else 0.0
    gt_count = true_pos_count + false_neg_count
    recall = float(true_pos_count) / gt_count if gt_count > 0 else 0.0
    if true_pos_count > 0:
        F1score = 2.0 * precision * recall / (precision + recall)
    else:
        F1score = 0

    # output evaluation result (use `with` so the file is always closed)
    evaluation_txt = "evaluation_report.txt"
    with open(evaluation_txt, 'w') as f_obj:
        f_obj.writelines('true_pos_count: %d\n' % true_pos_count)
        f_obj.writelines('false_pos_count: %d\n' % false_pos_count)
        f_obj.writelines('false_neg_count: %d\n' % false_neg_count)
        f_obj.writelines('precision: %.6f\n' % precision)
        f_obj.writelines('recall: %.6f\n' % recall)
        f_obj.writelines('F1score: %.6f\n' % F1score)
    # bug fix: return True on success instead of falling off the end (None)
    return True
def remove_polygons(shapefile, field_name, threshold, bsmaller, output):
    '''
    remove polygons based on attribute values.
    :param shapefile: input shapefile name
    :param field_name: the attribute field used for filtering
    :param threshold: threshold value
    :param bsmaller: if True, remove polygons whose value is smaller than the threshold
    :param output: path of the output shapefile
    :return: True if successful, False otherwise
    '''
    operation_obj = shape_opeation()
    if operation_obj.remove_shape_baseon_field_value(shapefile, output, field_name,
                                                     threshold, smaller=bsmaller) is False:
        return False
    # bug fix: the original fell off the end and returned None on success
    return True
def get_polygon_class(shp_path):
    '''
    read the 'class_int' attribute of every record in a shapefile.
    :param shp_path: path of the shapefile
    :return: a flat list of class integers, or False on failure
    '''
    shp_obj = shape_opeation()
    records = shp_obj.get_shape_records_value(shp_path, ['class_int'])
    if records is False:
        return False
    # each record comes back as a one-element list; flatten to 1-D
    flat_values = []
    for record in records:
        flat_values.extend(record)
    return flat_values
def add_adjacent_polygon_count(polygons_shp, buffer_size, field_name):
    '''
    count, for each polygon, how many other polygons lie within a buffer
    distance, and store the counts in a new attribute field.
    :param polygons_shp: input shapefile of polygons
    :param buffer_size: buffer distance used to decide adjacency
    :param field_name: should be "adj_count"
    :return: result of writing the counts to the shapefile
    '''
    adjacent_counts = vector_features.get_adjacent_polygon_count(polygons_shp, buffer_size)
    shp_obj = shape_opeation()
    return shp_obj.add_one_field_records_to_shapefile(polygons_shp, adjacent_counts, field_name)
def remove_nonclass_polygon(input_shp, output_shp, field_name='svmclass'):
    """
    remove polygon which is not belong to target
    :param input_shp: input shape file
    :param output_shp: output shape file
    :param field_name: the field name of specific field containing class information in the shape file
    :return: True if successful, False Otherwise
    """
    shp_obj = shape_opeation()
    succeeded = shp_obj.remove_nonclass_polygon(input_shp, output_shp, field_name)
    # drop the reference (as the original did) and normalize the result to a bool
    shp_obj = None
    return True if succeeded else False
def remove_polygons_intersect_multi_ground_truths(shp_file, shp_ground_truth, output, copy_fields=None):
    '''
    drop polygons that intersect more than one ground-truth polygon.
    :param shp_file: input shapefile of polygons
    :param shp_ground_truth: shapefile of ground-truth polygons
    :param output: save path for the kept polygons
    :param copy_fields: optional attribute fields to copy over
    :return: result of the removal operation
    '''
    shp_obj = shape_opeation()
    result = shp_obj.remove_polygons_intersect_multi_polygons(shp_file, shp_ground_truth,
                                                              output, copy_fields=copy_fields)
    return result
def add_IoU_values(polygons_shp, ground_truth_shp, field_name):
    '''
    add IoU values to the shape file
    :param polygons_shp: shapefile of mapped polygons
    :param ground_truth_shp: shapefile of ground-truth polygons
    :param field_name: should be 'IoU'
    :return: result of writing the IoU values, or False on failure
    '''
    iou_scores = vector_features.calculate_IoU_scores(polygons_shp, ground_truth_shp)
    if iou_scores is False:
        return False
    # save IoU to result shape file
    writer = shape_opeation()
    return writer.add_one_field_records_to_shapefile(polygons_shp, iou_scores, field_name)
def convert_training_examples_from_shp_to_raster(shp_path, raster_path):
    """
    convert training examples which stored in shape file to a raster file,
    so these training examples can be shared by other shape file
    :param shp_path: shape file path
    :param raster_path: raster data path (the output data type is Byte)
    :return: True if successful, False otherwise
    """
    if io_function.is_file_exist(shp_path) is False:
        return False

    # convert class label (string) to class index (integer)
    class_int_field = 'class_int'
    class_label_field = 'class'
    class_int_list = []
    shp_operation_obj = shape_opeation()
    if shp_operation_obj.has_field(shp_path, class_int_field) is False:
        if shp_operation_obj.has_field(shp_path, class_label_field) is False:
            basic.outputlogMessage('%s do not contain training examples' % shp_path)
        else:
            # convert class label (string) to class index (integer) and create a new field name 'class_int'
            attribute = ['class']
            class_label_list = shp_operation_obj.get_shape_records_value(shp_path, attribute)
            classLabel.output_classLabel_to_txt('class_label_index.txt')
            for label in class_label_list:
                class_int_list.append(classLabel.get_class_index(label[0]))
            shp_operation_obj.add_one_field_records_to_shapefile(shp_path, class_int_list, class_int_field)

    # check again whether there is 'class_int'
    # bug fix: return False instead of the original `assert False` —
    # asserts are stripped under `python -O`, and the documented contract is True/False
    if shp_operation_obj.has_field(shp_path, class_int_field) is False:
        basic.outputlogMessage("Error: There is not class_int field in the shape file")
        return False

    # convert training example in shape file to raster via gdal_rasterize
    res = parameters.get_input_image_rescale()
    layername = os.path.splitext(os.path.basename(shp_path))[0]
    args_list = ['gdal_rasterize', '-a', class_int_field, '-ot', 'Byte',
                 '-tr', str(res), str(res), '-l', layername, shp_path, raster_path]
    result = basic.exec_command_args_list_one_file(args_list, raster_path)
    # an empty output file means the rasterization failed
    if os.path.getsize(result) < 1:
        return False
    return True
def get_k_fold_training_polygons(gt_shp, out_shp, k):
    """
    split polygons to k-fold. Each fold is then used once as a validation
    while the k - 1 remaining folds form the training set.
    similar to sklearn.model_selection.KFold, but apply to polygons in a shapefile

    Args:
        gt_shp: input
        out_shp: output, will output k shapefiles in the same directory with the basename of "out_shp"
        k: k value

    Returns: True if successful, False Otherwise

    """
    if io_function.is_file_exist(gt_shp) is False:
        return False
    splitter = shape_opeation()
    # stratified by the 'class_int' field, with shuffling
    return splitter.get_k_fold_of_polygons(gt_shp, out_shp, k, "class_int", shuffle=True)
def get_training_polygons(gt_shp, out_shp, per):
    """
    randomly select training polygons from positive and negative ground truth polygons

    Args:
        gt_shp: the shape file of ground truth polygons
        out_shp: save path
        per: percentage for selecting.

    Returns: True if successful, False otherwise

    """
    if io_function.is_file_exist(gt_shp) is False:
        return False
    selector = shape_opeation()
    succeeded = selector.get_portition_of_polygons(gt_shp, out_shp, per, "class_int")
    # drop the reference (as the original did) and normalize the result to a bool
    selector = None
    return True if succeeded else False
def add_polygon_circularity_info(shp_path):
    '''
    add circularity of polygons, in this process, it will also add area and perimeter
    :param shp_path: the input shape file, this would modify the shapefile
    :return: result of writing the circularity values, or False on failure
    '''
    # get the area and perimeter first
    if vector_features.cal_area_length_of_polygon(shp_path) is False:
        return False
    operation_obj = shape_opeation()
    perimeter_area_list = operation_obj.get_shape_records_value(shp_path, attributes=['INperimete', 'INarea'])
    # bug fix: the read can fail and return False; iterating it would crash
    # (same check as calculate_gully_information)
    if perimeter_area_list is False:
        return False

    # add circularity (4*pi*area/perimeter**2)
    circularity = []
    for perimeter_area in perimeter_area_list:
        value = (4 * math.pi * perimeter_area[1] / perimeter_area[0] ** 2)
        circularity.append(value)
    # 'circularit' (not a typo here): shapefile attribute names are limited to 10 characters
    return operation_obj.add_one_field_records_to_shapefile(shp_path, circularity, 'circularit')
def _precision_recall_F1(true_pos_count, false_pos_count, false_neg_count):
    # compute precision, recall and F1 score, guarding against zero denominators
    # (the original crashed with ZeroDivisionError when there were no detections
    # or no ground-truth polygons)
    detected_count = true_pos_count + false_pos_count
    precision = float(true_pos_count) / detected_count if detected_count > 0 else 0.0
    gt_count = true_pos_count + false_neg_count
    recall = float(true_pos_count) / gt_count if gt_count > 0 else 0.0
    if true_pos_count > 0:
        F1score = 2.0 * precision * recall / (precision + recall)
    else:
        F1score = 0
    return precision, recall, F1score


def evaluation_result(result_shp, val_shp):
    """
    evaluate the result based on IoU
    :param result_shp: result shape file contains detected polygons
    :param val_shp: shape file contains validation polygons
    :return: True if successful, False otherwise
    """
    basic.outputlogMessage("evaluation result")
    IoUs = vector_features.calculate_IoU_scores(result_shp, val_shp)
    if IoUs is False:
        return False

    #save IoU to result shapefile
    operation_obj = shape_opeation()
    operation_obj.add_one_field_records_to_shapefile(result_shp, IoUs, 'IoU')

    iou_threshold = parameters.get_IOU_threshold()
    true_pos_count = 0
    false_pos_count = 0
    val_polygon_count = operation_obj.get_shapes_count(val_shp)
    # calculate precision, recall, F1 score
    for iou in IoUs:
        if iou > iou_threshold:
            true_pos_count += 1
        else:
            false_pos_count += 1

    false_neg_count = val_polygon_count - true_pos_count
    if false_neg_count < 0:
        basic.outputlogMessage('warning, false negative count is smaller than 0, recall can not be trusted')

    precision, recall, F1score = _precision_recall_F1(true_pos_count, false_pos_count, false_neg_count)

    # output evaluation result (use `with` so the file is always closed)
    evaluation_txt = "evaluation_report.txt"
    with open(evaluation_txt, 'w') as f_obj:
        f_obj.writelines('true_pos_count: %d\n' % true_pos_count)
        f_obj.writelines('false_pos_count: %d\n' % false_pos_count)
        f_obj.writelines('false_neg_count: %d\n' % false_neg_count)
        f_obj.writelines('precision: %.6f\n' % precision)
        f_obj.writelines('recall: %.6f\n' % recall)
        f_obj.writelines('F1score: %.6f\n' % F1score)

    ##########################################################################################
    ## another method for calculating false_neg_count base on IoU value
    # calculate the IoU for validation polygons (ground truths)
    IoUs = vector_features.calculate_IoU_scores(val_shp, result_shp)
    if IoUs is False:
        return False
    # if the IoU of a validation polygon smaller than threshold, then it's false negative
    false_neg_count = 0
    idx_of_false_neg = []
    for idx, iou in enumerate(IoUs):
        if iou < iou_threshold:
            false_neg_count += 1
            idx_of_false_neg.append(idx + 1)  # index start from 1

    precision, recall, F1score = _precision_recall_F1(true_pos_count, false_pos_count, false_neg_count)

    # append the alternative counts to the same report
    with open(evaluation_txt, 'a') as f_obj:
        f_obj.writelines('\n\n** Count false negative by IoU**\n')
        f_obj.writelines('true_pos_count: %d\n' % true_pos_count)
        f_obj.writelines('false_pos_count: %d\n' % false_pos_count)
        f_obj.writelines('false_neg_count_byIoU: %d\n' % false_neg_count)
        f_obj.writelines('precision: %.6f\n' % precision)
        f_obj.writelines('recall: %.6f\n' % recall)
        f_obj.writelines('F1score: %.6f\n' % F1score)
        # output the index of false negative
        f_obj.writelines('\nindex (start from 1) of false negatives: %s\n' %
                         ','.join([str(item) for item in idx_of_false_neg]))
    # bug fix: return True on success instead of falling off the end (None)
    return True
def calculate_polygon_topography(polygons_shp, para_file, dem_files, slope_files, aspect_files=None, dem_diffs=None):
    """
    calculate the topography information such as elevation and slope of each polygon
    Args:
        polygons_shp: input shapefile
        para_file: path of the parameter file
        dem_files: DEM raster file or tiles, should have the same projection of shapefile
        slope_files: slope raster file or tiles (can be derived from dem file by using QGIS or ArcGIS)
        aspect_files: aspect raster file or tiles (can be derived from dem file by using QGIS or ArcGIS)
        dem_diffs: DEM difference raster file or tiles

    Returns: True if successful, False Otherwise
    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    ## calculate the topography information from the buffer area
    # the para file was set in parameters.set_saved_parafile_path(options.para_file)
    b_use_buffer_area = parameters.get_bool_parameters(para_file, 'b_topo_use_buffer_area')

    if b_use_buffer_area is True:
        b_buffer_size = 5  # meters (the same as the shape file)
        basic.outputlogMessage("info: calculate the topography information from the buffer area")
        buffer_polygon_shp = io_function.get_name_by_adding_tail(polygons_shp, 'buffer')
        # if os.path.isfile(buffer_polygon_shp) is False:
        if vector_features.get_buffer_polygons(polygons_shp, buffer_polygon_shp, b_buffer_size) is False:
            basic.outputlogMessage("error, failed in producing the buffer_polygon_shp")
            return False
        # replace the polygon shape file; keep the original for copying fields back
        polygons_shp_backup = polygons_shp
        polygons_shp = buffer_polygon_shp
    else:
        basic.outputlogMessage("info: calculate the topography information from the inside of each polygon")

    # all_touched: bool, optional
    # Whether to include every raster cell touched by a geometry, or only
    # those having a center point within the polygon.
    # defaults to `False`
    # Since the dem usually is coarser, so we set all_touched = True
    all_touched = True
    process_num = 4

    # #DEM
    if dem_files is not None:
        stats_list = ['min', 'max', 'mean', 'median', 'std']  # ['min', 'max', 'mean', 'count','median','std']
        if zonal_stats_multiRasters(polygons_shp, dem_files, stats=stats_list, prefix='dem',
                                    band=1, all_touched=all_touched, process_num=process_num) is False:
            return False
    else:
        basic.outputlogMessage("warning, DEM file not exist, skip the calculation of DEM information")

    # #slope
    if slope_files is not None:
        stats_list = ['min', 'max', 'mean', 'median', 'std']
        if zonal_stats_multiRasters(polygons_shp, slope_files, stats=stats_list, prefix='slo',
                                    band=1, all_touched=all_touched, process_num=process_num) is False:
            return False
    else:
        basic.outputlogMessage("warning, slope file not exist, skip the calculation of slope information")

    # #aspect
    if aspect_files is not None:
        stats_list = ['min', 'max', 'mean', 'std']
        if zonal_stats_multiRasters(polygons_shp, aspect_files, stats=stats_list, prefix='asp',
                                    band=1, all_touched=all_touched, process_num=process_num) is False:
            return False
    else:
        basic.outputlogMessage('warning, aspect file not exist, ignore adding aspect information')

    # elevation difference
    if dem_diffs is not None:
        stats_list = ['min', 'max', 'mean', 'median', 'std', 'area']
        # only count the pixel within this range when do statistics
        dem_diff_range_str = parameters.get_string_list_parameters(para_file, 'dem_difference_range')
        # renamed from `range` to avoid shadowing the builtin
        dem_diff_range = [None if item.upper() == 'NONE' else float(item) for item in dem_diff_range_str]

        # expand the polygon when doing dem difference statistics
        buffer_size_dem_diff = parameters.get_digit_parameters(para_file, 'buffer_size_dem_diff', 'float')

        if zonal_stats_multiRasters(polygons_shp, dem_diffs, stats=stats_list, prefix='demD', band=1,
                                    all_touched=all_touched, process_num=process_num,
                                    range=dem_diff_range, buffer=buffer_size_dem_diff) is False:
            return False
    else:
        basic.outputlogMessage('warning, dem difference file not exist, ignore adding dem diff information')

    # # hillshape

    # copy the topography information back to the original (un-buffered) shapefile
    if b_use_buffer_area is True:
        operation_obj.add_fields_shape(polygons_shp_backup, buffer_polygon_shp, polygons_shp_backup)

    return True
def calculate_gully_topography(polygons_shp, dem_file, slope_file, aspect_file=None):
    """
    calculate the topography information such elevation and slope of each polygon
    Args:
        polygons_shp: input shapfe file
        dem_file: DEM raster file, should have the same projection of shapefile
        slope_file: slope raster file (can be drived from dem file by using QGIS or ArcGIS)
        aspect_file: aspect raster file (can be drived from dem file by using QGIS or ArcGIS)

    Returns: True if successful, False Otherwise

    NOTE(review): another function with the same name is defined later in this
    file and will shadow this definition at import time — confirm which one is
    intended to be used.
    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    ## calculate the topography information from the buffer area
    b_use_buffer_area = True  # hard-coded: always use the buffer area here
    b_buffer_size = 5  # meters (the same as the shape file)

    if b_use_buffer_area is True:
        basic.outputlogMessage("info: calculate the topography information from the buffer area")
        buffer_polygon_shp = io_function.get_name_by_adding_tail(polygons_shp, 'buffer')
        # if os.path.isfile(buffer_polygon_shp) is False:
        if vector_features.get_buffer_polygons(polygons_shp, buffer_polygon_shp, b_buffer_size) is False:
            basic.outputlogMessage("error, failed in producing the buffer_polygon_shp")
            return False
        # else:
        #     basic.outputlogMessage("warning, buffer_polygon_shp already exist, skip producing it")
        # replace the polygon shape file (keep the original for copying fields back)
        polygons_shp_backup = polygons_shp
        polygons_shp = buffer_polygon_shp
    else:
        basic.outputlogMessage("info: calculate the topography information from the inside of each polygon")

    # all_touched: bool, optional
    # Whether to include every raster cell touched by a geometry, or only
    # those having a center point within the polygon.
    # defaults to `False`
    # Since the dem usually is coarser, so we set all_touched = True
    all_touched = True

    # #DEM
    if io_function.is_file_exist(dem_file):
        stats_list = ['min', 'max', 'mean', 'std']  #['min', 'max', 'mean', 'count','median','std']
        if operation_obj.add_fields_from_raster(
                polygons_shp, dem_file, "dem", band=1, stats_list=stats_list, all_touched=all_touched) is False:
            return False
    else:
        basic.outputlogMessage(
            "warning, DEM file not exist, skip the calculation of DEM information")

    # #slope
    if io_function.is_file_exist(slope_file):
        stats_list = ['min', 'max', 'mean', 'std']
        if operation_obj.add_fields_from_raster(
                polygons_shp, slope_file, "slo", band=1, stats_list=stats_list, all_touched=all_touched) is False:
            return False
    else:
        basic.outputlogMessage(
            "warning, slope file not exist, skip the calculation of slope information")

    # #aspect
    if aspect_file is not None and os.path.isfile(aspect_file):
        # NOTE(review): this inner check is redundant — os.path.isfile(aspect_file)
        # was already confirmed in the branch condition above
        if io_function.is_file_exist(aspect_file) is False:
            return False
        stats_list = ['min', 'max', 'mean', 'std']
        if operation_obj.add_fields_from_raster(
                polygons_shp, aspect_file, "asp", band=1, stats_list=stats_list, all_touched=all_touched) is False:
            return False
    else:
        basic.outputlogMessage(
            'warning, aspect file not exist, ignore adding aspect information')

    # # hillshape

    # copy the topography information back to the original (un-buffered) shapefile
    if b_use_buffer_area is True:
        operation_obj.add_fields_shape(polygons_shp_backup, buffer_polygon_shp, polygons_shp_backup)

    return True
def calculate_gully_topography(polygons_shp, dem_file, slope_file, aspect_file=None):
    """
    calculate the topography information such as elevation and slope of each polygon
    Args:
        polygons_shp: input shapefile
        dem_file: DEM raster file, should have the same projection of shapefile
        slope_file: slope raster file (can be derived from dem file by using QGIS or ArcGIS)
        aspect_file: aspect raster file (can be derived from dem file by using QGIS or ArcGIS)

    Returns: True if successful, False Otherwise
    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    operation_obj = shape_opeation()

    # all_touched: bool, optional
    # Whether to include every raster cell touched by a geometry, or only
    # those having a center point within the polygon.
    # defaults to `False`
    # Since the dem usually is coarser, so we set all_touched = True
    all_touched = True

    # #DEM
    if io_function.is_file_exist(dem_file) is False:
        return False
    stats_list = ['min', 'max', 'mean', 'std']  # ['min', 'max', 'mean', 'count','median','std']
    if operation_obj.add_fields_from_raster(polygons_shp, dem_file, "dem", band=1,
                                            stats_list=stats_list, all_touched=all_touched) is False:
        return False

    # #slope
    if io_function.is_file_exist(slope_file) is False:
        return False
    stats_list = ['min', 'max', 'mean', 'std']
    if operation_obj.add_fields_from_raster(polygons_shp, slope_file, "slo", band=1,
                                            stats_list=stats_list, all_touched=all_touched) is False:
        return False

    # #aspect
    if aspect_file is not None:
        # bug fix: the original checked slope_file here (copy-paste error),
        # so a missing aspect file was never detected
        if io_function.is_file_exist(aspect_file) is False:
            return False
        stats_list = ['mean', 'std']
        if operation_obj.add_fields_from_raster(polygons_shp, aspect_file, "asp", band=1,
                                                stats_list=stats_list, all_touched=all_touched) is False:
            return False

    # # hillshape

    return True