def save_false_positve_and_false_negative(result_shp, val_shp, para_file):
    """
    save false positive and false negative polygons among the mapped polygons, based on IoU values
    :param result_shp: result shapefile containing mapped polygons
    :param val_shp: shapefile containing validation polygons
    :param para_file: parameter file defining the IoU threshold
    :return:
    """
    assert io_function.is_file_exist(result_shp)
    assert io_function.is_file_exist(val_shp)
    basic.outputlogMessage('Input mapping result: %s' % result_shp)
    basic.outputlogMessage('Input ground truth: %s' % val_shp)

    IOU_threshold = parameters.get_IOU_threshold(parafile=para_file)
    basic.outputlogMessage('IOU threshold is: %f' % IOU_threshold)

    # calculate IoU of each mapped polygon against the validation polygons
    IOU_mapped_polygons = vector_features.calculate_IoU_scores(result_shp, val_shp)
    save_FP_path = io_function.get_name_by_adding_tail(result_shp, 'FP')
    # the False flag means: remove polygons with IoU greater than the threshold,
    # keeping the false positives
    remove_polygons_based_values(result_shp, IOU_mapped_polygons, IOU_threshold, False, save_FP_path)
    basic.outputlogMessage('save false positives to %s' % save_FP_path)

    # calculate IoU of each validation polygon against the mapped polygons
    IOU_ground_truth = vector_features.calculate_IoU_scores(val_shp, result_shp)
    save_FN_path = io_function.get_name_by_adding_tail(result_shp, 'FN')
    # remove validation polygons with IoU greater than the threshold,
    # keeping the false negatives
    remove_polygons_based_values(val_shp, IOU_ground_truth, IOU_threshold, False, save_FN_path)
    basic.outputlogMessage('save false negatives to %s' % save_FN_path)
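# A minimal, self-contained sketch of the IoU logic behind the function above,
# using shapely directly instead of the project's vector_features module.
# The two boxes and the 0.5 threshold are illustrative assumptions, not values
# taken from a real mapping result.
from shapely.geometry import box

def iou(poly_a, poly_b):
    # intersection-over-union of two shapely geometries
    union_area = poly_a.union(poly_b).area
    return poly_a.intersection(poly_b).area / union_area if union_area > 0 else 0.0

mapped = box(0, 0, 10, 10)         # a mapped polygon
ground_truth = box(5, 5, 15, 15)   # a validation polygon
iou_threshold = 0.5

# a mapped polygon whose IoU stays below the threshold is a false positive;
# symmetrically, a validation polygon with IoU below the threshold is a false negative
score = iou(mapped, ground_truth)  # 25 / 175 ~= 0.143
print('IoU = %.3f -> %s' % (score, 'true positive' if score >= iou_threshold else 'false positive'))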
def plot_mmseg_loss_miou_acc_main(json_path, save_file_pre=None):
    if os.path.isfile(json_path) is False:
        return False
    if save_file_pre is None:
        file_name = os.path.splitext(os.path.basename(json_path))[0]
    else:
        file_name = save_file_pre

    # TODO: background and thawslump should be read from the setting file
    miou_class_0 = '0_IoU.%s' % 'background'
    miou_class_1 = '0_IoU.%s' % 'thawslump'

    save_dir = os.path.dirname(json_path)
    save_path = os.path.join(save_dir, file_name + '.jpg')

    # log_dict_train = {'epoch':[], 'iter':[],'lr':[],'memory':[],'loss':[],'data_time':[],'time':[]}
    # log_dict_val = {'epoch':[], 'iter':[],'lr':[],miou_class_0:[],miou_class_1:[],'0_mIoU':[]}
    log_dict_train, log_dict_val = load_mmseg_log_json(json_path, miou_class_0, miou_class_1)
    # print(log_dict_train, log_dict_val)
    # io_function.save_dict_to_txt_json('log_dict_train.txt', log_dict_train)
    # io_function.save_dict_to_txt_json('log_dict_val.txt', log_dict_val)

    plot_loss_vs_epoch_iter(log_dict_train, io_function.get_name_by_adding_tail(save_path, 'loss'))
    plot_miou_vs_epoch_iter(log_dict_val, miou_class_1, io_function.get_name_by_adding_tail(save_path, 'miou'))
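# A minimal sketch of reading an mmseg-style .log.json, where each line is one
# JSON dict carrying a 'mode' field ('train' or 'val'); this approximates the
# input that load_mmseg_log_json consumes (the exact key layout is an assumption).
import json

def read_log_json(path):
    train_records, val_records = [], []
    with open(path) as f_obj:
        for line in f_obj:
            line = line.strip()
            if not line:
                continue
            record = json.loads(line)
            if record.get('mode') == 'train':
                train_records.append(record)
            elif record.get('mode') == 'val':
                val_records.append(record)
    return train_records, val_records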
def mask_crop_dem_by_matchtag(org_dem_tif_list, crop_dem_list, extent_poly, extent_id,
                              crop_tif_dir, o_res, process_num):
    matchtag_crop_tif_list = []
    mask_crop_dem_list = []
    for o_tif, crop_dem in zip(org_dem_tif_list, crop_dem_list):
        # find the matchtag file corresponding to the registered DEM
        matchtag_tif = o_tif.replace('_dem_reg.tif', '_matchtag_reg.tif')
        # io_function.is_file_exist(matchtag_tif)
        if os.path.isfile(matchtag_tif) is False:
            basic.outputlogMessage('Warning, %s does not exist, skip' % matchtag_tif)
            continue

        # crop the matchtag to the extent
        save_crop_path = os.path.join(crop_tif_dir,
                                      os.path.basename(io_function.get_name_by_adding_tail(matchtag_tif, 'sub_poly_%d' % extent_id)))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip cropping' % save_crop_path)
            matchtag_crop_tif_list.append(save_crop_path)
        else:
            crop_tif = subset_image_by_polygon_box(matchtag_tif, save_crop_path, extent_poly,
                                                   resample_m='near', o_format='VRT',
                                                   out_res=o_res, same_extent=True, thread_num=process_num)
            if crop_tif is False:
                raise ValueError('warning, crop %s failed' % matchtag_tif)
            matchtag_crop_tif_list.append(crop_tif)

        # mask the cropped DEM using the cropped matchtag
        crop_dem_mask = io_function.get_name_by_adding_tail(crop_dem, 'mask')
        if os.path.isfile(crop_dem_mask):
            basic.outputlogMessage('%s exists, skip masking' % crop_dem_mask)
            mask_crop_dem_list.append(crop_dem_mask)
        else:
            if mask_dem_by_matchtag(crop_dem, save_crop_path, crop_dem_mask) is False:
                raise ValueError('warning, masking %s failed' % crop_dem)
            mask_crop_dem_list.append(crop_dem_mask)

    return mask_crop_dem_list, matchtag_crop_tif_list
def remove_small_round_polygons(input_shp, output_shp, area_thr, ratio_thr):
    """
    remove polygons that are not gullies, i.e., polygons that are too small or not narrow
    :param input_shp: input shapefile
    :param output_shp: output shapefile
    :param area_thr: minimum polygon area
    :param ratio_thr: maximum width-to-height ratio of the oriented bounding box
    :return: True if successful, False otherwise
    """
    # remove polygons that are too small (see the hypothetical call after this function)
    operation_obj = shape_opeation()
    output_rm_small = io_function.get_name_by_adding_tail(input_shp, 'rmSmall')
    # area_thr = parameters.get_minimum_gully_area()
    if operation_obj.remove_shape_baseon_field_value(input_shp, output_rm_small, 'INarea',
                                                     area_thr, smaller=True) is False:
        return False

    # remove polygons that are not narrow
    # the width/height ratio of an irregular polygon cannot fully represent how narrow it is;
    # still, it removes some flat and short polygons. To skip this step, set maximum_ratio_width_height = 1
    output_rm_Rwh = io_function.get_name_by_adding_tail(input_shp, 'rmRwh')
    ratio_thr = parameters.get_maximum_ratio_width_height()  # note: this overrides the ratio_thr argument
    if operation_obj.remove_shape_baseon_field_value(output_rm_small, output_rm_Rwh, 'ratio_w_h',
                                                     ratio_thr, smaller=False) is False:
        return False

    # remove polygons that are not narrow, based on ratio_p_a (perimeter^2 / area)
    ratio_thr = parameters.get_minimum_ratio_perimeter_area()
    if operation_obj.remove_shape_baseon_field_value(output_rm_Rwh, output_shp, 'ratio_p_a',
                                                     ratio_thr, smaller=True) is False:
        return False

    return True
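# Hypothetical call of remove_small_round_polygons; the file names and the
# thresholds are assumptions for illustration. Note the two intermediate files
# (*_rmSmall.shp and *_rmRwh.shp) written next to the input before the final output.
# remove_small_round_polygons('gullies.shp', 'gullies_narrow.shp',
#                             area_thr=90, ratio_thr=0.25)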
def main(options, args):
    polygons_shp = args[0]
    output = options.output
    if output is None:
        output = io_function.get_name_by_adding_tail(polygons_shp, 'removed')
    para_file = options.para_file
    assert io_function.is_file_exist(polygons_shp)

    # remove polygons based on area
    rm_area_save_shp = io_function.get_name_by_adding_tail(polygons_shp, 'rmArea')
    # area_thr = 1000  # 10 pixels
    area_thr = parameters.get_digit_parameters_None_if_absence(para_file, 'minimum_gully_area', 'int')
    b_smaller = True
    if area_thr is not None:
        remove_polygons(polygons_shp, 'INarea', area_thr, b_smaller, rm_area_save_shp)

    # remove polygons based on slope information
    rm_slope_save_shp1 = io_function.get_name_by_adding_tail(polygons_shp, 'rmslope1')
    # slope_small_thr = 2
    slope_small_thr = parameters.get_digit_parameters_None_if_absence(para_file, 'min_slope', 'float')
    b_smaller = True
    if slope_small_thr is not None:
        remove_polygons(rm_area_save_shp, 'slo_mean', slope_small_thr, b_smaller, rm_slope_save_shp1)

    rm_slope_save_shp2 = io_function.get_name_by_adding_tail(polygons_shp, 'rmslope2')
    # slope_large_thr = 20
    slope_large_thr = parameters.get_digit_parameters_None_if_absence(para_file, 'max_slope', 'float')
    b_smaller = False
    if slope_large_thr is not None:
        remove_polygons(rm_slope_save_shp1, 'slo_mean', slope_large_thr, b_smaller, rm_slope_save_shp2)

    # remove polygons based on DEM
    rm_dem_save_shp = output  # final output
    # dem_small_thr = 3000
    dem_small_thr = parameters.get_digit_parameters_None_if_absence(para_file, 'minimum_elevation', 'int')
    b_smaller = True
    if dem_small_thr is not None:
        remove_polygons(rm_slope_save_shp2, 'dem_mean', dem_small_thr, b_smaller, rm_dem_save_shp)

    # note: each step reads the output of the previous one, so it assumes
    # all four thresholds are set in the parameter file
def mask_by_surface_water(map_raster, surface_water_crop):
    # save mask result to the current folder
    save_mask_result = io_function.get_name_by_adding_tail(os.path.basename(map_raster), 'WaterMask')
    if os.path.isfile(save_mask_result):
        print('warning, %s exists' % save_mask_result)
        return save_mask_result

    # read
    map_array_2d, nodata = raster_io.read_raster_one_band_np(map_raster)
    water_array_2d, _ = raster_io.read_raster_one_band_np(surface_water_crop)
    print(map_array_2d.shape)
    if map_array_2d.shape != water_array_2d.shape:
        raise ValueError('size inconsistent: %s and %s' % (str(map_array_2d.shape), str(water_array_2d.shape)))

    # mask out pixels where the surface water raster marks water (1) or ocean (255)
    map_array_2d[np.logical_or(water_array_2d == 1, water_array_2d == 255)] = 0

    if raster_io.save_numpy_array_to_rasterfile(map_array_2d, save_mask_result, map_raster,
                                                compress='lzw', tiled='Yes', bigtiff='if_safer'):
        return save_mask_result
def main():
    basic.setlogfile('log_to_relative_dem_8bit.txt')
    if os.path.isdir(relative_dem_dir) is False:
        io_function.mkdir(relative_dem_dir)

    # 500 pixels by 500 pixels, that is 1 km by 1 km at 2 m resolution
    patch_width = 500
    patch_height = 500
    process_num = 1

    failed_tifs = []
    dem_reg_list = io_function.get_file_list_by_pattern(arcticDEM_reg_tif_dir, '*dem_reg.tif')
    count = len(dem_reg_list)
    for idx, tif in enumerate(dem_reg_list):
        print('%d/%d convert %s to relative DEM (8bit)' % (idx + 1, count, tif))
        rel_dem_8bit = io_function.get_name_by_adding_tail(tif, 'relDEM8bit')
        rel_dem_8bit = os.path.join(relative_dem_dir, os.path.basename(rel_dem_8bit))
        try:
            dem_to_relative_dem(tif, rel_dem_8bit, patch_width, patch_height, process_num)
        except Exception:
            failed_tifs.append(tif)

    with open('to_relative_dem_failed_cases.txt', 'w') as f_obj:
        for item in failed_tifs:
            f_obj.writelines(item + '\n')
def crop_to_same_exent_for_diff(dem_tif_list, save_dir, extent_id, extent_poly, process_num):
    # crop all DEMs to the same extent
    crop_tif_dir = os.path.join(save_dir, 'dem_crop_for_diff_sub_%d' % extent_id)
    if os.path.isdir(crop_tif_dir) is False:
        io_function.mkdir(crop_tif_dir)

    crop_tif_list = []
    for tif in dem_tif_list:
        save_crop_path = os.path.join(crop_tif_dir,
                                      os.path.basename(io_function.get_name_by_adding_tail(tif, 'sub_poly_%d' % extent_id)))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip cropping' % save_crop_path)
            crop_tif_list.append(save_crop_path)
        else:
            crop_tif = subset_image_by_polygon_box(tif, save_crop_path, extent_poly, resample_m='near',
                                                   same_extent=True, thread_num=process_num)
            if crop_tif is False:
                # raise ValueError('warning, crop %s failed' % tif)
                continue
            crop_tif_list.append(crop_tif)
    dem_tif_list = crop_tif_list
    return dem_tif_list
def resample_crop_raster(ref_raster, input_raster, output_raster=None, resample_method='near'):
    if output_raster is None:
        output_raster = io_function.get_name_by_adding_tail(os.path.basename(input_raster), 'res_sub')

    if os.path.isfile(output_raster):
        print('Warning, %s exists' % output_raster)
        return output_raster

    # check that the projections are consistent
    prj4_ref = map_projection.get_raster_or_vector_srs_info_proj4(ref_raster)
    prj4_input = map_projection.get_raster_or_vector_srs_info_proj4(input_raster)
    if prj4_ref != prj4_input:
        raise ValueError('projection inconsistent: %s and %s' % (ref_raster, input_raster))

    # crop and resample to match the reference raster
    RSImageProcess.subset_image_baseimage(output_raster, input_raster, ref_raster,
                                          same_res=True, resample_m=resample_method)
    if os.path.isfile(output_raster):
        return output_raster
    else:
        return False
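# Hypothetical usage of resample_crop_raster; the file names are assumptions.
# The output inherits the resolution and extent of the reference raster, and
# the call is skipped if the output already exists.
# water_sub = resample_crop_raster('S1_scene_prj_8bit.tif', 'surface_water_extent.tif',
#                                  output_raster='surface_water_res_sub.tif',
#                                  resample_method='near')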
def mask_by_elevation(map_raster_path, elevation_crop_path, threshold):
    # save mask result to the current folder
    save_mask_result = io_function.get_name_by_adding_tail(os.path.basename(map_raster_path), 'DEMMask')
    if os.path.isfile(save_mask_result):
        print('warning, %s exists' % save_mask_result)
        return save_mask_result

    # read
    map_array_2d, nodata = raster_io.read_raster_one_band_np(map_raster_path)
    dem_array_2d, _ = raster_io.read_raster_one_band_np(elevation_crop_path)
    print(map_array_2d.shape)
    if map_array_2d.shape != dem_array_2d.shape:
        raise ValueError('size inconsistent: %s and %s' % (str(map_array_2d.shape), str(dem_array_2d.shape)))

    # mask out pixels with high elevation
    map_array_2d[dem_array_2d > threshold] = 0

    if raster_io.save_numpy_array_to_rasterfile(map_array_2d, save_mask_result, map_raster_path,
                                                compress='lzw', tiled='Yes', bigtiff='if_safer'):
        return save_mask_result
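# A standalone sketch of the masking step shared by mask_by_surface_water and
# mask_by_elevation above: pixels meeting a condition are zeroed in the map array.
# The arrays and the 3000 m threshold are illustrative assumptions.
import numpy as np

map_array = np.array([[1, 1, 0],
                      [1, 0, 1]], dtype=np.uint8)
dem_array = np.array([[2500, 3200, 2800],
                      [3100, 2900, 2600]], dtype=np.int32)

map_array[dem_array > 3000] = 0  # zero out mapped pixels above the elevation threshold
print(map_array)                 # [[1 0 0] [0 0 1]]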
def main(options, args):
    extent_shp = args[0]
    img_path = args[1]
    save_dir = options.save_dir

    # check projection
    extent_prj = map_projection.get_raster_or_vector_srs_info_proj4(extent_shp)
    img_prj = map_projection.get_raster_or_vector_srs_info_proj4(img_path)
    if img_prj != extent_prj:
        raise ValueError('Projections of %s and %s are different' % (extent_shp, img_path))

    out_img = io_function.get_name_by_adding_tail(img_path, 'sub')
    out_img = os.path.join(save_dir, os.path.basename(out_img))

    extent_polys = vector_gpd.read_polygons_gpd(extent_shp)
    if len(extent_polys) != 1:
        raise ValueError('currently only support one polygon')
    for ext_poly in extent_polys:
        subset_image_by_polygon_min(img_path, out_img, ext_poly, resample_m='bilinear',
                                    o_format='GTiff', out_res=None)
def test_zonal_stats_multiRasters():
    shp = os.path.expanduser('~/Data/Arctic/canada_arctic/Willow_River/Willow_River_Thaw_Slumps.shp')
    # save_shp = os.path.basename(io_function.get_name_by_adding_tail(shp,'raster_stats'))

    # a single DEM
    # dem_file_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/DEM/WR_dem_ArcticDEM_mosaic')
    # dem_path = os.path.join(dem_file_dir,'WR_extent_2m_v3.0_ArcticTileDEM_sub_1_prj.tif')

    # DEM patches
    dem_file_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/DEM/WR_dem_ArcticDEM_mosaic/dem_patches')
    dem_list = io_function.get_file_list_by_ext('.tif', dem_file_dir, bsub_folder=False)

    save_shp = os.path.basename(io_function.get_name_by_adding_tail(shp, 'multi_raster_stats'))
    io_function.copy_shape_file(shp, save_shp)
    zonal_stats_multiRasters(save_shp, dem_list, nodata=None, band=1, stats=None, prefix='dem',
                             range=None, all_touched=True, process_num=4)
def segment_a_dem_diff(dem_diff_path, process_num, ele_diff_thr, min_area, max_area, job_id=0):
    basic.setlogfile('log_segment_dem_diff_job_%d.txt' % job_id)

    # find the 8bit version of the DEM difference
    tif_8bit = io_function.get_name_by_adding_tail(dem_diff_path, '8bit')
    demD_8bit = os.path.join(dem_common.grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
    if os.path.isfile(demD_8bit) is False:
        print('error, 8bit DEM diff does not exist: %s ' % demD_8bit)
        return False

    grid_id = int(re.findall(r'grid\d+', os.path.basename(dem_diff_path))[0][4:])
    save_dir = os.path.join(dem_common.grid_dem_diffs_segment_dir, 'segment_result_grid%d' % grid_id)

    return segment_subsidence_grey_image(demD_8bit, dem_diff_path, save_dir, process_num,
                                         subsidence_thr_m=ele_diff_thr, min_area=min_area, max_area=max_area)
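# A standalone check of the grid-id parsing above: 'grid' followed by digits is
# extracted from the file name and the 'grid' prefix stripped. The file name is
# an illustrative assumption.
import re

name = 'WR_dem_diff_grid9274.tif'
grid_id = int(re.findall(r'grid\d+', name)[0][4:])
print(grid_id)  # 9274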
def check_coreg_results(dem_tif, save_dir):
    dem_align = os.path.join(save_dir, 'dem_coreg',
                             os.path.basename(io_function.get_name_by_adding_tail(dem_tif, 'coreg')))
    if os.path.isfile(dem_align):
        return True
    return False
def test_fill_holes_in_polygons_shp():
    shp_dir = os.path.expanduser('~/Data/Arctic/canada_arctic/Willow_River/training_polygons')
    shp_path = os.path.join(shp_dir, 'WR_training_polygons_v4.shp')
    output = io_function.get_name_by_adding_tail(shp_path, 'noholes')
    vector_gpd.fill_holes_in_polygons_shp(shp_path, output)
    basic.outputlogMessage('saving no-hole polygons to %s' % output)
def test_dem_tif_to_8bit():
    dem_diff_list = io_function.get_file_list_by_pattern('./', '*.tif')
    count = len(dem_diff_list)
    for idx, tif in enumerate(dem_diff_list):
        print('%d/%d convert %s to 8 bit' % (idx + 1, count, tif))
        tif_8bit = io_function.get_name_by_adding_tail(tif, '8bit')
        output = os.path.join(grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
        dem_tif_to_8bit(tif, output)
def process_one_dem(idx, count, tif, product_list, arcticDEM_slope_dir, arcticDEM_slope_8bit_dir,
                    arcticDEM_hillshade_dir, arcticDEM_tpi_8bit_dir):
    print('%d/%d convert %s to slope (8bit) and hillshade' % (idx + 1, count, tif))
    try:
        slope_file = os.path.basename(io_function.get_name_by_adding_tail(tif, 'slope'))
        slope_file_bak = os.path.join(arcticDEM_slope_dir, os.path.basename(slope_file))
        if 'slope' in product_list or 'slope_8bit' in product_list:
            slope_out = dem_to_slope(tif, slope_file, slope_file_bak)
            if slope_out is not False:
                if 'slope_8bit' in product_list:
                    slope_8bit = io_function.get_name_by_adding_tail(tif, 'slope8bit')
                    slope_8bit = os.path.join(arcticDEM_slope_8bit_dir, os.path.basename(slope_8bit))
                    slope_to_8bit(slope_file, slope_8bit)
                # move or delete the intermediate slope file
                if 'slope' in product_list:
                    io_function.move_file_to_dst(slope_file, slope_file_bak)
                else:
                    io_function.delete_file_or_dir(slope_file)

        if 'hillshade' in product_list:
            hillshade = io_function.get_name_by_adding_tail(tif, 'hillshade')
            hillshade = os.path.join(arcticDEM_hillshade_dir, os.path.basename(hillshade))
            dem_to_hillshade(tif, hillshade)

        if 'tpi' in product_list:
            tpi_8bit = io_function.get_name_by_adding_tail(tif, 'TPI8bit')
            tpi_8bit = os.path.join(arcticDEM_tpi_8bit_dir, os.path.basename(tpi_8bit))
            dem_to_tpi_save_8bit(tif, tpi_8bit)

        return True
    except Exception:
        print('failed in processing %s' % tif)
        return tif
def move_files(save_dir, out_fig, out_hist_info):
    if os.path.isdir(save_dir) is False:
        io_function.mkdir(save_dir)

    # trim the white margins of the figure (requires ImageMagick)
    trim_fig = io_function.get_name_by_adding_tail(out_fig, 'trim')
    os.system('convert -trim %s %s' % (out_fig, trim_fig))
    io_function.movefiletodir(trim_fig, save_dir, overwrite=True)
    io_function.delete_file_or_dir(out_fig)
    # io_function.movefiletodir(out_fig, save_dir, overwrite=True)

    io_function.movefiletodir(out_hist_info, save_dir, overwrite=True)
def main(options, args):
    polygons_shp = args[0]
    output = options.output
    if output is None:
        output = io_function.get_name_by_adding_tail(polygons_shp, 'removed')
    para_file = options.para_file
    remove_polygons_main(polygons_shp, output, para_file)
def calculate_gully_information(gullies_shp):
    """
    get the oriented minimum bounding box for each gully polygon, and add shape information
    derived from the bounding box to gullies_shp
    :param gullies_shp: input shapefile containing the gully polygons
    :return: True if successful, False otherwise
    """
    operation_obj = shape_opeation()
    output_shapeinfo = io_function.get_name_by_adding_tail(gullies_shp, 'shapeInfo')
    if os.path.isfile(output_shapeinfo) is False:
        operation_obj.get_polygon_shape_info(gullies_shp, output_shapeinfo)
    else:
        basic.outputlogMessage('warning, %s already exists, skip calculating shape features' % output_shapeinfo)

    # put all features into one shapefile
    # parameter 3 is the same as parameter 1, to overwrite the input file
    # note: the area here is the area of the oriented minimum bounding box, not the area of the polygon
    operation_obj.add_fields_shape(gullies_shp, output_shapeinfo, gullies_shp)

    # add width/height ratio (always <= 1, regardless of which side is longer)
    width_height_list = operation_obj.get_shape_records_value(gullies_shp, attributes=['WIDTH', 'HEIGHT'])
    ratio = []
    for width_height in width_height_list:
        if width_height[0] > width_height[1]:
            r_value = width_height[1] / width_height[0]
        else:
            r_value = width_height[0] / width_height[1]
        ratio.append(r_value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, ratio, 'ratio_w_h')

    # add perimeter^2/area ratio
    perimeter_area_list = operation_obj.get_shape_records_value(gullies_shp, attributes=['INperimete', 'INarea'])
    if perimeter_area_list is False:
        return False
    ratio_p_a = []
    for perimeter_area in perimeter_area_list:
        r_value = perimeter_area[0] ** 2 / perimeter_area[1]
        ratio_p_a.append(r_value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, ratio_p_a, 'ratio_p_a')

    # add circularity (4*pi*area/perimeter^2), which is the inverse of ratio_p_a up to a constant
    circularity = []
    for perimeter_area in perimeter_area_list:
        value = 4 * math.pi * perimeter_area[1] / perimeter_area[0] ** 2
        circularity.append(value)
    operation_obj.add_one_field_records_to_shapefile(gullies_shp, circularity, 'circularity')

    return True
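# A worked example of the three shape metrics added above, computed for a
# 2 x 8 rectangle (illustrative values, not read from a shapefile).
import math

width, height = 8.0, 2.0
area = width * height              # 16.0
perimeter = 2 * (width + height)   # 20.0

ratio_w_h = min(width, height) / max(width, height)  # 0.25, always <= 1
ratio_p_a = perimeter ** 2 / area                    # 25.0, larger for narrower shapes
circularity = 4 * math.pi * area / perimeter ** 2    # ~0.503, equals 1.0 for a circle
print(ratio_w_h, ratio_p_a, circularity)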
def one_dem_diff_to_8bit(demDiff_tif):
    if os.path.isdir(grid_dem_diffs_8bit_dir) is False:
        io_function.mkdir(grid_dem_diffs_8bit_dir)
    tif_8bit = io_function.get_name_by_adding_tail(demDiff_tif, '8bit')
    output = os.path.join(grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
    if dem_tif_to_8bit(demDiff_tif, output) is False:
        basic.outputlogMessage('failed to generate 8bit grey image from DEM difference')
        return False
    return True
def main(options, args):
    polygons_shp = args[0]
    output = options.output
    if output is None:
        output = io_function.get_name_by_adding_tail(polygons_shp, 'noholes')
    vector_gpd.fill_holes_in_polygons_shp(polygons_shp, output)
    basic.outputlogMessage('saving no-hole polygons to %s' % output)
def predict_one_image_mmseg(para_file, image_path, img_save_dir, inf_list_file, gpuid, trained_model):
    """
    run prediction on one image
    """
    expr_name = parameters.get_string_parameters(para_file, 'expr_name')
    network_ini = parameters.get_string_parameters(para_file, 'network_setting_ini')
    base_config_file = parameters.get_string_parameters(network_ini, 'base_config')
    config_file = osp.basename(io_function.get_name_by_adding_tail(base_config_file, expr_name))

    inf_batch_size = parameters.get_digit_parameters(network_ini, 'inf_batch_size', 'int')

    patch_width = parameters.get_digit_parameters(para_file, 'inf_patch_width', 'int')
    patch_height = parameters.get_digit_parameters(para_file, 'inf_patch_height', 'int')
    adj_overlay_x = parameters.get_digit_parameters(para_file, 'inf_pixel_overlay_x', 'int')
    adj_overlay_y = parameters.get_digit_parameters(para_file, 'inf_pixel_overlay_y', 'int')

    done_indicator = '%s_done' % inf_list_file
    if os.path.isfile(done_indicator):
        basic.outputlogMessage('warning, %s exists, skip prediction' % done_indicator)
        return

    if os.path.isdir(img_save_dir) is False:
        io_function.mkdir(img_save_dir)

    # use a specific GPU for prediction, inference of only one image
    time0 = time.time()
    if gpuid is None:
        gpuid = 0
    predict_rsImage_mmseg(config_file, trained_model, image_path, img_save_dir, batch_size=inf_batch_size,
                          gpuid=gpuid, tile_width=patch_width, tile_height=patch_height,
                          overlay_x=adj_overlay_x, overlay_y=adj_overlay_y)

    duration = time.time() - time0
    os.system('echo "$(date): time cost of inference for image in %s: %.2f seconds">>"time_cost.txt"'
              % (inf_list_file, duration))

    # write a file to indicate that the prediction is done
    os.system('echo %s > %s_done' % (inf_list_file, inf_list_file))
    return
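# A standalone sketch of the "done indicator" pattern used above: a marker file
# records that a step finished, so a rerun can skip it. The function and file
# names here are illustrative assumptions.
import os

def run_step_once(list_file, work):
    done_flag = '%s_done' % list_file
    if os.path.isfile(done_flag):
        return 'skipped'
    work()
    with open(done_flag, 'w') as f_obj:
        f_obj.write(list_file + '\n')
    return 'ran'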
def surface_water_Houston():
    ref_raster_up = os.path.join(data_dir, 'Houston/Houston_SAR_GRD_FLOAT_gee/S1_Houston_prj_8bit_select/'
                                 'S1A_IW_GRDH_1SDV_20170829T002645_20170829T002710_018131_01E74D_3220_prj_8bit.tif')
    ref_raster_down = os.path.join(data_dir, 'Houston/Houston_SAR_GRD_FLOAT_gee/S1_Houston_prj_8bit_select/'
                                   'S1A_IW_GRDH_1SDV_20170829T002620_20170829T002645_018131_01E74D_D734_prj_8bit.tif')
    # note: there is a tiny gap, around 30 m wide, between ref_raster_up and ref_raster_down,
    # which ends up as a line in the surface water figure.
    # consider another way to crop surface water: get shapefiles of these two rasters,
    # edit them, and use the shapefiles for cropping

    # global surface water
    surface_water = os.path.join(sur_water_dir, 'extent_epsg2163_houston/extent_100W_30_40N_v1_3_2020.tif')

    # resample and crop to the same resolution and extent
    output_raster_up = io_function.get_name_by_adding_tail(os.path.basename(surface_water), 'res_sub_up')
    surface_water_crop = resample_crop_raster(ref_raster_up, surface_water, output_raster=output_raster_up)

    # mask nodata regions
    out_nodata = 128  # 255 already indicates ocean, so use 128 as nodata
    out_nodata_raster_up = io_function.get_name_by_adding_tail(output_raster_up, 'nodataMask')
    mask_nodata_regions_surface_water(ref_raster_up, surface_water_crop, out_nodata_raster_up,
                                      ref_nodata=0, out_nodata=out_nodata)

    # resample and crop to the same resolution and extent
    output_raster_down = io_function.get_name_by_adding_tail(os.path.basename(surface_water), 'res_sub_down')
    surface_water_crop = resample_crop_raster(ref_raster_down, surface_water, output_raster=output_raster_down)

    # mask nodata regions
    out_nodata_raster_down = io_function.get_name_by_adding_tail(output_raster_down, 'nodataMask')
    mask_nodata_regions_surface_water(ref_raster_down, surface_water_crop, out_nodata_raster_down,
                                      ref_nodata=0, out_nodata=out_nodata)

    # merge the two masked rasters
    output_raster_nodata_mask = io_function.get_name_by_adding_tail(os.path.basename(surface_water), 'res_sub_nodata')
    command_str = 'gdal_merge.py -o %s -n %d -a_nodata %d -co compress=lzw %s %s' \
                  % (output_raster_nodata_mask, out_nodata, out_nodata, out_nodata_raster_up, out_nodata_raster_down)
    res = os.system(command_str)
    if res != 0:
        sys.exit(1)

    # remove the nodata, allowing QGIS to customize it
    # raster_io.remove_nodata_from_raster_metadata(output_raster_nodata_mask)

    return True
def get_dem_diff_8bit(dem_diff_path):
    # find the 8bit version
    tif_8bit = io_function.get_name_by_adding_tail(dem_diff_path, '8bit')
    demD_8bit = os.path.join(dem_common.grid_dem_diffs_8bit_dir, os.path.basename(tif_8bit))
    # if it does not exist, try to generate it
    if os.path.isfile(demD_8bit) is False:
        if one_dem_diff_to_8bit(dem_diff_path) is False:
            basic.outputlogMessage('error, 8bit DEM diff does not exist: %s ' % demD_8bit)
            return None
    return demD_8bit
def main(options, args):
    input_shp = args[0]
    output_raster = args[1]
    if io_function.is_file_exist(input_shp) is False:
        return False

    all_class_raster = io_function.get_name_by_adding_tail(output_raster, 'AllClass')
    num_class = parameters.get_digit_parameters(options.para_file, 'NUM_CLASSES_noBG', None, 'int')

    if convert_training_examples_from_shp_to_raster(input_shp, all_class_raster) is False:
        basic.outputlogMessage("Producing the label images from training polygons failed")
        return False
    else:
        basic.outputlogMessage("Done: producing the label images from training polygons, output: %s" % all_class_raster)

    if num_class == 1:
        # only keep the target (gully or others) label
        one_class_raster = io_function.get_name_by_adding_tail(output_raster, 'oneClass')
        if only_keep_one_class(all_class_raster, one_class_raster, class_index=1) is False:
            return False
    else:
        one_class_raster = all_class_raster

    # crop the label image to the same 2D dimensions as the training images
    baseimage = parameters.get_input_image_path()
    if RSImageProcess.subset_image_baseimage(output_raster, one_class_raster, baseimage) is False:
        basic.outputlogMessage("Error: subset_image_baseimage failed")
        return False

    return True
def save_planet_images_to_shapefile(geojson_list, save_shp_path, wkt_string, extent_polygon=None, b_group_date=False):
    '''
    get the metadata and extents of downloaded Planet images, and save them to a shapefile
    :param geojson_list: list of geojson files, one per scene
    :param save_shp_path: path of the output shapefile
    :param wkt_string: projection of the output, as a WKT string
    :param extent_polygon: a list containing one extent polygon (required); only scenes overlapping it are saved
    :param b_group_date: whether to group the scenes by acquisition date, one shapefile per date
    :return: True if successful
    '''
    # remove incomplete scenes
    geojson_list = [item for item in geojson_list if 'incomplete_scenes' not in item]
    if len(geojson_list) < 1:
        raise ValueError('No geojson files (excluding incomplete_scenes) in the given folder')

    if extent_polygon is not None and len(extent_polygon) > 1:
        raise ValueError('Only support one extent polygon')
    extent = extent_polygon[0]

    if b_group_date is False:
        geojson_group = {'all': geojson_list}
    else:
        geojson_group = group_geojson_by_date(geojson_list)

    for key in geojson_group.keys():
        sub_geojsons = geojson_group[key]
        if len(sub_geojsons) < 1:
            continue
        sel_geojson_list, sel_polygons = get_geojson_list_overlap_a_polygon(extent, sub_geojsons)
        if len(sel_geojson_list) < 1:
            continue

        scene_table, scene_without_asset = get_meta_dict(sel_geojson_list)
        if len(scene_table['scene_id']) != len(sel_polygons):
            raise ValueError('The counts of scene IDs and polygons differ, possibly due to duplicated scenes')

        # convert to strings, since ESRI Shapefile does not support datetime fields
        scene_table['acquisitionDate'] = [timeTools.datetime2str(item) for item in scene_table['acquisitionDate']]
        scene_table['downloadTime'] = [timeTools.datetime2str(item) for item in scene_table['downloadTime']]

        scene_table['Polygons'] = sel_polygons
        df = pd.DataFrame(scene_table)

        if key == 'all':
            save_path = save_shp_path
        else:
            date_str = timeTools.date2str(key)
            save_path = io_function.get_name_by_adding_tail(save_shp_path, date_str)
        vector_gpd.save_polygons_to_files(df, 'Polygons', wkt_string, save_path)

    return True
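# A minimal stand-in for group_geojson_by_date, grouping paths by an 8-digit
# date embedded in the file name; the real implementation may differ (e.g., it
# may key on date objects), and the example paths are assumptions.
import re
from collections import defaultdict

def group_paths_by_date(paths):
    groups = defaultdict(list)
    for path in paths:
        match = re.search(r'\d{8}', path)
        groups[match.group(0) if match else 'unknown'].append(path)
    return dict(groups)

print(group_paths_by_date(['scene_20200718_a.geojson',
                           'scene_20200718_b.geojson',
                           'scene_20200901_c.geojson']))
# {'20200718': [...two scenes...], '20200901': [...one scene...]}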
def create_new_region_defined_parafile(template_para_file, img_dir, area_remark=None):
    '''
    create a new region-defined para file. Only the new images are defined (other parameters are unchanged)
    :param template_para_file: the template para file
    :param img_dir: the directory containing the images
    :param area_remark: a remark for the area, also used in the output file name
    :return: the path of the new para file
    '''
    io_function.is_file_exist(template_para_file)

    dir_base = os.path.basename(img_dir)
    date_strs = re.findall(r'\d{8}', dir_base)
    if len(date_strs) == 1:
        date = date_strs[0]
    else:
        date = 'unknown'

    tail = date if area_remark is None else date + '_' + area_remark
    new_para_file = io_function.get_name_by_adding_tail(template_para_file, tail)
    new_para_file = os.path.basename(new_para_file)  # save to the current folder
    if os.path.isfile(new_para_file):
        raise IOError('%s already exists, please check or remove it first' % new_para_file)

    # copy the file
    io_function.copy_file_to_dst(template_para_file, new_para_file)

    if area_remark is not None:
        modify_parameter(new_para_file, 'area_remark', area_remark)
    if date != 'unknown':
        modify_parameter(new_para_file, 'area_time', date)

    modify_parameter(new_para_file, 'input_image_dir', img_dir)
    modify_parameter(new_para_file, 'inf_image_dir', img_dir)

    tif_list = io_function.get_file_list_by_ext('.tif', img_dir, bsub_folder=False)
    if len(tif_list) < 1:
        raise ValueError('No tif in %s' % img_dir)
    if len(tif_list) == 1:
        modify_parameter(new_para_file, 'input_image_or_pattern', os.path.basename(tif_list[0]))
        modify_parameter(new_para_file, 'inf_image_or_pattern', os.path.basename(tif_list[0]))
    else:
        modify_parameter(new_para_file, 'input_image_or_pattern', '*.tif')
        modify_parameter(new_para_file, 'inf_image_or_pattern', '*.tif')

    print("modified and saved new parameter file: %s " % new_para_file)
    return new_para_file
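# A standalone check of the date parsing above: exactly one 8-digit date in the
# directory name is used; otherwise the date is 'unknown'. The directory name
# is an illustrative assumption.
import os
import re

dir_base = os.path.basename('/data/planet_20200718_WR')
date_strs = re.findall(r'\d{8}', dir_base)
date = date_strs[0] if len(date_strs) == 1 else 'unknown'
print(date)  # 20200718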
def add_raster_info_from_bufferArea(polygons_shp, raster_file, raster_name, b_buffer_size):
    """
    calculate raster information (e.g., elevation) within a buffer area, then add it to each polygon
    Args:
        polygons_shp: input shapefile
        raster_file: raster file, should have the same projection as the shapefile
        raster_name: the name of the raster; should be at most four letters, since it is used as part of the attribute name
        b_buffer_size: the size of the buffer area in meters

    Returns: True if successful, False otherwise

    """
    if io_function.is_file_exist(polygons_shp) is False:
        return False
    if io_function.is_file_exist(raster_file) is False:
        return False
    operation_obj = shape_opeation()

    ## calculate the topography information from the buffer area
    basic.outputlogMessage("info: calculate the raster information from the buffer area")
    buffer_polygon_shp = io_function.get_name_by_adding_tail(polygons_shp, 'buffer')
    # if os.path.isfile(buffer_polygon_shp) is False:
    if vector_features.get_buffer_polygons(polygons_shp, buffer_polygon_shp, b_buffer_size) is False:
        raise IOError("error, failed in producing the buffer_polygon_shp")

    # replace the polygon shapefile
    polygons_shp_backup = polygons_shp
    polygons_shp = buffer_polygon_shp

    # all_touched: bool, optional
    #     Whether to include every raster cell touched by a geometry, or only
    #     those having a center point within the polygon. Defaults to False.
    # since the DEM is usually coarser, we set all_touched = True
    all_touched = True
    stats_list = ['min', 'max', 'mean', 'std']  # ['min', 'max', 'mean', 'count', 'median', 'std']
    if operation_obj.add_fields_from_raster(polygons_shp, raster_file, raster_name, band=1,
                                            stats_list=stats_list, all_touched=all_touched) is False:
        return False

    # copy the information to the original shapefile
    operation_obj.add_fields_shape(polygons_shp_backup, buffer_polygon_shp, polygons_shp_backup)

    return True
def test_select_polygons_overlap_each_other():
    yolo_box_shp = os.path.expanduser('~/Data/Arctic/alaska/autoMapping/alaskaNS_yolov4_4/result_backup/'
                                      'alaNS_hillshadeHWline_2008to2017_alaskaNS_yolov4_4_exp4_1/'
                                      'alaNS_hillshadeHWline_2008to2017_alaskaNS_yolov4_4_exp4_post_1.shp')
    dem_subsidence_shp = os.path.expanduser('~/Data/dem_processing/grid_dem_diffs_segment_subsidence.gpkg')
    save_path = io_function.get_name_by_adding_tail(yolo_box_shp, 'select')
    select_polygons_overlap_others_in_group2(yolo_box_shp, dem_subsidence_shp, save_path, process_num=4)