def subset_image_by_polygon_box(in_img, out_img, polygon, resample_m='bilinear', o_format='GTiff', out_res=None, same_extent=False):
    """Crop in_img to the bounding box of polygon, writing the result to out_img.

    When same_extent is True the output covers exactly the polygon box;
    otherwise it is clipped to the minimum of the polygon box and the image
    footprint. Returns whatever the RSImageProcess helper returns.
    """
    # both branches delegate to RSImageProcess; pick the cropping helper first
    crop_func = (RSImageProcess.subset_image_by_polygon_box
                 if same_extent
                 else RSImageProcess.subset_image_by_polygon_box_image_min)
    return crop_func(out_img, in_img, polygon, resample_m=resample_m,
                     o_format=o_format, xres=out_res, yres=out_res)
def resample_crop_raster(ref_raster, input_raster, output_raster=None, resample_method='near'):
    """Resample and crop input_raster onto the grid of ref_raster.

    Returns the output path on success (an already existing output is
    reused without re-processing), or False when the output file was not
    produced. Raises ValueError if the two rasters have different
    projections.
    """
    if output_raster is None:
        output_raster = io_function.get_name_by_adding_tail(
            os.path.basename(input_raster), 'res_sub')

    if os.path.isfile(output_raster):
        print('Warning, %s exists' % output_raster)
        return output_raster

    # the two rasters must share a projection before cropping
    ref_prj = map_projection.get_raster_or_vector_srs_info_proj4(ref_raster)
    in_prj = map_projection.get_raster_or_vector_srs_info_proj4(input_raster)
    if ref_prj != in_prj:
        raise ValueError('projection inconsistent: %s and %s' % (ref_raster, input_raster))

    # crop (and resample) to match the reference image grid
    RSImageProcess.subset_image_baseimage(output_raster, input_raster, ref_raster,
                                          same_res=True, resample_m=resample_method)
    return output_raster if os.path.isfile(output_raster) else False
def subset_image_by_polygon_box(in_img, out_img, polygon, resample_m='bilinear', o_format='GTiff', out_res=None, same_extent=False, thread_num=1):
    """Crop in_img to the bounding box of polygon with LZW/tiled/BigTIFF output options.

    same_extent=True crops to exactly the polygon box; otherwise the output
    is limited to the overlap of the polygon box and the image footprint.
    thread_num is passed through to the underlying gdalwarp-based helper.
    """
    # shared keyword arguments for both RSImageProcess helpers
    common = dict(resample_m=resample_m, o_format=o_format,
                  xres=out_res, yres=out_res, compress='lzw',
                  tiled='yes', bigtiff='if_safer', thread_num=thread_num)
    if same_extent:
        return RSImageProcess.subset_image_by_polygon_box(out_img, in_img, polygon, **common)
    # crop to the min extent (polygon or the image)
    return RSImageProcess.subset_image_by_polygon_box_image_min(out_img, in_img, polygon, **common)
def process_arcticDEM_tiles(tar_list, save_dir, inter_format, resample_method, o_res, extent_poly, extent_id, pre_name, b_rm_inter=True):
    '''
    process the mosaic (not multi-temporal) version of ArcticDEM
    :param tar_list: tarballs to unpack
    :param save_dir: output directory
    :param inter_format: GDAL format for intermediate files
    :param resample_method: resampling method for mosaicking
    :param o_res: output resolution
    :param extent_poly: extent polygon, in the same projection as ArcticDEM
    :param extent_id: extent id (used in the output file name)
    :param pre_name: prefix for the output file name
    :param b_rm_inter: remove intermediate (unpacked) folders when True
    :return: True on success; raises ValueError if no DEM was extracted
    '''
    # unpack the tarballs and crop each tile to the extent polygon
    dem_tif_list, tar_folders = process_dem_tarball(tar_list, save_dir, inter_format, o_res,
                                                    extent_poly=extent_poly, poly_id=extent_id,
                                                    process_num=4)
    if not dem_tif_list:
        raise ValueError('No DEM extracted from tarballs')

    # output named after the trailing 7 characters of the first tarball folder
    dem_name = os.path.basename(tar_folders[0])[-7:]
    save_path = os.path.join(
        save_dir, pre_name + '_' + dem_name + '_ArcticTileDEM_sub_%d.tif' % extent_id)
    RSImageProcess.mosaic_crop_images_gdalwarp(dem_tif_list, save_path,
                                               resampling_method=resample_method,
                                               o_format=inter_format,
                                               xres=o_res, yres=o_res,
                                               compress='lzw', tiled='yes', bigtiff='if_safer')

    # remove intermediate files
    if b_rm_inter:
        basic.outputlogMessage('remove intermediate files')
        for folder in tar_folders:
            io_function.delete_file_or_dir(folder)

    return True
def main(options, args):
    """Co-register a new image (args[1]) to a reference image (args[0]) with SIFT-GPU.

    Offset metadata is written to an XML file next to the output image.
    """
    ref_image, new_image = args[0], args[1]

    # default output name: append 'coreg' to the new image's name
    output = options.output if options.output is not None \
        else io_function.get_name_by_adding_tail(new_image, 'coreg')

    bkeepmidfile = True  # keep intermediate files for inspection
    xml_path = os.path.splitext(output)[0] + '.xml'
    coreg_xml = OffsetMetaDataClass(xml_path)
    RSImageProcess.coregistration_siftGPU(ref_image, new_image, bkeepmidfile, coreg_xml)
def mosaic_dem_list_gdal_merge(key, dem_list, save_tif_dir, save_source):
    """Create a mosaic <key>.tif from dem_list; in overlaps the last image wins.

    Returns the mosaic path (an existing mosaic is reused); exits the
    process if mosaicking fails.
    """
    save_mosaic = os.path.join(save_tif_dir, key + '.tif')

    # skip when the mosaic (possibly inside a sub-folder) already exists
    b_save_mosaic = io_function.is_file_exist_subfolder(save_tif_dir, key + '.tif')
    if b_save_mosaic is not False:
        basic.outputlogMessage('warning, mosaic file: %s exist, skip' % save_mosaic)
        return save_mosaic

    # optionally record which DEMs went into this mosaic
    if save_source:
        save_mosaic_source_txt = os.path.join(save_tif_dir, key + '_src.txt')
        io_function.save_list_to_txt(save_mosaic_source_txt, dem_list)

    # a single non-VRT input needs no mosaicking: just copy it
    if len(dem_list) == 1 and raster_io.get_driver_format(dem_list[0]) != 'VRT':
        io_function.copy_file_to_dst(dem_list[0], save_mosaic)
        return save_mosaic

    nodata = raster_io.get_nodata(dem_list[0])
    # create mosaic; can handle a single input file as well, but is slow
    result = RSImageProcess.mosaics_images(dem_list, save_mosaic, nodata=nodata,
                                           compress='lzw', tiled='yes', bigtiff='if_safer')
    if result is False:
        sys.exit(1)  # abort the run on mosaic failure
    return save_mosaic
def create_a_mosaic(pre_name, extent_id, save_dir, extent_poly, tile_list):
    """Mosaic the ArcticDEM tiles in tile_list, then crop the mosaic to extent_poly.

    NOTE: relies on module-level 'res' and 'dem_tif_dir' to locate the
    registered tile DEM files. Existing mosaic/crop outputs are reused.
    """
    # collect the registered DEM tif for every tile, failing fast on missing files
    tif_list = []
    for tile_num in tile_list:
        tiff_path = os.path.join(dem_tif_dir, tile_num + '_' + res + '_v3.0' + '_reg_dem.tif')
        if not os.path.isfile(tiff_path):
            raise ValueError('%s not exists' % tiff_path)
        tif_list.append(tiff_path)

    thread_num = 8
    output_mosaic = os.path.join(
        save_dir, pre_name + '_ArcticDEM_mosaic_%d' % extent_id + '.tif')
    # create mosaic, can handle only input one file, but is slow
    if not os.path.isfile(output_mosaic):
        RSImageProcess.mosaic_crop_images_gdalwarp(tif_list, output_mosaic,
                                                   resampling_method='average', o_format='GTiff',
                                                   compress='lzw', tiled='yes', bigtiff='if_safer',
                                                   thread_num=thread_num)
    else:
        print('mosaic: %s exist, skip' % output_mosaic)

    # crop the mosaic to the extent polygon (32 m output resolution)
    output_crop = os.path.join(
        save_dir, pre_name + '_ArcticDEM_mosaic_%d_crop' % extent_id + '.tif')
    if not os.path.isfile(output_crop):
        RSImageProcess.subset_image_by_polygon_box_image_min(output_crop, output_mosaic, extent_poly,
                                                             resample_m='average', o_format='GTiff',
                                                             xres=32, yres=32, compress='lzw',
                                                             tiled='yes', bigtiff='if_safer',
                                                             thread_num=thread_num)
    else:
        print('Crop: %s exist, skip' % output_crop)
def subset_image_by_ref_image(in_img, out_img, base_image, resample_m='bilinear', o_format='GTiff'):
    # Crop (and resample) in_img onto the grid/extent of base_image, writing out_img.
    # Returns whatever RSImageProcess.subset_image_baseimage returns.
    # NOTE(review): o_format is accepted but never forwarded to
    # subset_image_baseimage, so the output format is whatever that helper
    # defaults to — confirm whether it should be passed through.
    return RSImageProcess.subset_image_baseimage(out_img, in_img, base_image, same_res=True, resample_m=resample_m)
def resample_crop_raster_using_shp(ref_shp, input_raster, output_raster=None, resample_method='near', save_dir='./', out_res=10, dst_nondata=128):
    """Crop input_raster by the polygons in ref_shp, resampling to out_res.

    Returns the output path on success (an already existing output is
    reused), or False when the output file was not created. Raises
    ValueError when the shapefile and raster projections differ.
    """
    if output_raster is None:
        output_raster = io_function.get_name_by_adding_tail(
            os.path.basename(input_raster), 'res_sub')
        output_raster = os.path.join(save_dir, output_raster)

    # refuse to mix projections between the shapefile and the raster
    shp_prj = map_projection.get_raster_or_vector_srs_info_proj4(ref_shp)
    raster_prj = map_projection.get_raster_or_vector_srs_info_proj4(input_raster)
    if shp_prj != raster_prj:
        raise ValueError('projection inconsistent: %s and %s' % (ref_shp, input_raster))

    if os.path.isfile(output_raster):
        print('Warning, %s exists' % output_raster)
        return output_raster

    # crop by the shapefile geometry (not by a base image)
    RSImageProcess.subset_image_by_shapefile(input_raster, ref_shp, save_path=output_raster,
                                             dst_nondata=dst_nondata, resample_m=resample_method,
                                             xres=out_res, yres=out_res, compress='lzw',
                                             tiled='yes', bigtiff='IF_SAFER')
    return output_raster if os.path.isfile(output_raster) else False
def main(options, args):
    """Rasterize training polygons (args[0]) into a label image (args[1]).

    Produces an all-class label raster, optionally collapses it to a single
    class when NUM_CLASSES_noBG is 1, then crops the label image to the
    same 2D dimensions as the training image. Returns True on success,
    False otherwise.
    """
    input_shp = args[0]
    output_raster = args[1]
    if io_function.is_file_exist(input_shp) is False:
        return False

    all_class_raster = io_function.get_name_by_adding_tail(output_raster, 'AllClass')
    num_class = parameters.get_digit_parameters(options.para_file, 'NUM_CLASSES_noBG', None, 'int')

    if convert_training_examples_from_shp_to_raster(input_shp, all_class_raster) is False:
        # BUG FIX: corrected typo in log message ("Falild" -> "Failed")
        basic.outputlogMessage(
            "Producing the label images from training polygons is Failed")
        return False
    else:
        basic.outputlogMessage(
            "Done: Producing the label images from training polygons, output: %s" % all_class_raster)

    if num_class == 1:
        # only keep target (gully or others) label
        one_class_raster = io_function.get_name_by_adding_tail(output_raster, 'oneClass')
        if only_keep_one_class(all_class_raster, one_class_raster, class_index=1) is False:
            return False
    else:
        one_class_raster = all_class_raster

    # crop the label image to have the same 2D dimension with the training images
    baseimage = parameters.get_input_image_path()
    if RSImageProcess.subset_image_baseimage(output_raster, one_class_raster, baseimage) is False:
        basic.outputlogMessage("Error: subset_image_baseimage Failed")
        return False

    return True
def mosaic_dem_list(key, dem_list, save_tif_dir, resample_method, save_source, o_format):
    """Mosaic the DEMs in dem_list into <save_tif_dir>/<key>.tif.

    Returns the mosaic path (an existing mosaic is reused), or False when
    the gdalwarp-based mosaicking fails.
    """
    save_mosaic = os.path.join(save_tif_dir, key + '.tif')

    # reuse a mosaic that already exists (possibly inside a sub-folder)
    b_save_mosaic = io_function.is_file_exist_subfolder(save_tif_dir, key + '.tif')
    if b_save_mosaic is not False:
        basic.outputlogMessage('warning, mosaic file: %s exist, skip' % b_save_mosaic)
        return b_save_mosaic

    # keep a record of the source files used for this mosaic
    if save_source:
        io_function.save_list_to_txt(os.path.join(save_tif_dir, key + '_src.txt'), dem_list)

    # create mosaic; can handle a single input file as well
    result = RSImageProcess.mosaic_crop_images_gdalwarp(dem_list, save_mosaic,
                                                        resampling_method=resample_method,
                                                        o_format=o_format, compress='lzw',
                                                        tiled='yes', bigtiff='if_safer')
    return False if result is False else save_mosaic
def mosaic_dem_list(key, dem_list, save_tif_dir, resample_method, save_source, o_format, thread_num=1):
    """Mosaic dem_list into <save_tif_dir>/<key>.tif, exiting the process on failure.

    A single non-VRT input is copied instead of warped. Returns the mosaic
    path; an existing mosaic is reused without re-processing.
    """
    save_mosaic = os.path.join(save_tif_dir, key + '.tif')

    # skip when a mosaic for this key already exists (possibly in a sub-folder)
    if io_function.is_file_exist_subfolder(save_tif_dir, key + '.tif') is not False:
        basic.outputlogMessage('warning, mosaic file: %s exist, skip' % save_mosaic)
        return save_mosaic

    # optionally save the list of source DEMs alongside the mosaic
    if save_source:
        io_function.save_list_to_txt(os.path.join(save_tif_dir, key + '_src.txt'), dem_list)

    # a single non-VRT DEM needs no warping: copy it directly
    if len(dem_list) == 1 and raster_io.get_driver_format(dem_list[0]) != 'VRT':
        io_function.copy_file_to_dst(dem_list[0], save_mosaic)
        return save_mosaic

    # create mosaic; can handle a single input file, but is slow
    result = RSImageProcess.mosaic_crop_images_gdalwarp(dem_list, save_mosaic,
                                                        resampling_method=resample_method,
                                                        o_format=o_format, compress='lzw',
                                                        tiled='yes', bigtiff='if_safer',
                                                        thread_num=thread_num)
    if result is False:
        sys.exit(1)  # abort the worker on mosaic failure
    return save_mosaic
def crop_mosaic_reproject_dem_diff(grid_dem_tifs, pre_name, extent_poly, o_res, new_prj, b_mosaic=False):
    """Crop DEM-difference tifs to extent_poly, optionally mosaic them, then reproject to new_prj."""
    # step 1: crop every tif to the same extent (outputs collected under dem_diff_crop)
    crop_tif_dir = os.path.join('dem_diff_crop')
    if not os.path.isdir(crop_tif_dir):
        io_function.mkdir(crop_tif_dir)

    crop_tif_list = []
    for tif in grid_dem_tifs:
        save_crop_path = os.path.join(
            crop_tif_dir,
            os.path.basename(io_function.get_name_by_adding_tail(tif, 'sub')))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip cropping' % save_crop_path)
            crop_tif_list.append(save_crop_path)
            continue
        # VRT output keeps the crop step lightweight
        crop_tif = subset_image_by_polygon_box(tif, save_crop_path, extent_poly,
                                               resample_m='near', o_format='VRT',
                                               out_res=o_res, same_extent=True, thread_num=2)
        if crop_tif is False:
            raise ValueError('warning, crop %s failed' % tif)
        crop_tif_list.append(crop_tif)
    grid_dem_tifs = crop_tif_list

    # step 2: optional mosaic of the cropped results
    if b_mosaic:
        save_mosaic = pre_name + '_DEM_diff.tif'
        # create mosaic, can handle only input one file, but is slow
        result = RSImageProcess.mosaic_crop_images_gdalwarp(grid_dem_tifs, save_mosaic,
                                                            resampling_method='average',
                                                            o_format='Gtiff', compress='lzw',
                                                            tiled='yes', bigtiff='if_safer',
                                                            thread_num=2)
        if result is False:
            sys.exit(1)
        grid_dem_tifs = [save_mosaic]

    # step 3: reproject each result to the target projection
    for tif in grid_dem_tifs:
        t_file = io_function.get_name_by_adding_tail(tif, 'prj')
        map_projection.transforms_raster_srs(tif, new_prj, t_file, o_res, o_res,
                                             resample_m='bilinear', o_format='GTiff',
                                             compress='lzw', tiled='yes', bigtiff='if_safer')
def main(options, args):
    """Extract image patches around buffered polygons from a shapefile.

    args[0] is the polygon shapefile, args[1] the image to subset. Each
    polygon is buffered, written to its own shapefile, and used to crop
    the image into out_dir — either by bounding rectangle (options.rectangle)
    or by the polygon shape itself.
    """
    # BUG FIX: patch_height previously read options.s_width (copy-paste),
    # silently ignoring a user-supplied --s_height.
    patch_width = 1024 if options.s_width is None else int(options.s_width)
    patch_height = 1024 if options.s_height is None else int(options.s_height)
    out_dir = "extract_dir" if options.out_dir is None else options.out_dir
    dstnodata = 255 if options.dstnodata is None else options.dstnodata
    bSub_rect = options.rectangle

    if os.path.isdir(out_dir) is False:
        os.makedirs(out_dir)

    buffer_size = 10  # buffer size is 10 meters (in the projection)
    if options.bufferSize is not None:
        buffer_size = options.bufferSize

    shp_path = args[0]
    image_path = args[1]

    # get polygons and their class labels
    polygons = get_polygons(shp_path)
    class_int = get_polygon_class(shp_path)
    class_str_list = ["class_" + str(item) for item in class_int]  # e.g., class_0 is non-gully, class_1 is gully

    # buffer polygons (dilation)
    poly_geos = [vector_features.shape_from_pyshp_to_shapely(pyshp_polygon)
                 for pyshp_polygon in polygons]
    poly_geos_buffer = [shapely_obj.buffer(buffer_size) for shapely_obj in poly_geos]

    # save each (buffered) polygon into its own shapefile in the output folder
    poly_pyshp = [vector_features.shape_from_shapely_to_pyshp(item)
                  for item in poly_geos_buffer]
    polygon_files = save_polygons_to_shp(poly_pyshp, shp_path, out_dir)

    # subset the image once per polygon
    save_id = 0
    for polygon, class_str in zip(polygon_files, class_str_list):
        Outfilename = os.path.join(
            out_dir,
            os.path.splitext(os.path.basename(image_path))[0] +
            '_' + str(save_id) + '_' + class_str + '.tif')
        if bSub_rect is True:
            # crop by the polygon's bounding rectangle
            extent = get_layer_extent(polygon)
            RSImageProcess.subset_image_projwin(Outfilename, image_path,
                                                extent[0], extent[3], extent[2], extent[1],
                                                dst_nondata=dstnodata)
        else:
            # crop by the polygon shape itself
            RSImageProcess.subset_image_by_shapefile(image_path, polygon, Outfilename, True)
        save_id += 1
def mask_dem_by_surface_water(crop_dem_list, extent_poly, extent_id, crop_tif_dir, o_res, process_num):
    # Mask surface-water pixels out of each DEM in crop_dem_list, using the
    # global surface-water tifs found in the module-level 'mask_water_dir'.
    # Returns the list of masked DEM paths, None when no water-mask tif
    # covers the grid extent, or False on a processing failure.

    # get list of the ArcticDEM mosaic
    water_mask_tifs = io_function.get_file_list_by_ext('.tif', mask_water_dir, bsub_folder=False)
    water_mask_ext_polys = get_dem_tif_ext_polygons(water_mask_tifs)
    # indices of water-mask tifs whose footprint intersects the grid extent
    overlap_index = vector_gpd.get_poly_index_within_extent(water_mask_ext_polys, extent_poly)

    #### crop and mosaic water mask
    sub_mosaic_dem_tifs = [water_mask_tifs[item] for item in overlap_index]
    water_mask_crop_tif_list = []
    for tif in sub_mosaic_dem_tifs:
        save_crop_path = os.path.join(crop_tif_dir, os.path.basename(io_function.get_name_by_adding_tail(tif, 'sub_poly_%d' % extent_id)))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip' % save_crop_path)
            water_mask_crop_tif_list.append(save_crop_path)
        else:
            # crop to exactly the grid extent (VRT output keeps this lightweight)
            crop_tif = subset_image_by_polygon_box(tif, save_crop_path, extent_poly, resample_m='near', o_format='VRT', out_res=o_res, same_extent=True, thread_num=process_num)
            if crop_tif is False:
                raise ValueError('warning, crop %s failed' % tif)
            water_mask_crop_tif_list.append(crop_tif)
    if len(water_mask_crop_tif_list) < 1:
        basic.outputlogMessage('No water mask for %d grid' % extent_id)
        save_id_grid_no_watermask(extent_id)
        return None

    # create mosaic, can handle only input one file, but is slow
    save_water_mask_mosaic = os.path.join(crop_tif_dir, 'global_surface_water_grid%d.tif' % extent_id)
    result = RSImageProcess.mosaic_crop_images_gdalwarp(water_mask_crop_tif_list, save_water_mask_mosaic, resampling_method='average', o_format='GTiff', compress='lzw', tiled='yes', bigtiff='if_safer', thread_num=process_num)
    if result is False:
        return False

    # because the resolution of dem and water mask is different, so we polygonize the watermask, then burn into the dem
    water_mask_shp = os.path.join(crop_tif_dir, 'global_surface_water_grid%d.shp' % extent_id)
    if os.path.isfile(water_mask_shp):
        basic.outputlogMessage('%s exists, skip cropping' % water_mask_shp)
    else:
        # set 0 as nodata
        if raster_io.set_nodata_to_raster_metadata(save_water_mask_mosaic, 0) is False:
            return False
        if vector_gpd.raster2shapefile(save_water_mask_mosaic, water_mask_shp, connect8=True) is None:
            return False

    # masking the strip version of DEMs
    mask_dem_list = []
    for idx, strip_dem in enumerate(crop_dem_list):
        save_path = io_function.get_name_by_adding_tail(strip_dem, 'maskWater')
        if os.path.isfile(save_path):
            basic.outputlogMessage('%s exist, skip' % save_path)
            mask_dem_list.append(save_path)
            continue
        # work on a copy of the strip DEM, then burn the water polygons as nodata
        io_function.copy_file_to_dst(strip_dem, save_path, overwrite=True)
        nodata = raster_io.get_nodata(save_path)
        # best-effort: a failed burn leaves this DEM out of the result list
        if raster_io.burn_polygon_to_raster_oneband(save_path, water_mask_shp, nodata) is False:
            continue
        mask_dem_list.append(save_path)

    return mask_dem_list
def mask_strip_dem_outlier_by_ArcticDEM_mosaic(crop_strip_dem_list, extent_poly, extent_id, crop_tif_dir, o_res, process_num, diff_thr=50):
    """Mask strip-DEM pixels that deviate strongly from the ArcticDEM tile mosaic.

    Crops and mosaics the registered ArcticDEM tile DEMs (from the
    module-level arcticDEM_tile_reg_tif_dir) covering extent_poly, then for
    every strip DEM in crop_strip_dem_list sets to nodata any pixel whose
    elevation differs from the tile mosaic by more than diff_thr meters
    (default 50, matching the previously hard-coded value).

    Returns the list of masked strip-DEM paths, or False when no tile DEM
    overlaps the grid or the mosaicking fails. Raises ValueError when a
    strip DEM's size/band count does not match the tile mosaic.
    """
    # get the tile DEMs and keep only those whose footprint overlaps the extent
    arcticDEM_mosaic_reg_tifs = io_function.get_file_list_by_ext('.tif', arcticDEM_tile_reg_tif_dir, bsub_folder=False)
    mosaic_dem_ext_polys = get_dem_tif_ext_polygons(arcticDEM_mosaic_reg_tifs)
    overlap_index = vector_gpd.get_poly_index_within_extent(mosaic_dem_ext_polys, extent_poly)

    #### crop and mosaic mosaic_reg_tifs
    sub_mosaic_dem_tifs = [arcticDEM_mosaic_reg_tifs[item] for item in overlap_index]
    mosaic_crop_tif_list = []
    for tif in sub_mosaic_dem_tifs:
        save_crop_path = os.path.join(crop_tif_dir, os.path.basename(io_function.get_name_by_adding_tail(tif, 'sub_poly_%d' % extent_id)))
        if os.path.isfile(save_crop_path):
            basic.outputlogMessage('%s exists, skip cropping' % save_crop_path)
            mosaic_crop_tif_list.append(save_crop_path)
        else:
            # crop to exactly the grid extent so pixel grids line up with the strips
            crop_tif = subset_image_by_polygon_box(tif, save_crop_path, extent_poly, resample_m='near',
                                                   o_format='VRT', out_res=o_res, same_extent=True, thread_num=process_num)
            if crop_tif is False:
                raise ValueError('warning, crop %s failed' % tif)
            mosaic_crop_tif_list.append(crop_tif)

    if len(mosaic_crop_tif_list) < 1:
        basic.outputlogMessage('No mosaic version of ArcticDEM for %d grid, skip mask_strip_dem_outlier_by_ArcticDEM_mosaic' % extent_id)
        return False

    # create mosaic, can handle only input one file, but is slow
    save_dem_mosaic = os.path.join(crop_tif_dir, 'ArcticDEM_tiles_grid%d.tif' % extent_id)
    result = RSImageProcess.mosaic_crop_images_gdalwarp(mosaic_crop_tif_list, save_dem_mosaic,
                                                        resampling_method='average', o_format='GTiff',
                                                        compress='lzw', tiled='yes', bigtiff='if_safer', thread_num=process_num)
    if result is False:
        return False

    height_tileDEM, width_tileDEM, count_tileDEM, dtype_tileDEM = raster_io.get_height_width_bandnum_dtype(save_dem_mosaic)
    tileDEM_data, tileDEM_nodata = raster_io.read_raster_one_band_np(save_dem_mosaic)

    # masking the strip version of DEMs
    mask_strip_dem_list = []
    for idx, strip_dem in enumerate(crop_strip_dem_list):
        save_path = io_function.get_name_by_adding_tail(strip_dem, 'maskOutlier')
        if os.path.isfile(save_path):
            basic.outputlogMessage('%s exist, skip' % save_path)
            mask_strip_dem_list.append(save_path)
            continue

        # check band, width, height: strip must share the tile mosaic's grid
        height, width, count, dtype = raster_io.get_height_width_bandnum_dtype(strip_dem)
        if height_tileDEM != height or width_tileDEM != width or count_tileDEM != count:
            raise ValueError('size different between %s and %s' % (strip_dem, save_dem_mosaic))
        if count != 1:
            raise ValueError('DEM and Matchtag should only have one band')

        try:
            dem_data, nodata = raster_io.read_raster_one_band_np(strip_dem)
        except Exception:
            # BUG FIX: was a bare 'except:' which also swallowed SystemExit and
            # KeyboardInterrupt; keep the best-effort skip for unreadable tifs only
            basic.outputlogMessage(' invalid tif file: %s' % strip_dem)
            continue

        nodata_loc = np.where(dem_data == nodata)
        diff = dem_data - tileDEM_data
        # mask as nodata where the strip deviates more than diff_thr meters
        dem_data[np.abs(diff) > diff_thr] = nodata
        dem_data[nodata_loc] = nodata  # may change some nodata pixel, change them back

        # save to file
        raster_io.save_numpy_array_to_rasterfile(dem_data, save_path, strip_dem,
                                                 compress='lzw', tiled='yes', bigtiff='if_safer')
        mask_strip_dem_list.append(save_path)

    return mask_strip_dem_list
# remove the files with dark area greater than 10% with rasterio.open(tif_img) as img_obj: # read the first band # indexes = img_obj.indexes # print(indexes) data_band1 = img_obj.read(1) # print(data_band1.shape) width, height = data_band1.shape # dark area are pixel value smaller than 3 index_zeros = np.where(data_band1 < 3) # num_non_zero = np.count_nonzero(data_band1) # if num_non_zero != 16380: # print(tif_img) # print(num_non_zero) zeros_per = len(index_zeros[0]) / float(width * height) # print(tif_img) if zeros_per > 0.1: # remove this file print(zeros_per) print('remove image patch:', tif_img) io_function.movefiletodir(tif_img, rm_dark_img_dir) continue # use the same name of tif file output = os.path.basename(tif_img) if RSImageProcess.subset_image_baseimage(output, org_img, tif_img) is False: break
def create_moasic_of_each_grid_polygon(id, polygon, polygon_latlon, out_res, cloud_cover_thr, geojson_list, save_dir,
                                       new_prj_wkt=None, new_prj_proj4=None, sr_min=0, sr_max=3000, to_rgb=True,
                                       nodata=0, save_org_dir=None, resampling_method='min'):
    '''
    create mosaic for Planet images within a grid
    :param polygon: grid polygon (in the working projection)
    :param polygon_latlon: the same polygon in lat/lon, used to match images
    :param out_res: output resolution
    :param cloud_cover_thr: maximum allowed cloud cover for input images
    :param geojson_list: geojson files describing the available Planet scenes
    :param save_dir: output directory; its basename is used in the output file name
    :param new_prj_wkt: target projection (WKT); reproject only if both prj args are set
    :param new_prj_proj4: target projection (proj4)
    :param sr_min: lower bound for surface-reflectance -> RGB stretching
    :param sr_max: upper bound for surface-reflectance -> RGB stretching
    :param to_rgb: convert Planet SR images to RGB before mosaicking
    :param nodata: nodata value for the mosaic
    :param save_org_dir: where to keep the original images (passed to RGB conversion)
    :param resampling_method: NOTE(review): currently unused in the active code
           path (only referenced in the commented-out gdalwarp variant)
    :return: path of the final mosaic, or False when no image overlaps the grid
    '''
    time0 = time.time()
    file_name = os.path.basename(save_dir)
    fin_out = os.path.join(save_dir, file_name + '_sub_%d.tif' % id)
    # an existing output short-circuits the whole (expensive) pipeline
    if os.path.isfile(fin_out):
        basic.outputlogMessage('Warning, skip %s because it already exists, remove it if want to regenerate it' % fin_out)
        return fin_out

    # get image list and cloud cover
    planet_img_list, cloud_covers = get_Planet_SR_image_list_overlap_a_polygon(polygon_latlon, geojson_list, cloud_cover_thr)
    if len(planet_img_list) < 1:
        basic.outputlogMessage('warning, no images within %d grid' % id)
        return False

    io_function.mkdir(save_dir)

    print('images and their cloud cover for %dth grid:' % id)
    for img, cloud_cover in zip(planet_img_list, cloud_covers):
        print(img, cloud_cover)

    # per-process work directories so parallel workers do not collide
    proc_id = multiprocessing.current_process().pid

    # convert to RGB images (for Planet)
    rgb_image_list = []
    rgb_dir = 'RGB_images_' + str(proc_id)
    if to_rgb:
        for tif_path in planet_img_list:
            rgb_img = convert_planet_to_rgb_images(tif_path, save_dir=rgb_dir, save_org_dir=save_org_dir, sr_min=sr_min, sr_max=sr_max)
            rgb_image_list.append(rgb_img)
    if len(rgb_image_list) > 0:
        planet_img_list = rgb_image_list

    reproj_img_list = []
    # reproject if necessary (only when both target-projection arguments are given)
    reproj_dir = 'planet_images_reproj_' + str(proc_id)
    if new_prj_wkt != None and new_prj_proj4 != None:
        for tif_path in planet_img_list:
            prj_out = reproject_planet_image(tif_path, new_prj_wkt, new_prj_proj4, save_dir=reproj_dir)
            # replace the image
            if prj_out is not False and os.path.isfile(prj_out):
                reproj_img_list.append(prj_out)
            else:
                # if not reproject, then append the original image.
                reproj_img_list.append(tif_path)
    if len(reproj_img_list) > 0:
        planet_img_list = reproj_img_list

    # create mosaic using gdal_merge.py
    # because in gdal_merge.py, a later image will replace one, so we put image with largest cloud cover first
    out = os.path.join(save_dir, file_name + '_sub_%d_tmp.tif' % id)
    if os.path.isfile(out):
        io_function.delete_file_or_dir(out)

    # reverse=True to make it in descending order (largest cloud cover first,
    # so later/cleaner images overwrite cloudier ones in the mosaic)
    img_cloud_list = [(img_path, cloud) for cloud, img_path in
                      sorted(zip(cloud_covers, planet_img_list), key=lambda pair: pair[0], reverse=True)]
    # for checking
    print('Image and its cloud after sorting:')
    for (img_path, cloud) in img_cloud_list:
        print(img_path, cloud)
    tifs = [img_path for (img_path, cloud) in img_cloud_list]
    tifs_str = ' '.join(tifs)

    # cmd_str = 'gdal_merge.py -o %s -n %d -init %d -ps %d %d %s'%(out,nodata,nodata,out_res,out_res,tifs_str)
    # NOTE(review): builds a VRT (despite the .tif name of 'out'); the crop
    # below materializes it into the final GeoTIFF
    cmd_str = 'gdalbuildvrt -resolution user -tr %d %d -srcnodata %d -vrtnodata %d %s %s' % (
        out_res, out_res, nodata, nodata, out, tifs_str)
    status, result = basic.exec_command_string(cmd_str)
    if status != 0:
        print(result)
        sys.exit(status)

    # crop the virtual mosaic to the min extent of the polygon and the image
    results = RSImageProcess.subset_image_by_polygon_box_image_min(
        fin_out, out, polygon, xres=out_res, yres=out_res,
        compress='lzw', tiled='yes', bigtiff='if_safer')
    if results is False:
        basic.outputlogMessage('Warning, Crop %s failed, keep the one without cropping' % out)
        io_function.move_file_to_dst(out, fin_out)
    else:
        io_function.delete_file_or_dir(out)

    cost_time_sec = time.time() - time0
    basic.outputlogMessage('finished creating %s cost %.2f seconds (%.2f minutes)' %
                           (fin_out, cost_time_sec, cost_time_sec / 60))

    return fin_out