def download_srtm(aoi):

    warnings.filterwarnings(
        'ignore', 'Geometry is in a geographic CRS', UserWarning
    )

    srtm = gpd.read_file(
        OST_ROOT.joinpath('aux/srtm1sectiles.gpkg')
    )

    aoi_gdf = vec.wkt_to_gdf(aoi)
    aoi_gdf['geometry'] = aoi_gdf.geometry.buffer(1)
    overlap_df = gpd.overlay(srtm, aoi_gdf, how='intersection')

    iter_list = []
    for file in overlap_df.url.values:
        iter_list.append(file)

    # now we run with godale, which also works with 1 worker
    executor = Executor(executor='concurrent_processes', max_workers=10)

    for task in executor.as_completed(
            func=download_srtm_tile, iterable=iter_list
    ):
        task.result()

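# Hedged sketch: the godale Executor above follows the same fan-out/collect
# pattern as the standard library's ProcessPoolExecutor. _download_many is an
# illustrative stand-in, not part of OST; fetch_func must be picklable
# (a module-level function) for process-based execution.
def _download_many(urls, fetch_func, workers=10):
    from concurrent.futures import ProcessPoolExecutor, as_completed

    with ProcessPoolExecutor(max_workers=workers) as pool:
        futures = [pool.submit(fetch_func, url) for url in urls]
        for future in as_completed(futures):
            future.result()  # re-raises any exception from the worker
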
def refine_burst_inventory(aoi, burst_gdf, outfile):
    '''Creates a Burst GeoDataFrame from an OST inventory file

    Args:
        aoi (str): WKT representation of the area of interest
        burst_gdf (GeoDataFrame): burst inventory to refine
        outfile (str): path to which the refined inventory is written

    Returns:
        GeoDataFrame: the refined burst inventory
    '''

    # turn aoi into a geodataframe
    aoi_gdf = vec.wkt_to_gdf(aoi)

    # get columns of input dataframe for later return function
    cols = burst_gdf.columns

    # 1) get only intersecting footprints (double, since we do this before)
    burst_gdf = gpd.sjoin(burst_gdf, aoi_gdf, how='inner', op='intersects')

    # if aoi gdf has an id field we need to rename the changed id_left field
    if 'id_left' in burst_gdf.columns.tolist():
        # rename id_left to id
        burst_gdf.columns = ['id' if x == 'id_left' else x
                             for x in burst_gdf.columns.tolist()]

    # save file to out
    burst_gdf['Date'] = burst_gdf['Date'].astype(str)
    burst_gdf['BurstNr'] = burst_gdf['BurstNr'].astype(str)
    burst_gdf['AnxTime'] = burst_gdf['AnxTime'].astype(str)
    burst_gdf['Track'] = burst_gdf['Track'].astype(str)
    burst_gdf.to_file(outfile)

    return burst_gdf[cols]

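# Hedged, self-contained illustration of the spatial-join step above: an inner
# sjoin keeps only the footprints that intersect the AOI. Toy geometries, not
# OST data; newer geopandas spells the op= keyword as predicate=.
def _sjoin_demo():
    import geopandas as gpd
    from shapely.geometry import box

    bursts = gpd.GeoDataFrame(
        {'bid': ['A', 'B']},
        geometry=[box(0, 0, 1, 1), box(5, 5, 6, 6)],
        crs='EPSG:4326'
    )
    aoi = gpd.GeoDataFrame(geometry=[box(0.5, 0.5, 2, 2)], crs='EPSG:4326')

    # only burst 'A' survives the inner spatial join
    return gpd.sjoin(bursts, aoi, how='inner', predicate='intersects')
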
def mt_extent(list_of_extents, config_file):

    with open(config_file) as file:
        config_dict = json.load(file)

    import warnings
    warnings.filterwarnings('ignore', 'GeoSeries.isna', UserWarning)

    if not list_of_extents:
        raise RuntimeError('No extents found.')

    # get track/burst dir from first scene
    target_dir = list_of_extents[0].parent.parent.parent
    out_file = target_dir.joinpath(f'{target_dir.name}.min_bounds.json')

    logger.info(f'Creating common image bounds for track {target_dir.name}.')

    for i, file in enumerate(list_of_extents):

        if i == 0:
            df1 = gpd.read_file(file)
            df1 = df1[~(df1.geometry.is_empty | df1.geometry.isna())]
        else:
            # read and filter out invalid geometries
            df2 = gpd.read_file(file)
            df2 = df2[~(df2.geometry.is_empty | df2.geometry.isna())]

            # do intersect
            df1 = gpd.overlay(
                df1, df2, how='intersection'
            )[['raster_val_1', 'geometry']]

            # rename columns
            df1.columns = ['raster_val', 'geometry']

            # remove empty or invalid geometries
            df1 = df1[~(df1.geometry.is_empty | df1.geometry.isna())]

    if config_dict['processing']['mosaic']['cut_to_aoi']:
        try:
            aoi_df = vec.wkt_to_gdf(config_dict['aoi'])
            df = gpd.overlay(aoi_df, df1, how='intersection')
            # keep the GeoJSON driver consistent with the .json out_file
            df.to_file(out_file, driver='GeoJSON')
        except ValueError:
            df1.to_file(out_file, driver='GeoJSON')
    else:
        df1.to_file(out_file, driver='GeoJSON')

    return target_dir.name, list_of_extents, out_file

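# Hedged toy example of the iterative overlay above: intersecting the extents
# pairwise reduces them to their common bounds, mirroring the column
# select/rename dance of mt_extent. Illustrative geometries only.
def _common_bounds_demo():
    import geopandas as gpd
    from shapely.geometry import box

    extents = [box(0, 0, 3, 3), box(1, 1, 4, 4), box(2, 2, 5, 5)]
    frames = [
        gpd.GeoDataFrame({'raster_val': [1]}, geometry=[geom], crs='EPSG:4326')
        for geom in extents
    ]

    common = frames[0]
    for df in frames[1:]:
        common = gpd.overlay(common, df, how='intersection')
        common = common[['raster_val_1', 'geometry']]
        common.columns = ['raster_val', 'geometry']

    # the surviving geometry is box(2, 2, 3, 3)
    return common
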
def refine_burst_inventory(aoi, burst_gdf, outfile, coverages=None):
    '''Creates a Burst GeoDataFrame from an OST inventory file

    Args:
        aoi (str): WKT representation of the area of interest
        burst_gdf (GeoDataFrame): burst inventory to refine
        outfile (str): path to which the refined inventory is written
        coverages (int): expected number of acquisitions per burst;
            bursts with a different count are dropped

    Returns:
        GeoDataFrame: the refined burst inventory
    '''

    # turn aoi into a slightly buffered geodataframe
    aoi_gdf = gpd.GeoDataFrame(vec.wkt_to_gdf(aoi).buffer(0.05))
    aoi_gdf.columns = ['geometry']

    # get columns of input dataframe for later return function
    cols = burst_gdf.columns

    # 1) get only intersecting footprints (double, since we do this before)
    burst_gdf = gpd.sjoin(burst_gdf, aoi_gdf, how='inner', op='intersects')

    # if aoi gdf has an id field we need to rename the changed id_left field
    if 'id_left' in burst_gdf.columns.tolist():
        # rename id_left to id
        burst_gdf.columns = ['id' if x == 'id_left' else x
                             for x in burst_gdf.columns.tolist()]

    # remove duplicates
    burst_gdf.drop_duplicates(['SceneID', 'Date', 'bid'], inplace=True)

    # check if number of bursts align with number of coverages
    if coverages:
        for burst in burst_gdf.bid.unique():
            if len(burst_gdf[burst_gdf.bid == burst]) != coverages:
                print(' INFO: Removing burst {} because of'
                      ' insufficient coverage.'.format(burst))
                burst_gdf.drop(burst_gdf[burst_gdf.bid == burst].index,
                               inplace=True)

    # save file to out
    burst_gdf['Date'] = burst_gdf['Date'].astype(str)
    burst_gdf['BurstNr'] = burst_gdf['BurstNr'].astype(str)
    burst_gdf['AnxTime'] = burst_gdf['AnxTime'].astype(str)
    burst_gdf['Track'] = burst_gdf['Track'].astype(str)
    burst_gdf.to_file(outfile)

    return burst_gdf[cols]

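# Hedged toy example of the coverage check above: drop every burst id whose
# acquisition count differs from the expected number of coverages.
# Illustrative data, not an OST inventory.
def _coverage_filter_demo(coverages=2):
    import pandas as pd

    df = pd.DataFrame({
        'bid': ['T1_B1', 'T1_B1', 'T1_B2'],
        'Date': ['20200101', '20200113', '20200101'],
    })

    counts = df.groupby('bid')['bid'].transform('count')

    # 'T1_B2' has only one acquisition and is removed
    return df[counts == coverages]
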
def refine_burst_inventory(aoi, burst_gdf, outfile, coverages=None):
    """Creates a Burst GeoDataFrame from an OST inventory file

    Args:
        aoi (str): WKT representation of the area of interest
        burst_gdf (GeoDataFrame): burst inventory to refine
        outfile (str): path to which the refined inventory is written
        coverages (int): expected number of acquisitions per burst;
            bursts with a different count are dropped

    Returns:
        GeoDataFrame: the refined burst inventory
    """

    warnings.filterwarnings(
        'ignore', 'Geometry is in a geographic CRS', UserWarning
    )

    # turn aoi into a slightly buffered geodataframe
    aoi_gdf = gpd.GeoDataFrame(vec.wkt_to_gdf(aoi).buffer(0.05))
    aoi_gdf.columns = ['geometry']
    aoi_gdf.crs = 'epsg:4326'

    # get columns of input dataframe for later return function
    cols = burst_gdf.columns

    # 1) get only intersecting footprints (double, since we do this before)
    burst_gdf = gpd.sjoin(burst_gdf, aoi_gdf, how='inner', op='intersects')

    # if aoi gdf has an id field we need to rename the changed id_left field
    if 'id_left' in burst_gdf.columns:
        # rename id_left to id
        burst_gdf.columns = ['id' if x == 'id_left' else x
                             for x in burst_gdf.columns]

    # remove duplicates
    burst_gdf.drop_duplicates(['SceneID', 'Date', 'bid'], inplace=True)

    # check if number of bursts align with number of coverages
    if coverages:
        for burst in burst_gdf.bid.unique():
            if len(burst_gdf[burst_gdf.bid == burst]) != coverages:
                logger.info(
                    f'Removing burst {burst} because of insufficient coverage.'
                )
                burst_gdf.drop(burst_gdf[burst_gdf.bid == burst].index,
                               inplace=True)

    # save file to out
    burst_gdf.to_file(outfile, driver='GPKG')

    return burst_gdf[cols]

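# Hedged, self-contained sketch of the buffered-AOI construction above:
# buffer a WKT geometry by 0.05 degrees and wrap it in a GeoDataFrame with an
# explicit geographic CRS. The polygon is purely illustrative.
def _buffered_aoi_demo():
    import geopandas as gpd
    from shapely import wkt

    geom = wkt.loads('POLYGON ((9 45, 9.5 45, 9.5 45.5, 9 45.5, 9 45))')

    # geopandas accepts the CRS directly in the constructor
    return gpd.GeoDataFrame(geometry=[geom.buffer(0.05)], crs='EPSG:4326')
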
def search_refinement(aoi, inventory_df, inventory_dir,
                      exclude_marginal=True, full_aoi_crossing=True,
                      mosaic_refine=True, area_reduce=0.05,
                      complete_coverage=True):
    '''A function to refine the Sentinel-1 search by certain criteria

    Args:
        aoi (WKT str): area of interest
        inventory_df (GeoDataFrame): search inventory to refine
        inventory_dir (str or path): directory for the refined inventories
        exclude_marginal (bool): drop tracks that only marginally cover the AOI
        full_aoi_crossing (bool): drop tracks that do not fully cross the AOI
        mosaic_refine (bool): search for full-coverage mosaics
        area_reduce (float): area tolerance for the coverage checks
        complete_coverage (bool): require footprints to fully cover the AOI

    Returns:
        refined inventory (dictionary):
        coverages (dictionary):
    '''

    # create AOI GeoDataFrame and calculate area
    aoi_gdf = vec.wkt_to_gdf(aoi)
    aoi_area = aoi_gdf.area.sum()

    # get all polarisations apparent in the inventory
    pols = inventory_df['polarisationmode'].unique()

    # get orbit directions apparent in the inventory
    orbit_directions = inventory_df['orbitdirection'].unique()

    # create inventory and coverage dictionaries
    inventory_dict = {}
    coverage_dict = {}

    # loop through all possible combinations
    for pol, orb in itertools.product(pols, orbit_directions):

        print(' INFO: Coverage analysis for {} tracks in {} polarisation.'
              .format(orb, pol))

        # subset the footprint for orbit direction and polarisations
        inv_df_sorted = inventory_df[
            (inventory_df['polarisationmode'] == pol) &
            (inventory_df['orbitdirection'] == orb)]

        print(' INFO: {} frames for {} tracks in {} polarisation.'.format(
            len(inv_df_sorted), orb, pol))

        # calculate intersected area
        inter = aoi_gdf.geometry.intersection(inv_df_sorted.unary_union)
        intersect_area = inter.area.sum()

        # we do a first check if the scenes do not fully cover the AOI
        if (intersect_area <= aoi_area - area_reduce) and complete_coverage:
            print(' WARNING: Set of footprints does not fully cover AOI. ')

        # otherwise we go on
        else:
            # apply the different sorting steps
            inventory_refined = _remove_double_entries(inv_df_sorted)
            inventory_refined = _remove_outside_aoi(aoi_gdf, inventory_refined)

            if orb == 'ASCENDING':
                inventory_refined = _handle_equator_crossing(inventory_refined)

            # get number of tracks
            nr_of_tracks = len(inventory_refined.relativeorbit.unique())
            print(' INFO: {} tracks remaining.'.format(nr_of_tracks))

            if exclude_marginal is True and nr_of_tracks > 1:
                inventory_refined = _exclude_marginal_tracks(
                    aoi_gdf, inventory_refined, area_reduce)

            if full_aoi_crossing is True:
                inventory_refined = _remove_incomplete_tracks(
                    aoi_gdf, inventory_refined)

            inventory_refined = _handle_non_continous_swath(inventory_refined)

            # datelist is only populated when we refine for mosaics
            datelist = []
            if mosaic_refine is True:
                datelist, inventory_refined = _forward_search(
                    aoi_gdf, inventory_refined, area_reduce)
                inventory_refined = _backward_search(
                    aoi_gdf, inventory_refined, datelist, area_reduce)

            if len(inventory_refined) != 0:
                vec.inventory_to_shp(
                    inventory_refined,
                    '{}/{}_{}_{}.shp'.format(inventory_dir, len(datelist),
                                             orb, ''.join(pol.split())))
                inventory_dict['{}_{}'.format(
                    orb, ''.join(pol.split()))] = inventory_refined
                coverage_dict['{}_{}'.format(
                    orb, ''.join(pol.split()))] = len(datelist)

            print(' INFO: Found {} full coverage mosaics.'.format(
                len(datelist)))

    return inventory_dict, coverage_dict

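# Hedged toy version of the first coverage check in search_refinement:
# compare the AOI area against the area of its intersection with the union
# of all footprints. Illustrative geometries only.
def _coverage_check_demo(area_reduce=0.05):
    import geopandas as gpd
    from shapely.geometry import box

    aoi_gdf = gpd.GeoDataFrame(geometry=[box(0, 0, 2, 2)], crs='EPSG:4326')
    footprints = gpd.GeoDataFrame(
        geometry=[box(0, 0, 2, 1), box(0, 1, 2, 2)], crs='EPSG:4326'
    )

    aoi_area = aoi_gdf.area.sum()
    inter = aoi_gdf.geometry.intersection(footprints.unary_union)

    # True here: the two half footprints together cover the whole AOI
    return inter.area.sum() > aoi_area - area_reduce
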
def mosaic(filelist, outfile, cut_to_aoi=False):
    # note: cut_to_aoi doubles as a flag and, when set, the AOI WKT string

    check_file = opj(os.path.dirname(outfile),
                     '.{}.processed'.format(os.path.basename(outfile)[:-4]))

    logfile = opj(os.path.dirname(outfile),
                  '{}.errLog'.format(os.path.basename(outfile)[:-4]))

    # get datatype from first image in the mosaic filelist
    with rasterio.open(filelist.split(' ')[0]) as src:
        dtype = src.meta['dtype']
        dtype = 'float' if dtype == 'float32' else dtype

    with TemporaryDirectory() as temp_dir:

        if cut_to_aoi:
            tempfile = opj(temp_dir, os.path.basename(outfile))
        else:
            tempfile = outfile

        cmd = ('otbcli_Mosaic -ram 4096'
               ' -progress 1'
               ' -comp.feather large'
               ' -harmo.method band'
               ' -harmo.cost rmse'
               ' -temp_dir {}'
               ' -il {}'
               ' -out {} {}'.format(temp_dir, filelist, tempfile, dtype))

        return_code = h.run_command(cmd, logfile)

        if return_code != 0:
            if os.path.isfile(tempfile):
                os.remove(tempfile)
            return

        if cut_to_aoi:

            # get aoi in a way rasterio wants it
            features = vec.gdf_to_json_geometry(vec.wkt_to_gdf(cut_to_aoi))

            # import raster and mask
            with rasterio.open(tempfile) as src:
                out_image, out_transform = rasterio.mask.mask(src, features,
                                                              crop=True)
                out_meta = src.meta.copy()
                ndv = src.nodata
                out_image = np.ma.masked_where(out_image == ndv, out_image)

                out_meta.update({
                    'driver': 'GTiff',
                    'height': out_image.shape[1],
                    'width': out_image.shape[2],
                    'transform': out_transform,
                    'tiled': True,
                    'blockxsize': 128,
                    'blockysize': 128
                })

                with rasterio.open(outfile, 'w', **out_meta) as dest:
                    dest.write(out_image.data)

            # remove intermediate file
            os.remove(tempfile)

    # check
    return_code = h.check_out_tiff(outfile)
    if return_code != 0:
        if os.path.isfile(outfile):
            os.remove(outfile)

    # write file, so we know this ts has been successfully processed
    if return_code == 0:
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')

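# Hedged, self-contained illustration of the masking step above: build a tiny
# in-memory raster and crop it to a polygon with rasterio.mask.mask. All
# values are illustrative.
def _mask_demo():
    import numpy as np
    import rasterio
    import rasterio.mask
    from rasterio.io import MemoryFile
    from rasterio.transform import from_origin
    from shapely.geometry import box, mapping

    data = np.ones((1, 10, 10), dtype='float32')
    profile = {
        'driver': 'GTiff', 'dtype': 'float32', 'count': 1,
        'height': 10, 'width': 10, 'crs': 'EPSG:4326',
        'transform': from_origin(0, 10, 1, 1), 'nodata': 0,
    }

    with MemoryFile() as memfile:
        with memfile.open(**profile) as dst:
            dst.write(data)

        with memfile.open() as src:
            # rasterio expects GeoJSON-like geometry dicts
            features = [mapping(box(2, 2, 6, 6))]
            out_image, out_transform = rasterio.mask.mask(
                src, features, crop=True
            )

    return out_image, out_transform
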
def mosaic(filelist, outfile, config_file, cut_to_aoi=None, harm=None):

    if outfile.parent.joinpath(f'.{outfile.name[:-4]}.processed').exists():
        logger.info(f'{outfile} already exists.')
        return

    logger.info(f'Mosaicking file {outfile}.')

    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)
        temp_dir = config_dict['temp_dir']
        aoi = config_dict['aoi']
        epsg = config_dict['processing']['single_ARD']['dem']['out_projection']

    if not harm:
        harm = config_dict['processing']['mosaic']['harmonization']

    if not cut_to_aoi:
        cut_to_aoi = config_dict['processing']['mosaic']['cut_to_aoi']

    logfile = outfile.parent.joinpath(f'{str(outfile)[:-4]}.errLog')

    with TemporaryDirectory(prefix=f'{temp_dir}/') as temp:

        temp = Path(temp)

        # get datatype from first image in our mosaic filelist
        with rasterio.open(filelist.split(' ')[0]) as src:
            dtype = src.meta['dtype']
            dtype = 'float' if dtype == 'float32' else dtype

        if cut_to_aoi:
            tempfile = temp.joinpath(outfile.name)
        else:
            tempfile = outfile

        harm = 'band' if harm else 'none'

        cmd = (
            f'otbcli_Mosaic -ram 8192 -progress 1 '
            f'-comp.feather large '
            f'-harmo.method {harm} '
            f'-harmo.cost rmse '
            f'-tmpdir {str(temp)} '
            f'-interpolator bco '
            f'-il {filelist} '
            f'-out {str(tempfile)} {dtype}'
        )

        return_code = h.run_command(cmd, logfile)

        if return_code != 0:
            if tempfile.exists():
                tempfile.unlink()
            return

        if cut_to_aoi:

            # get aoi in a way rasterio wants it
            aoi_gdf = vec.wkt_to_gdf(aoi)
            features = vec.gdf_to_json_geometry(aoi_gdf.to_crs(epsg=epsg))

            # import raster and mask
            with rasterio.open(tempfile) as src:
                out_image, out_transform = rasterio.mask.mask(src, features,
                                                              crop=True)
                out_meta = src.meta.copy()
                ndv = src.nodata
                out_image = np.ma.masked_where(out_image == ndv, out_image)

                out_meta.update({
                    'driver': 'GTiff',
                    'height': out_image.shape[1],
                    'width': out_image.shape[2],
                    'transform': out_transform,
                    'tiled': True,
                    'blockxsize': 128,
                    'blockysize': 128
                })

                with rasterio.open(outfile, 'w', **out_meta) as dest:
                    dest.write(out_image.data)

            # remove intermediate file
            tempfile.unlink()

        # check
        return_code = h.check_out_tiff(outfile)
        if return_code != 0:
            if outfile.exists():
                outfile.unlink()
        else:
            # write file, so we know this mosaic was successfully processed
            check_file = outfile.parent.joinpath(
                f'.{outfile.name[:-4]}.processed'
            )
            with open(str(check_file), 'w') as file:
                file.write('passed all tests \n')

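# Hedged sketch of a minimal configuration file for the config-driven
# mosaic() above: only the keys the function actually reads are included,
# and every value is illustrative.
def _write_demo_config(path='config.json'):
    import json

    config = {
        'aoi': 'POLYGON ((9 45, 9.5 45, 9.5 45.5, 9 45.5, 9 45))',
        'temp_dir': '/tmp',
        'processing': {
            'single_ARD': {'dem': {'out_projection': 4326}},
            'mosaic': {'cut_to_aoi': True, 'harmonization': True},
        },
    }

    with open(path, 'w') as file:
        json.dump(config, file, indent=2)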