def mosaic_timescan(burst_inventory, processing_dir, temp_dir, proc_file,
                    cut_to_aoi=False, exec_file=None):
    """Mosaic per-burst timescan layers into project-wide rasters.

    Reads the time-scan metrics from the processing config and, for every
    product/metric combination found in at least two burst folders, mosaics
    the single-burst timescan GeoTiffs into one raster underneath
    <processing_dir>/Mosaic/Timescan. Finally wraps the result in a vrt.

    :param burst_inventory: burst inventory (unused here, kept for API symmetry)
    :param processing_dir: project processing directory
    :param temp_dir: directory for intermediate files
    :param proc_file: path to the JSON processing-parameters file
    :param cut_to_aoi: clip the mosaics to the project AOI
    :param exec_file: if set, only print the follow-up command placeholder
    """
    # pull the metric list out of the ARD configuration
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
    metrics = ard_params['time-scan ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # every product that may have been produced
    product_list = ['bs.HH', 'bs.VV', 'bs.HV', 'bs.VH',
                    'coh.VV', 'coh.VH', 'coh.HH', 'coh.HV',
                    'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha']

    tscan_dir = opj(processing_dir, 'Mosaic', 'Timescan')
    os.makedirs(tscan_dir, exist_ok=True)

    outfiles = []
    for product, metric in itertools.product(product_list, metrics):

        candidates = glob.glob(
            opj(processing_dir, '*', 'Timescan',
                f'*{product}.{metric}.tif')
        )

        # a mosaic needs at least two contributing bursts
        if len(candidates) < 2:
            continue

        filelist = ' '.join(candidates)
        outfile = opj(tscan_dir, f'{product}.{metric}.tif')
        check_file = opj(
            os.path.dirname(outfile),
            f'.{os.path.basename(outfile)[:-4]}.processed'
        )

        if os.path.isfile(check_file):
            print(' INFO: Mosaic layer {} already '
                  ' processed.'.format(os.path.basename(outfile)))
            continue

        print(' INFO: Mosaicking layer {}.'.format(os.path.basename(outfile)))
        mosaic.mosaic(filelist, outfile, temp_dir, cut_to_aoi)
        outfiles.append(outfile)

    if exec_file:
        print(' gdalbuildvrt ....command, outfiles')
    else:
        # create vrt
        ras.create_tscan_vrt(tscan_dir, proc_file)
def mosaic_timescan(inventory_df, processing_dir, temp_dir, proc_file,
                    cut_to_aoi=False, exec_file=None):
    """Mosaic the track-wise backscatter timescan layers of a GRD project.

    For every polarisation/metric combination found in at least two track
    folders, mosaics the single-track timescan GeoTiffs into one raster
    underneath <processing_dir>/Mosaic/Timescan and wraps them in a vrt.

    :param inventory_df: scene inventory (unused here, kept for API symmetry)
    :param processing_dir: project processing directory
    :param temp_dir: directory for intermediate files
    :param proc_file: path to the JSON processing-parameters file
    :param cut_to_aoi: clip the mosaics to the project AOI
    :param exec_file: if set, only print the follow-up command placeholder
    """
    # pull the metric list out of the ARD configuration
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
    metrics = ard_params['time-scan ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # create out directory if not existent
    tscan_dir = opj(processing_dir, 'Mosaic', 'Timescan')
    os.makedirs(tscan_dir, exist_ok=True)

    outfiles = []
    # loop over every potential polarisation/metric product
    for polar, metric in itertools.product(['VV', 'HH', 'VH', 'HV'], metrics):

        candidates = glob.glob(
            opj(processing_dir, '*', 'Timescan',
                f'*bs.{polar}.{metric}.tif'))

        # a mosaic needs at least two contributing tracks
        if len(candidates) < 2:
            continue

        filelist = ' '.join(candidates)
        outfile = opj(tscan_dir, f'bs.{polar}.{metric}.tif')
        check_file = opj(
            os.path.dirname(outfile),
            f'.{os.path.basename(outfile)[:-4]}.processed')

        if os.path.isfile(check_file):
            print(' INFO: Mosaic layer {} already '
                  ' processed.'.format(os.path.basename(outfile)))
            continue

        print(' INFO: Mosaicking layer {}.'.format(os.path.basename(outfile)))
        mosaic.mosaic(filelist, outfile, temp_dir, cut_to_aoi)
        outfiles.append(outfile)

    if exec_file:
        print(' gdalbuildvrt ....command, outfiles')
    else:
        ras.create_tscan_vrt(tscan_dir, proc_file)
def mosaic_timescan(config_file):
    """Mosaic backscatter timescan layers across tracks via godale.

    Collects, for every polarisation/metric combination with at least two
    contributing track folders, the timescan GeoTiffs and hands them to
    ``mosaic.gd_mosaic`` through the project's godale executor, then wraps
    the mosaics in a vrt.

    :param config_file: path to the project configuration JSON
    :return: None
    """
    # load ard parameters
    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)

    processing_dir = Path(config_dict['processing_dir'])
    metrics = config_dict['processing']['time-scan_ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # create out directory if not existent
    tscan_dir = processing_dir.joinpath('Mosaic/Timescan')
    tscan_dir.mkdir(parents=True, exist_ok=True)

    iter_list = []
    # loop over every potential polarisation/metric product
    for polar, metric in itertools.product(['VV', 'HH', 'VH', 'HV'], metrics):

        # gather the per-track timescan layers for this combination
        filelist = list(
            processing_dir.glob(f'*/Timescan/*bs.{polar}.{metric}.tif'))

        # a mosaic needs at least two contributing tracks
        if len(filelist) < 2:
            continue

        filelist = ' '.join(str(file) for file in filelist)
        outfile = tscan_dir.joinpath(f'bs.{polar}.{metric}.tif')
        check_file = outfile.parent.joinpath(f'.{outfile.stem}.processed')

        if check_file.exists():
            logger.info(f'Mosaic layer {outfile.name} already processed.')
            continue

        iter_list.append([filelist, outfile, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run mosaicking
    for task in executor.as_completed(
            func=mosaic.gd_mosaic, iterable=iter_list):
        task.result()

    ras.create_tscan_vrt(tscan_dir, config_file)
def mosaic_timescan_old(config_file):
    """Mosaic timescan layers for all products (legacy implementation).

    Same pipeline as :func:`mosaic_timescan`, but iterates over the full
    ``PRODUCT_LIST`` (backscatter, coherence, polarimetry) and accepts a
    single contributing file per layer.

    :param config_file: path to the project configuration JSON
    :return: None
    """
    print(' -----------------------------------------------------------------')
    logger.info('Mosaicking time-scan layers.')
    print(' -----------------------------------------------------------------')

    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)

    processing_dir = Path(config_dict['processing_dir'])
    metrics = config_dict['processing']['time-scan_ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    tscan_dir = processing_dir.joinpath('Mosaic/Timescan')
    tscan_dir.mkdir(parents=True, exist_ok=True)

    iter_list = []
    for product, metric in itertools.product(PRODUCT_LIST, metrics):

        filelist = list(
            processing_dir.glob(f'*/Timescan/*{product}.{metric}.tif'))

        # skip combinations that were never produced
        if not filelist:
            continue

        filelist = ' '.join(str(file) for file in filelist)
        outfile = tscan_dir.joinpath(f'{product}.{metric}.tif')
        check_file = outfile.parent.joinpath(f'.{outfile.name[:-4]}.processed')

        if check_file.exists():
            logger.info(f'Mosaic layer {outfile.name} already processed.')
            continue

        logger.info(f'Mosaicking layer {outfile.name}.')
        iter_list.append([filelist, outfile, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run mosaicking
    for task in executor.as_completed(
            func=mosaic.gd_mosaic, iterable=iter_list):
        task.result()

    ras.create_tscan_vrt(tscan_dir, config_file)
def timeseries_to_timescan(burst_inventory, processing_dir, temp_dir,
                           proc_file, exec_file=None):
    '''Function to create a timescan out of a OST timeseries.

    For every burst in the inventory and every possible product, computes
    the multi-temporal metrics of the corresponding timeseries stack via
    ``timescan.mt_metrics`` and finishes each burst with a vrt.

    :param burst_inventory: burst inventory with a ``bid`` column
    :param processing_dir: project processing directory
    :param temp_dir: directory for intermediate files (not used directly here)
    :param proc_file: path to the JSON processing-parameters file
    :param exec_file: if set, skip execution (parallelisation placeholder)
    '''
    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        ard_mt = ard_params['time-series ARD']
        ard_tscan = ard_params['time-scan ARD']

    # dB scaling applies when requested at either processing stage
    to_db = bool(ard['to db'] or ard_mt['to db'])

    # a products list
    product_list = ['bs.HH', 'bs.VV', 'bs.HV', 'bs.VH',
                    'coh.VV', 'coh.VH', 'coh.HH', 'coh.HV',
                    'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha']

    # rescale back to integer dtype only if the timeseries is not float32
    dtype_conversion = ard_mt['dtype output'] != 'float32'

    for burst in burst_inventory.bid.unique():

        print(' INFO: Entering burst {}.'.format(burst))
        # get burst directory
        burst_dir = opj(processing_dir, burst)
        # get timescan directory
        timescan_dir = opj(burst_dir, 'Timescan')
        os.makedirs(timescan_dir, exist_ok=True)

        for product in product_list:

            if os.path.isfile(
                    opj(timescan_dir, '.{}.processed'.format(product))):
                print(' INFO: Timescans for burst {} already'
                      ' processed.'.format(burst))
                # BUGFIX: skip the already-processed product; without this
                # continue the metrics were recomputed despite the message
                # (the sibling GRD implementation has the continue)
                continue

            # get respective timeseries
            timeseries = opj(burst_dir, 'Timeseries',
                             'Timeseries.{}.vrt'.format(product))

            if not os.path.isfile(timeseries):
                continue

            print(' INFO: Creating Timescans of {} for burst {}.'.format(
                product, burst))

            # datelist for harmonics
            scenelist = glob.glob(
                opj(burst_dir, 'Timeseries', '*{}*tif'.format(product)))

            datelist = []
            for layer in sorted(scenelist):
                datelist.append(os.path.basename(layer).split('.')[1][:6])

            # define timescan prefix
            timescan_prefix = opj(timescan_dir, product)

            # get rescaling and db right (backscatter vs. polarimetry)
            if 'bs.' in timescan_prefix:
                # backscatter
                rescale = dtype_conversion
                to_power = to_db
            else:
                to_power = False
                rescale = False

            # placeholder for parallelisation
            if exec_file:
                continue

            # run command
            timescan.mt_metrics(
                timeseries,
                timescan_prefix,
                ard_tscan['metrics'],
                rescale_to_datatype=rescale,
                to_power=to_power,
                outlier_removal=ard_tscan['remove outliers'],
                datelist=datelist
            )

        if not exec_file:
            # BUGFIX: build the vrt inside the burst loop; timescan_dir is
            # re-assigned each iteration, so a single call after the loop
            # would only cover the last burst
            ras.create_tscan_vrt(timescan_dir, proc_file)
def mosaic_timescan(burst_inventory, config_file):
    """Mosaic the timescan layers of a burst (SLC) project.

    Two godale-driven stages: first mosaic, per track, the burst-level
    timescan layers into acquisition-level rasters in a temporary folder;
    then mosaic those track rasters into the final project-wide timescan
    layers, wrap them into a vrt and clean up the temp folder.

    :param burst_inventory: burst inventory with a ``Track`` column
    :param config_file: path to the project configuration JSON
    :return: None
    """
    print(' -----------------------------------------------------------------')
    logger.info('Mosaicking time-scan layers.')
    print(' -----------------------------------------------------------------')

    # -------------------------------------
    # 1 load project config
    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)

    processing_dir = Path(config_dict['processing_dir'])
    metrics = config_dict['processing']['time-scan_ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])

    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # create output folder
    ts_dir = processing_dir.joinpath('Mosaic/Timescan')
    ts_dir.mkdir(parents=True, exist_ok=True)

    temp_mosaic = processing_dir.joinpath('Mosaic/temp')
    temp_mosaic.mkdir(parents=True, exist_ok=True)

    # -------------------------------------
    # 2 create iterable for per-track mosaicking
    iter_list = []
    for product, metric in itertools.product(PRODUCT_LIST, metrics):
        for track in burst_inventory.Track.unique():

            filelist = list(
                processing_dir.glob(
                    f'[A,D]{track}_IW*/Timescan/*{product}.{metric}.tif'))

            if not filelist:
                continue

            temp_acq = temp_mosaic.joinpath(
                f'{track}.{product}.{metric}.tif')
            # FIX: appended unconditionally — the original guarded this with
            # `if temp_acq:`, which is always true (a Path is always truthy)
            iter_list.append(
                [track, metric, product, temp_acq, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run per-track mosaicking
    for task in executor.as_completed(
            func=mosaic.gd_mosaic_slc_acquisition, iterable=iter_list):
        task.result()

    # -------------------------------------
    # 3 mosaic the track-level rasters into the final layers
    iter_list = []
    for product, metric in itertools.product(PRODUCT_LIST, metrics):

        list_of_files = list(temp_mosaic.glob(f'*{product}.{metric}.tif'))
        if not list_of_files:
            continue

        # turn to OTB readable format
        list_of_files = ' '.join([str(file) for file in list_of_files])

        # create namespace for outfile
        outfile = ts_dir.joinpath(f'{product}.{metric}.tif')
        check_file = outfile.parent.joinpath(
            f'.{outfile.name[:-4]}.processed')

        if check_file.exists():
            logger.info(f'Mosaic layer {outfile.name} already processed.')
            continue

        logger.info(f'Mosaicking layer {outfile.name}.')
        iter_list.append([list_of_files, outfile, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run mosaicking
    for task in executor.as_completed(
            func=mosaic.gd_mosaic, iterable=iter_list):
        task.result()

    ras.create_tscan_vrt(ts_dir, config_file)

    # remove temp folder
    h.remove_folder_content(temp_mosaic)
def timeseries_to_timescan(inventory_df, processing_dir, proc_file,
                           exec_file=None):
    """Create backscatter timescans from the timeseries of each track.

    For every relative orbit in the inventory and every polarisation with
    an existing timeseries vrt, computes the multi-temporal metrics via
    ``timescan.mt_metrics`` and finishes each track with a vrt.

    :param inventory_df: scene inventory with a ``relativeorbit`` column
    :param processing_dir: project processing directory
    :param proc_file: path to the JSON processing-parameters file
    :param exec_file: if set, skip execution (parallelisation placeholder)
    """
    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        ard_mt = ard_params['time-series ARD']
        ard_tscan = ard_params['time-scan ARD']

    # dB scaling applies when requested at either processing stage
    to_db = bool(ard['to db'] or ard_mt['to db'])

    # rescale back to integer dtype only if the timeseries is not float32
    dtype_conversion = ard_mt['dtype output'] != 'float32'

    for track in inventory_df.relativeorbit.unique():

        print(' INFO: Entering track {}.'.format(track))
        # get track directory
        track_dir = opj(processing_dir, track)

        # define and create Timescan directory
        timescan_dir = opj(track_dir, 'Timescan')
        os.makedirs(timescan_dir, exist_ok=True)

        # loop through each polarization
        for polar in ['VV', 'VH', 'HH', 'HV']:

            if os.path.isfile(
                    opj(timescan_dir, '.{}.processed'.format(polar))):
                print(' INFO: Timescans for track {} already'
                      ' processed.'.format(track))
                continue

            # get timeseries vrt
            timeseries = opj(track_dir, 'Timeseries',
                             'Timeseries.bs.{}.vrt'.format(polar))

            if not os.path.isfile(timeseries):
                continue

            print(' INFO: Processing Timescans of {} for track {}.'.format(
                polar, track))

            # BUGFIX: the timeseries GeoTiffs live in the 'Timeseries'
            # subfolder (see the vrt path above); globbing track_dir
            # directly always yielded an empty list, leaving the harmonics
            # datelist empty
            scenelist = glob.glob(
                opj(track_dir, 'Timeseries', '*bs.{}.tif'.format(polar)))

            # create a datelist for harmonics calculation
            datelist = []
            for file in sorted(scenelist):
                datelist.append(os.path.basename(file).split('.')[1])

            # define timescan prefix
            timescan_prefix = opj(timescan_dir, 'bs.{}'.format(polar))

            # placeholder for parallel execution
            if exec_file:
                print(' Write command to a text file')
                continue

            # run timescan
            timescan.mt_metrics(
                timeseries,
                timescan_prefix,
                ard_tscan['metrics'],
                rescale_to_datatype=dtype_conversion,
                to_power=to_db,
                outlier_removal=ard_tscan['remove outliers'],
                datelist=datelist
            )

        if not exec_file:
            # create vrt file (and rename) — per track, since timescan_dir
            # is re-assigned on every iteration
            ras.create_tscan_vrt(timescan_dir, proc_file)
def mosaic_timescan(burst_inventory, processing_dir, temp_dir, proc_file,
                    cut_to_aoi=False, exec_file=None, ncores=os.cpu_count()):
    """Mosaic burst-level timescan layers, optionally writing exec files.

    For every product/metric combination with at least one contributing
    burst folder, either mosaics immediately via ``mosaic.mosaic`` or, when
    ``exec_file`` is set, appends a semicolon-separated argument line to
    '<exec_file>_mosaic_tscan.txt' for deferred parallel execution.

    :param burst_inventory: burst inventory (unused here, kept for API symmetry)
    :param processing_dir: project processing directory
    :param temp_dir: directory for intermediate files
    :param proc_file: path to the JSON processing-parameters file
    :param cut_to_aoi: clip the mosaics to the project AOI
    :param exec_file: path prefix for deferred-execution text files
    :param ncores: number of cores to record in the exec-file lines
    """
    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
    metrics = ard_params['time-scan ARD']['metrics']

    # composite metrics are stored as individual bands on disk
    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])

    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # a products list
    product_list = ['bs.HH', 'bs.VV', 'bs.HV', 'bs.VH',
                    'coh.VV', 'coh.VH', 'coh.HH', 'coh.HV',
                    'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha']

    tscan_dir = opj(processing_dir, 'Mosaic', 'Timescan')
    os.makedirs(tscan_dir, exist_ok=True)

    outfiles = []
    for product, metric in itertools.product(product_list, metrics):

        filelist = glob.glob(
            opj(processing_dir, '*', 'Timescan',
                '*{}.{}.tif'.format(product, metric))
        )

        if not filelist:
            continue

        filelist = ' '.join(filelist)
        outfile = opj(tscan_dir, '{}.{}.tif'.format(product, metric))
        check_file = opj(
            os.path.dirname(outfile),
            '.{}.processed'.format(os.path.basename(outfile)[:-4])
        )

        if os.path.isfile(check_file):
            print(' INFO: Mosaic layer {} already '
                  ' processed.'.format(os.path.basename(outfile)))
            continue

        if exec_file:
            filelist = filelist.split(" ")
            # FIX: use an identity check instead of `== False`
            if cut_to_aoi is False:
                cut_to_aoi = 'False'

            # FIX: build the path with opj instead of '+'/'/'-concatenation
            parallel_temp_dir = opj(
                temp_dir, 'temp_' + product + '_mosaic_tscan')
            os.makedirs(parallel_temp_dir, exist_ok=True)

            # NOTE(review): filelist is a list here, so it is serialised as
            # its Python repr — confirm the exec-file consumer expects that
            args = '{};{};{};{};{}'.format(
                filelist, outfile, parallel_temp_dir, cut_to_aoi, ncores)

            exec_mosaic_timescan = exec_file + '_mosaic_tscan.txt'
            with open(exec_mosaic_timescan, 'a') as exe:
                exe.write('{}\n'.format(args))
        else:
            print(' INFO: Mosaicking layer {}.'.format(
                os.path.basename(outfile)))
            mosaic.mosaic(filelist, outfile, temp_dir, cut_to_aoi)
            outfiles.append(outfile)

    if not exec_file:
        # create vrt
        ras.create_tscan_vrt(tscan_dir, proc_file)
    else:
        # create vrt exec file
        exec_mosaic_tscan_vrt = exec_file + '_mosaic_tscan_vrt.txt'
        with open(exec_mosaic_tscan_vrt, 'a') as exe:
            exe.write('{};{}\n'.format(tscan_dir, proc_file))