Example no. 1
    def create_ard(self, infile, out_dir, subset=None, overwrite=False):
        """

        :param infile:
        :param out_dir:
        :param subset:
        :param overwrite:
        :return:
        """

        if self.product_type != 'GRD':
            raise ValueError(
                'The create_ard method for single products is currently '
                'only available for GRD products')

        if isinstance(infile, str):
            infile = Path(infile)

        if isinstance(out_dir, str):
            out_dir = Path(out_dir)

        # set config param necessary for processing
        self.config_dict['processing_dir'] = str(out_dir)
        self.config_dict['temp_dir'] = str(out_dir.joinpath('temp'))
        self.config_dict['snap_cpu_parallelism'] = os.cpu_count()
        self.config_dict['subset'] = False

        if subset:
            self.config_dict['subset'] = True
            self.config_dict['aoi'] = subset

        # create directories
        out_dir.mkdir(parents=True, exist_ok=True)
        out_dir.joinpath('temp').mkdir(parents=True, exist_ok=True)

        if overwrite:
            file_dir = out_dir.joinpath(f'{self.rel_orbit}/{self.start_date}')
            if file_dir.joinpath('.processed').exists():
                file_dir.joinpath('.processed').unlink()

        # --------------------------------------------
        # 2 Check if within SRTM coverage
        # set ellipsoid correction and force GTC production
        # when outside SRTM
        center_lat = self._get_center_lat(infile)
        if float(center_lat) > 59 or float(center_lat) < -59:
            logger.info(
                'Scene is outside SRTM coverage. SNAP will therefore use '
                'the GETASSE30 DEM. Also consider using a stereographic '
                'projection.')
            epsg = input(
                'Please type the EPSG code to project the output data to, or '
                'just press enter to keep the Lat/Lon coordinate system '
                '(e.g. 3413 for NSIDC Sea Ice Polar Stereographic North, '
                'or 3976 for NSIDC Sea Ice Polar Stereographic South): ')
            if not epsg:
                epsg = 4326

            self.ard_parameters['single_ARD']['dem']['dem_name'] = 'GETASSE30'
            self.ard_parameters['single_ARD']['dem']['out_projection'] = int(
                epsg)

        # --------------------------------------------
        # 3 Check ard parameters in case they have been updated,
        #   and write them to json file

        # set config file to output directory
        self.config_file = out_dir.joinpath('processing.json')

        # write ard parameters, and check if they are correct
        self.update_ard_parameters()

        # --------------------------------------------
        # 4 set resolution to degree
        # self.ard_parameters['resolution'] = h.resolution_in_degree(
        #    self.center_lat, self.ard_parameters['resolution'])

        # --------------------------------------------
        # 5 run the burst to ard batch routine
        filelist, out_bs, out_ls, error = grd_to_ard(
            [infile],
            self.config_file,
        )

        # print error if any
        if error:
            logger.info(error)
        else:
            # remove temp folder
            h.remove_folder_content(out_dir.joinpath('temp'))
            out_dir.joinpath('temp').rmdir()
            self.ard_dimap = out_bs
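
A minimal usage sketch for the method above, assuming it is exposed on OST's Sentinel1Scene class (assumed import path) and that the GRD product has already been downloaded; the scene identifier and all paths are placeholders:

from pathlib import Path

from ost import Sentinel1Scene  # assumed import path

# placeholder scene identifier of a downloaded GRD product
scene = Sentinel1Scene(
    'S1A_IW_GRDH_1SDV_20191116T170638_20191116T170703_029939_036AAB_070F'
)

# run the single-scene GRD-to-ARD workflow into a local output directory
scene.create_ard(
    infile=Path('/data/download/scene.zip'),   # placeholder path to the GRD zip
    out_dir=Path('/data/processing'),
    overwrite=True
)

# on success, the path of the backscatter product (BEAM-DIMAP) is stored here
print(scene.ard_dimap)
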
Example no. 2
    def grds_to_ard(self,
                    inventory_df=None,
                    subset=None,
                    timeseries=False,
                    timescan=False,
                    mosaic=False,
                    overwrite=False,
                    exec_file=None,
                    cut_to_aoi=False):

        self.update_ard_parameters()

        if overwrite:
            print(' INFO: Deleting processing folder to start from scratch')
            h.remove_folder_content(self.processing_dir)

        # set resolution in degree
#        self.center_lat = loads(self.aoi).centroid.y
#        if float(self.center_lat) > 59 or float(self.center_lat) < -59:
#            print(' INFO: Scene is outside SRTM coverage. Will use 30m ASTER'
#                  ' DEM instead.')
#            self.ard_parameters['dem'] = 'ASTER 1sec GDEM'

        if subset:
            if subset.endswith('.shp'):
                subset = str(vec.shp_to_wkt(subset, buffer=0.1, envelope=True))
            elif subset.startswith('POLYGON (('):
                subset = loads(subset).buffer(0.1).to_wkt()
            else:
                print(
                    ' ERROR: No valid subset given.'
                    ' Should be either path to a shapefile or a WKT Polygon.')
                sys.exit()

        # check number of already processed acquisitions
        nr_of_processed = len(
            glob.glob(opj(self.processing_dir, '*', '20*', '.processed')))

        # number of acquisitions to process
        nr_of_acq = len(
            inventory_df.groupby(['relativeorbit', 'acquisitiondate']))

        # check and retry function
        i = 0
        while nr_of_acq > nr_of_processed:

            # the grd to ard batch routine
            grd_batch.grd_to_ard_batch(inventory_df, self.download_dir,
                                       self.processing_dir, self.temp_dir,
                                       self.proc_file, subset, self.data_mount,
                                       exec_file)

            # reset number of already processed acquisitions
            nr_of_processed = len(
                glob.glob(opj(self.processing_dir, '*', '20*', '.processed')))
            i += 1

            # no more than 5 tries
            if i == 5:
                break

        # time-series part
        if timeseries or timescan:

            nr_of_processed = len(
                glob.glob(
                    opj(self.processing_dir, '*', 'Timeseries',
                        '.*processed')))

            nr_of_polar = len(
                inventory_df.polarisationmode.unique()[0].split(' '))
            nr_of_tracks = len(inventory_df.relativeorbit.unique())
            nr_of_ts = nr_of_polar * nr_of_tracks

            # check and retry function
            i = 0
            while nr_of_ts > nr_of_processed:

                grd_batch.ards_to_timeseries(inventory_df, self.processing_dir,
                                             self.temp_dir, self.proc_file,
                                             exec_file)

                nr_of_processed = len(
                    glob.glob(
                        opj(self.processing_dir, '*', 'Timeseries',
                            '.*processed')))
                i += 1

                # no more than 5 tries
                if i == 5:
                    break

        if timescan:

            # number of already processed timescans
            nr_of_processed = len(
                glob.glob(
                    opj(self.processing_dir, '*', 'Timescan', '.*processed')))

            # number of expected timescans
            nr_of_polar = len(
                inventory_df.polarisationmode.unique()[0].split(' '))
            nr_of_tracks = len(inventory_df.relativeorbit.unique())
            nr_of_ts = nr_of_polar * nr_of_tracks

            i = 0
            while nr_of_ts > nr_of_processed:

                grd_batch.timeseries_to_timescan(inventory_df,
                                                 self.processing_dir,
                                                 self.proc_file)

                nr_of_processed = len(
                    glob.glob(
                        opj(self.processing_dir, '*', 'Timescan',
                            '.*processed')))

                i += 1

                # no more than 5 tries
                if i == 5:
                    break

            if i < 5 and exec_file:
                print(' create vrt command')

        if cut_to_aoi:
            cut_to_aoi = self.aoi

        if mosaic and timeseries and not subset:
            grd_batch.mosaic_timeseries(inventory_df, self.processing_dir,
                                        self.temp_dir, cut_to_aoi)

        if mosaic and timescan and not subset:
            grd_batch.mosaic_timescan(inventory_df, self.processing_dir,
                                      self.temp_dir, self.proc_file,
                                      cut_to_aoi)
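
A hedged sketch of how the batch method above might be driven; the project object and its inventory GeoDataFrame are assumed to come from an existing OST project and are not constructed here:

def run_grd_batch(project, inventory_df):
    """Run the full GRD batch workflow on an existing OST project (sketch)."""
    # inventory_df is assumed to be the (refined) search inventory GeoDataFrame
    project.grds_to_ard(
        inventory_df=inventory_df,
        timeseries=True,   # also build the per-track time-series
        timescan=True,     # and the multi-temporal statistics (timescan)
        mosaic=True,       # mosaic the per-track results
        overwrite=False
    )
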
Example no. 3
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath of the burst (e.g. IW1)
        master_burst_nr (int): index number of the burst within the subswath
        master_burst_id (str): unique identifier of the master burst
        proc_file (str): path to the JSON file with the processing parameters
        out_dir (str): directory where the final products are written
        temp_dir (str): directory for temporary products
        slave_file (str): path to full slave SLC scene (for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique identifier of the slave burst
        coherence (bool): whether to additionally calculate the coherence
        remove_slave_import (bool): whether to delete the imported slave
            product after co-registration

    '''

    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, ard['remove pol speckle'])

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, ard['product type'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following routine
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if ard['product type'] == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_rtc for further processing
        out_cal = out_rtc

    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    if ard['create ls mask']:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               ard['resolution'], ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr, polars)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log,
                                 coh_polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
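
A hedged call sketch for the routine above; burst indices, IDs and all paths are placeholders, and the processing file is assumed to have the JSON structure read at the top of the function:

return_code = burst_to_ard(
    master_file='/data/SLC/S1A_IW_SLC_master.zip',        # placeholder scene path
    swath='IW1',
    master_burst_nr=3,                                     # placeholder burst index
    master_burst_id='A117_IW1_burst_3',                    # placeholder burst id
    proc_file='/data/project/processing.json',             # placeholder parameter file
    out_dir='/data/project/processing/A117_IW1_burst_3',
    temp_dir='/data/project/temp',
    coherence=False                                        # backscatter (and H-A-Alpha) only
)

# a non-zero return code means a SNAP step failed and the temp folder was cleaned up
if return_code != 0:
    print('Burst processing failed with return code {}'.format(return_code))
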
Example no. 4
def grd_to_ard(filelist,
               output_dir,
               file_id,
               temp_dir,
               proc_file,
               subset=None):
    '''The main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    All relevant processing parameters are read from the processing file.
    The function can handle multiple inputs of the same acquisition,
    given that they are consecutive data takes.

    Args:
        filelist (list): must be a list with one or more absolute
                  paths to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the temporary folder
        proc_file: path to the JSON file with the processing parameters
        subset (str): WKT geometry to subset the scene (optional)

    Returns:
        None on success, otherwise the return code of the failing step

    Notes:
        no explicit return value on success, since the output file is our
        actual return
    '''

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        polars = ard['polarisation'].replace(' ', '')

    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir,
                             '{}_imported'.format(os.path.basename(file)[:-5]))
            logfile = opj(
                output_dir,
                '{}.Import.errLog'.format(os.path.basename(file)[:-5]))

            return_code = _grd_frame_import(file, grd_import, logfile, polars)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}._slice_assembly.errLog'.format(file_id))
        return_code = _slice_assembly(
            scenelist,
            grd_import,
            logfile,
        )
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        for file in filelist:
            h.delete_dimap(
                opj(temp_dir,
                    '{}_imported'.format(os.path.basename(str(file))[:-5])))

        if subset:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            return_code = _grd_subset_georegion('{}.dim'.format(grd_import),
                                                grd_subset, logfile, subset)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

            # delete slice assembly
            h.delete_dimap(grd_import)

    # single scene case
    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            return_code = _grd_frame_import(filelist[0], grd_import, logfile,
                                            polars)
        else:
            return_code = _grd_frame_import_subset(filelist[0], grd_import,
                                                   subset, logfile, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code
    # ---------------------------------------------------------------------
    # Remove the GRD border noise from the existing channels (OST routine)

    if ard['remove border noise'] and not subset:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(
                opj(temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # set new infile
    infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
    # -------------------------------------------
    # in case we want to apply Speckle filtering
    if ard['remove speckle']:

        logfile = opj(temp_dir, '{}.Speckle.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_spk'.format(file_id))

        # run processing
        return_code = _grd_speckle_filter(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete input
        h.delete_dimap(infile[:-4])
        # define infile for next processing step
        infile = '{}.dim'.format(outfile)

    # ----------------------
    # do the calibration
    outfile = opj(temp_dir, '{}.{}'.format(file_id, ard['product type']))
    logfile = opj(output_dir, '{}.Backscatter.errLog'.format(file_id))
    return_code = _grd_backscatter(infile, outfile, logfile, ard['dem'],
                                   ard['product type'])

    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # delete input file
    h.delete_dimap(infile[:-4])

    # input file for the following step
    infile = '{}.dim'.format(outfile)

    # ----------------------------------------------
    # let's create a Layover shadow mask if needed
    if ard['create ls mask'] is True:
        outfile = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        return_code = _grd_ls_mask(infile, outfile, logfile, ard['resolution'],
                                   ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(outfile, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(outfile), '{}.data'.format(out_ls_mask))

    # to db
    if ard['to db']:
        logfile = opj(output_dir, '{}.linToDb.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_{}_db'.format(file_id,
                                                  ard['product type']))
        return_code = _grd_to_db(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete
        h.delete_dimap(infile[:-4])
        # re-define infile
        infile = opj(temp_dir, '{}_{}_db.dim'.format(file_id,
                                                     ard['product type']))

    # -----------------------
    # let's geocode the data
    # infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))
    outfile = opj(temp_dir, '{}.bs'.format(file_id))
    logfile = opj(output_dir, '{}.bs.errLog'.format(file_id))
    return_code = _grd_terrain_correction(infile, outfile, logfile,
                                          ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # remove calibrated files
    h.delete_dimap(infile[:-4])

    # move to final destination
    out_final = opj(output_dir, '{}.bs'.format(file_id))

    # remove file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)

    return_code = h.check_out_dimap(outfile)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(outfile), '{}.data'.format(out_final))

    # write file, so we know this acquisition has been successfully processed
    if return_code == 0:
        check_file = opj(output_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(output_dir)
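
A hedged call sketch for the workflow above, assembling two consecutive GRD frames of the same data take; all paths and the file_id prefix are placeholders:

return_code = grd_to_ard(
    filelist=['/data/download/S1A_GRDH_frame1.zip',   # placeholder scene paths
              '/data/download/S1A_GRDH_frame2.zip'],
    output_dir='/data/processing/117/20200101',
    file_id='20200101_117',                            # placeholder date_track prefix
    temp_dir='/data/temp',
    proc_file='/data/project/processing.json',         # placeholder parameter file
    subset=None                                        # process the full frames
)

# the function returns a non-zero SNAP return code on failure and None on success
if return_code:
    print('GRD to ARD processing failed with return code {}'.format(return_code))
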
Example no. 5
    def bursts_to_ard(self,
                      timeseries=False,
                      timescan=False,
                      mosaic=False,
                      overwrite=False,
                      exec_file=None,
                      cut_to_aoi=False):

        # in case ard parameters have been updated, write them to json file
        self.update_ard_parameters()

        if overwrite:
            print(' INFO: Deleting processing folder to start from scratch')
            h.remove_folder_content(self.processing_dir)

        # set resolution in degree
        self.center_lat = loads(self.aoi).centroid.y
        if float(self.center_lat) > 59 or float(self.center_lat) < -59:
            print(' INFO: Scene is outside SRTM coverage. Will use 30m ASTER'
                  ' DEM instead.')
            self.ard_parameters['single ARD']['dem'] = 'ASTER 1sec GDEM'

        # set resolution to degree
        # self.ard_parameters['resolution'] = h.resolution_in_degree(
        #    self.center_lat, self.ard_parameters['resolution'])

        nr_of_processed = len(
            glob.glob(opj(self.processing_dir, '*', '*', '.processed')))

        # check and retry function
        i = 0
        while len(self.burst_inventory) > nr_of_processed:

            burst.burst_to_ard_batch(self.burst_inventory, self.download_dir,
                                     self.processing_dir, self.temp_dir,
                                     self.proc_file, self.data_mount,
                                     exec_file)

            nr_of_processed = len(
                glob.glob(opj(self.processing_dir, '*', '*', '.processed')))

            i += 1

            # no more than 5 tries
            if i == 5:
                break

        # do we delete the downloads here?
        if timeseries or timescan:
            burst.burst_ards_to_timeseries(self.burst_inventory,
                                           self.processing_dir, self.temp_dir,
                                           self.proc_file, exec_file)

            # do we delete the single ARDs here?
            if timescan:
                burst.timeseries_to_timescan(self.burst_inventory,
                                             self.processing_dir,
                                             self.temp_dir, self.proc_file)

        if cut_to_aoi:
            cut_to_aoi = self.aoi

        if mosaic and timeseries:
            burst.mosaic_timeseries(self.burst_inventory, self.processing_dir,
                                    self.temp_dir, cut_to_aoi)

        if mosaic and timescan:
            burst.mosaic_timescan(self.burst_inventory, self.processing_dir,
                                  self.temp_dir, self.proc_file, cut_to_aoi)
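
A hedged sketch of driving the burst batch method above from an existing OST project object; the project instance is an assumption and is not constructed here:

def run_burst_batch(project):
    """Run the full burst-based SLC workflow on an existing OST project (sketch)."""
    # process all bursts in the project's burst inventory, then build
    # time-series, timescans and mosaics from the single-burst ARDs
    project.bursts_to_ard(
        timeseries=True,
        timescan=True,
        mosaic=True,
        overwrite=False,
        cut_to_aoi=True   # crop the mosaics to the project AOI
    )
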
Example no. 6
def grd_to_ard(filelist, config_file):
    """Main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD defintion for Sentinel-1 backcsatter products.

    By changing the parameters, taking care of all parameters
    that can be given. The function can handle multiple inputs of the same
    acquisition, given that there are consecutive data takes.

    :param filelist: must be a list with one or more
                     absolute paths to GRD scene(s)
    :param config_file:
    :return:
    """

    from ost.s1.s1scene import Sentinel1Scene

    # ----------------------------------------------------
    # 1 load relevant config parameters
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        ard = config_dict['processing']['single_ARD']
        processing_dir = Path(config_dict['processing_dir'])
        subset = config_dict['subset']

    # ----------------------------------------------------
    # 2 define final destination dir/file and ls mask

    # get acquisition date and track from first scene in list
    first = Sentinel1Scene(Path(filelist[0]).stem)
    acquisition_date = first.start_date
    track = first.rel_orbit

    logger.info(
        f'Processing acquisition from {acquisition_date} over track {track}.'
    )

    # construct namespace for out directory etc.
    out_dir = processing_dir.joinpath(f'{track}/{acquisition_date}')
    out_dir.mkdir(parents=True, exist_ok=True)
    file_id = f'{acquisition_date}_{track}'
    out_final = out_dir.joinpath(f'{file_id}_bs')
    out_ls_mask = out_dir.joinpath(f'{file_id}_LS')

    suf = '.tif' if ard['to_tif'] else '.dim'

    # ----------------------------------------------------
    # 3 check if already processed
    if out_dir.joinpath('.processed').exists() and \
            out_final.with_suffix(suf).exists():
        logger.info(
            f'Acquisition from {acquisition_date} of track {track} '
            f'already processed'
        )

        if out_ls_mask.with_suffix(suf).exists():
            out_ls = out_ls_mask.with_suffix(suf)
        else:
            out_ls = None

        return filelist, out_final.with_suffix(suf), out_ls, None

    # ----------------------------------------------------
    # 4 run the processing routine

    # this might happen in the create_ard from s1scene class
    if not config_dict['temp_dir']:
        temp_dir = processing_dir.joinpath('temp')
        temp_dir.mkdir(parents=True, exist_ok=True)
    else:
        temp_dir = config_dict['temp_dir']

    with TemporaryDirectory(prefix=f"{temp_dir}/") as temp:

        # convert temp directory to Path object
        temp = Path(temp)

        # ---------------------------------------------------------------------
        # 4.1 Import
        # slice assembly if more than one scene
        if len(filelist) > 1:

            # if more than one frame import all files
            for file in filelist:

                # unzip for faster import?
                unpack = None
                if Path(file).suffix == '.zip':
                    with zipfile.ZipFile(file, 'r') as zip_ref:
                        zip_ref.extractall(temp)

                    file = temp.joinpath(f'{file.stem}.SAFE')
                    unpack = True

                # create namespace for temporary imported product
                grd_import = temp.joinpath(f'{file.stem}_imported')

                # create namespace for import log
                logfile = out_dir.joinpath(f'{file.stem}.Import.errLog')

                # set subset temporarily to False for the import routine
                config_dict['subset'] = False
                # frame import
                try:
                    grd.grd_frame_import(
                        file, grd_import, logfile, config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                config_dict['subset'] = subset

                if unpack:
                    h.remove_folder_content(file)
                    file.rmdir()

            # create list of scenes for full acquisition in
            # preparation of slice assembly
            scenelist = ' '.join(
                [str(file) for file in list(temp.glob('*imported.dim'))]
            )

            # create namespace for temporary slice assembled import product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for slice assembled log
            logfile = out_dir.joinpath(f'{file_id}._slice_assembly.errLog')

            # run slice assembly
            try:
                grd.slice_assembly(scenelist, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete imported frames
            for file in filelist:
                h.delete_dimap(temp.joinpath(f'{file.stem}_imported'))

            # subset mode after slice assembly
            if subset:

                # create namespace for temporary subset product
                grd_subset = temp.joinpath(f'{file_id}_imported_subset')

                # create namespace for subset log
                logfile = out_dir.joinpath(f'{file_id}._slice_assembly.errLog')

                # run subset routine
                try:
                    grd.grd_subset_georegion(
                        grd_import.with_suffix('.dim'), grd_subset, logfile,
                        config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                # delete slice assembly input to subset
                h.delete_dimap(grd_import)

                # set subset to import for subsequent functions
                grd_import = grd_subset

        # single scene case
        else:

            file = filelist[0]

            # unzip for faster import
            unpack = None
            if Path(file).suffix == '.zip':
                with zipfile.ZipFile(file, 'r') as zip_ref:
                    zip_ref.extractall(temp)

                file = temp.joinpath(f'{file.stem}.SAFE')
                unpack = True

            # create namespace for temporary imported product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for import log
            logfile = out_dir.joinpath(f'{file_id}.Import.errLog')

            # run frame import
            try:
                grd.grd_frame_import(file, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            if unpack:
                h.remove_folder_content(file)
                file.rmdir()

        # set input for next step
        infile = grd_import.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.2 GRD Border Noise
        if ard['remove_border_noise'] and not subset:

            # loop through possible polarisations
            for polarisation in ['VV', 'VH', 'HH', 'HV']:

                # get input file
                file = list(temp.glob(
                    f'{file_id}_imported*data/Intensity_{polarisation}.img'
                ))

                # remove border noise
                if len(file) == 1:
                    # run grd Border Remove
                    grd.grd_remove_border(file[0])

        # ---------------------------------------------------------------------
        # 4.3 Calibration

        # create namespace for temporary calibrated product
        calibrated = temp.joinpath(f'{file_id}_cal')

        # create namespace for calibration log
        logfile = out_dir.joinpath(f'{file_id}.calibration.errLog')

        # run calibration
        try:
            grd.calibration(infile, calibrated, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input
        h.delete_dimap(infile.with_suffix(''))

        # input for next step
        infile = calibrated.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.4 Multi-looking
        if int(ard['resolution']) >= 20:

            # create namespace for temporary multi-looked product
            multi_looked = temp.joinpath(f'{file_id}_ml')

            # create namespace for multi-look log
            logfile = out_dir.joinpath(f'{file_id}.multilook.errLog')

            # run multi-looking
            try:
                grd.multi_look(infile, multi_looked, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = multi_looked.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.5 Layover shadow mask
        out_ls = None  # set to none for final return statement
        if ard['create_ls_mask'] is True:

            # create namespace for temporary ls mask product
            ls_mask = temp.joinpath(f'{file_id}_ls_mask')

            # create namespace for ls mask log
            logfile = out_dir.joinpath(f'{file_id}.ls_mask.errLog')

            # run ls mask routine
            try:
                common.ls_mask(infile, ls_mask, logfile, config_dict)
                out_ls = out_ls_mask.with_suffix('.dim')
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # polygonize
            ls_raster = list(ls_mask.with_suffix('.data').glob('*img'))[0]
            ras.polygonize_ls(ls_raster, ls_mask.with_suffix('.json'))

        # ---------------------------------------------------------------------
        # 4.6 Speckle filtering
        if ard['remove_speckle']:

            # create namespace for temporary speckle filtered product
            filtered = temp.joinpath(f'{file_id}_spk')

            # create namespace for speckle filter log
            logfile = out_dir.joinpath(f'{file_id}.Speckle.errLog')

            # run speckle filter
            try:
                common.speckle_filter(infile, filtered, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = filtered.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.7 Terrain flattening
        if ard['product_type'] == 'RTC-gamma0':

            # create namespace for temporary terrain flattened product
            flattened = temp.joinpath(f'{file_id}_flat')

            # create namespace for terrain flattening log
            logfile = out_dir.joinpath(f'{file_id}.tf.errLog')

            # run terrain flattening
            try:
                common.terrain_flattening(
                    infile, flattened, logfile, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = flattened.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.8 Linear to db
        if ard['to_db']:

            # create namespace for temporary db scaled product
            db_scaled = temp.joinpath(f'{file_id}_db')

            # create namespace for db scaled log
            logfile = out_dir.joinpath(f'{file_id}.db.errLog')

            # run db scaling routine
            try:
                common.linear_to_db(infile, db_scaled, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # set input for next step
            infile = db_scaled.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.9 Geocoding

        # create namespace for temporary geocoded product
        geocoded = temp.joinpath(f'{file_id}_bs')

        # create namespace for geocoding log
        logfile = out_dir.joinpath(f'{file_id}_bs.errLog')

        # run geocoding
        try:
            common.terrain_correction(infile, geocoded, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input file
        h.delete_dimap(infile.with_suffix(''))

        # define final destination
        out_final = out_dir.joinpath(f'{file_id}_bs')

        # ---------------------------------------------------------------------
        # 4.10 Create an outline
        ras.image_bounds(geocoded.with_suffix('.data'))

        # ---------------------------------------------------------------------
        # 4.11 Copy LS Mask vector to data dir
        if ard['create_ls_mask'] is True:
            ls_mask.with_suffix('.json').rename(
                geocoded.with_suffix('.data')
                .joinpath(ls_mask.name).with_suffix('.json')
            )

        # ---------------------------------------------------------------------
        # 4.12 Move to output directory
        h.move_dimap(geocoded, out_final, ard['to_tif'])

    # ---------------------------------------------------------------------
    # 5 write processed file to keep track of files already processed
    with open(out_dir.joinpath('.processed'), 'w') as file:
        file.write('passed all tests \n')

    return filelist, out_final.with_suffix('.dim'), out_ls, None
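
A hedged call sketch for the refactored workflow above; the GRD path and the config file (the JSON written by the project/scene classes) are placeholders:

from pathlib import Path

filelist, out_bs, out_ls, error = grd_to_ard(
    [Path('/data/download/S1A_IW_GRDH_scene.zip')],   # placeholder GRD product
    '/data/processing/processing.json'                # placeholder config file
)

if error:
    print(f'Processing failed: {error}')
else:
    print(f'Backscatter product written to {out_bs}')
    if out_ls:
        print(f'Layover/shadow mask written to {out_ls}')
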
Example no. 7
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 polarimetry=False,
                 pol_speckle_filter=False,
                 resolution=20,
                 product_type='GTCgamma',
                 speckle_filter=False,
                 to_db=False,
                 ls_mask_create=False,
                 dem='SRTM 1sec HGT',
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath of the burst (e.g. IW1)
        master_burst_nr (int): index number of the burst within the subswath
        master_burst_id (str): unique identifier of the master burst
        out_dir (str): directory where the final products are written
        temp_dir (str): directory for temporary products
        slave_file (str): path to full slave SLC scene (for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique identifier of the slave burst
        coherence (bool): whether to additionally calculate the coherence
        polarimetry (bool): whether to create an H-A-Alpha decomposition
        pol_speckle_filter (bool): whether to apply a polarimetric speckle filter
        resolution (int): resolution of the output product in metres
        product_type (str): backscatter product type (e.g. GTCgamma or RTC)
        speckle_filter (bool): whether to apply a speckle filter
        to_db (bool): whether to convert backscatter to dB scale
        ls_mask_create (bool): whether to create a layover/shadow mask
        dem (str): DEM used for terrain correction
        remove_slave_import (bool): whether to delete the imported slave
            product after co-registration

    '''

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if polarimetry:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, pol_speckle_filter)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_ha_alpha'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc,
                     opj(out_dir, '{}_ha_alpha'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, product_type)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if speckle_filter:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following routine
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if product_type == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_rtc for further processing
        out_cal = out_rtc

    if to_db:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_BS'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_BS_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      resolution, dem)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_BS'.format(master_burst_id)))

    if ls_mask_create:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        # filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        # filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg2(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
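
A hedged call sketch for the keyword-driven variant above, producing backscatter, an H-A-Alpha decomposition and coherence for one burst; all paths, IDs and indices are placeholders:

return_code = burst_to_ard(
    master_file='/data/SLC/master_scene.zip',          # placeholder master scene
    swath='IW2',
    master_burst_nr=5,                                  # placeholder burst index
    master_burst_id='A044_IW2_burst_5',                 # placeholder burst id
    out_dir='/data/processing/A044_IW2_burst_5',
    temp_dir='/data/temp',
    slave_file='/data/SLC/slave_scene.zip',             # placeholder slave scene
    slave_burst_nr=5,
    slave_burst_id='A044_IW2_burst_5_slave',            # placeholder slave burst id
    coherence=True,
    polarimetry=True,
    product_type='RTC',            # also triggers terrain flattening
    speckle_filter=True,
    to_db=True,
    ls_mask_create=True,
    dem='SRTM 1sec HGT'
)

if return_code != 0:
    print('Burst processing failed with return code {}'.format(return_code))
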
Example no. 8
def ard_to_ts(list_of_files,
              processing_dir,
              temp_dir,
              burst,
              proc_file,
              product,
              pol,
              ncores=os.cpu_count()):
    if isinstance(list_of_files, str):
        list_of_files = list_of_files.replace("'", '').strip('][').split(', ')

    # get the burst directory
    burst_dir = opj(processing_dir, burst)

    # check routine if timeseries has already been processed
    check_file = opj(burst_dir, 'Timeseries',
                     '.{}.{}.processed'.format(product, pol))
    if os.path.isfile(check_file):
        print(' INFO: Timeseries of {} for {} in {} polarisation already'
              ' processed'.format(burst, product, pol))
        return

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        ard_mt = ard_params['time-series ARD']
        if ard_mt['remove mt speckle'] is True:
            ard_mt_speck = ard_params['time-series ARD']['mt speckle filter']
    # get the db scaling right
    to_db = ard['to db']
    if to_db or product != 'bs':
        to_db = False
        print('INFO: Not converting to dB for {}'.format(product))
    else:
        to_db = ard_mt['to db']
        print('INFO: Converting to dB for {}'.format(product))

    if ard['apply ls mask']:
        extent = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
    else:
        extent = opj(burst_dir, '{}.extent.shp'.format(burst))

    # min max dict for stretching in case of 16 or 8 bit datatype
    mm_dict = {
        'bs': {
            'min': -30,
            'max': 5
        },
        'coh': {
            'min': 0.000001,
            'max': 1
        },
        'Alpha': {
            'min': 0.000001,
            'max': 90
        },
        'Anisotropy': {
            'min': 0.000001,
            'max': 1
        },
        'Entropy': {
            'min': 0.000001,
            'max': 1
        }
    }

    stretch = pol if pol in ['Alpha', 'Anisotropy', 'Entropy'] else product

    # define out_dir for stacking routine
    out_dir = opj(processing_dir, '{}'.format(burst), 'Timeseries')
    os.makedirs(out_dir, exist_ok=True)

    # create namespaces
    temp_stack = opj(temp_dir, '{}_{}_{}'.format(burst, product, pol))
    out_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, product, pol))
    stack_log = opj(out_dir,
                    '{}_{}_{}_stack.err_log'.format(burst, product, pol))

    # run stacking routines
    # convert list of files into a string readable by SNAP
    list_of_files = '\'{}\''.format(','.join(list_of_files))

    if pol in ['Alpha', 'Anisotropy', 'Entropy']:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' the {} band of the polarimetric H-A-Alpha'
            ' decomposition.'.format(burst, pol))
        create_stack(list_of_files, temp_stack, stack_log, pattern=pol)
    else:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' {} product in {} polarization.'.format(burst, product, pol))
        create_stack(list_of_files, temp_stack, stack_log, polarisation=pol)

    # run mt speckle filter
    if ard_mt['remove mt speckle'] is True:
        speckle_log = opj(
            out_dir, '{}_{}_{}_mt_speckle.err_log'.format(burst, product, pol))

        print(' INFO: Applying multi-temporal speckle filter')
        mt_speckle_filter('{}.dim'.format(temp_stack),
                          out_stack,
                          speckle_log,
                          speckle_dict=ard_mt_speck,
                          ncores=ncores)
        # remove tmp files
        h.delete_dimap(temp_stack)
    else:
        out_stack = temp_stack

    if product == 'coh':

        # get slave and master Date
        mstDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[3].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]

        slvDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[4].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        mstDates.sort()
        slvDates.sort()
        # write them back to string for following loop
        sortedMstDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in mstDates
        ]
        sortedSlvDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in slvDates
        ]

        i, outfiles = 1, []
        for mst, slv in zip(sortedMstDates, sortedSlvDates):

            inMst = datetime.datetime.strptime(mst, '%d%b%Y')
            inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

            outMst = datetime.datetime.strftime(inMst, '%y%m%d')
            outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')
            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}_{}*img'.format(pol, mst, slv)))[0]

            outfile = opj(
                out_dir,
                '{:02d}.{}.{}.{}.{}.tif'.format(i, outMst, outSlv, product,
                                                pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0,
                              description=True)
            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    else:
        # get the dates of the files
        dates = [
            datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        dates.sort()
        # write them back to string for following loop
        sortedDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
        ]

        i, outfiles = 1, []
        for date in sortedDates:

            # restructure date to YYMMDD
            inDate = datetime.datetime.strptime(date, '%d%b%Y')
            outDate = datetime.datetime.strftime(inDate, '%y%m%d')

            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}*img'.format(pol, date)))[0]

            # create outfile
            outfile = opj(
                out_dir, '{:02d}.{}.{}.{}.tif'.format(i, outDate, product,
                                                      pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0)

            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    # quality check on all output files; abort on the first broken one
    return_code = 0
    for file in outfiles:
        return_code = h.check_out_tiff(file)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            os.remove(file)
            return return_code

    # write check file, so we know this time-series has been
    # successfully processed
    if return_code == 0:
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')

    # build vrt of timeseries
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.{}.vrt'.format(product, pol)),
                  outfiles,
                  options=vrt_options)

    # remove tmp files
    h.delete_dimap(out_stack)
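
# A minimal usage sketch for the ard_to_ts routine defined above. All paths,
# the burst id and the file list are illustrative assumptions, not real
# products; in a project run these values come from the batch processing step.
import os

processing_dir = '/data/project/processing'
temp_dir = '/data/project/temp'
burst = 'A117_IW1_7453'
proc_file = os.path.join(processing_dir, 'processing.json')

# hypothetical list of geocoded backscatter products for that burst
list_of_files = [
    os.path.join(processing_dir, burst, '20200101', '20200101_bs.dim'),
    os.path.join(processing_dir, burst, '20200113', '20200113_bs.dim'),
]

ard_to_ts(list_of_files, processing_dir, temp_dir, burst,
          proc_file, product='bs', pol='VV')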
Example n. 9
def combine_timeseries(processing_dir, config_dict, timescan=True):

    # namespaces for folder
    comb_dir = processing_dir.joinpath('combined')
    if comb_dir.exists():
        h.remove_folder_content(comb_dir)

    tseries_dir = comb_dir.joinpath('Timeseries')
    tseries_dir.mkdir(parents=True, exist_ok=True)

    PRODUCT_LIST = [
        'bs.HH', 'bs.VV', 'bs.HV', 'bs.VH', 'coh.VV', 'coh.VH', 'coh.HH',
        'coh.HV', 'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha'
    ]

    iter_list = []
    for product_type in PRODUCT_LIST:

        filelist = list(
            processing_dir.glob(f'*/Timeseries/*{product_type}.tif'))

        if len(filelist) > 1:
            # reset the output list per product type, so each vrt only
            # contains files of that product
            out_files = []
            datelist = sorted([file.name.split('.')[1] for file in filelist])

            for i, date in enumerate(datelist):
                file = list(
                    processing_dir.glob(
                        f'*/Timeseries/*{date}*{product_type}.tif'))
                outfile = tseries_dir.joinpath(
                    f'{i+1:02d}.{date}.{product_type}.tif')

                shutil.copy(file[0], str(outfile))
                out_files.append(str(outfile))

            vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
            out_vrt = str(
                tseries_dir.joinpath(f'Timeseries.{product_type}.vrt'))
            gdal.BuildVRT(str(out_vrt), out_files, options=vrt_options)

            if timescan:
                from ost.generic import timescan as ts
                ard = config_dict['processing']['single_ARD']
                ard_mt = config_dict['processing']['time-series_ARD']
                ard_tscan = config_dict['processing']['time-scan_ARD']

                # get the db scaling right
                to_db = ard['to_db']
                if ard['to_db'] or ard_mt['to_db']:
                    to_db = True

                dtype_conversion = ard_mt['dtype_output'] != 'float32'

                tscan_dir = comb_dir.joinpath('Timescan')
                tscan_dir.mkdir(parents=True, exist_ok=True)

                # get timeseries vrt
                time_series = tseries_dir.joinpath(
                    f'Timeseries.{product_type}.vrt')

                if not time_series.exists():
                    continue

                # create a datelist for harmonics
                scene_list = [
                    str(file)
                    for file in list(tseries_dir.glob(f'*{product_type}.tif'))
                ]

                # create a datelist for harmonics calculation
                datelist = []
                for file in sorted(scene_list):
                    datelist.append(os.path.basename(file).split('.')[1])

                # define timescan prefix
                timescan_prefix = tscan_dir.joinpath(f'{product_type}')

                iter_list.append([
                    time_series, timescan_prefix, ard_tscan['metrics'],
                    dtype_conversion, to_db, ard_tscan['remove_outliers'],
                    datelist
                ])

    # only run the timescan part if there actually is something to process
    if timescan and iter_list:
        # now we run with godale, which works also with 1 worker
        executor = Executor(executor=config_dict['executor_type'],
                            max_workers=config_dict['max_workers'])

        # run timescan creation
        out_dict = {'track': [], 'prefix': [], 'metrics': [], 'error': []}
        for task in executor.as_completed(func=ts.gd_mt_metrics,
                                          iterable=iter_list):
            burst, prefix, metrics, error = task.result()
            out_dict['track'].append(burst)
            out_dict['prefix'].append(prefix)
            out_dict['metrics'].append(metrics)
            out_dict['error'].append(error)

        create_tscan_vrt(tscan_dir, config_dict)
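
# A hedged example of how combine_timeseries might be called. The config
# dictionary below only contains the keys the function actually reads; the
# metric names and paths are illustrative assumptions.
from pathlib import Path

config_dict = {
    'processing': {
        'single_ARD': {'to_db': False},
        'time-series_ARD': {'to_db': True, 'dtype_output': 'uint16'},
        'time-scan_ARD': {'metrics': ['avg', 'max', 'min', 'std', 'cov'],
                          'remove_outliers': True},
    },
    'executor_type': 'billiard',
    'max_workers': 2,
}

combine_timeseries(Path('/data/project/processing'), config_dict, timescan=True)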
Example n. 10
    def grds_to_ards(self,
                     inventory_df,
                     timeseries=False,
                     timescan=False,
                     mosaic=False,
                     overwrite=False,
                     max_workers=1,
                     executor_type='billiard'):

        self.config_dict['max_workers'] = max_workers
        self.config_dict['executor_type'] = executor_type

        # in case we start from scratch, delete all data
        # within processing folder
        if overwrite:
            logger.info('Deleting processing folder to start from scratch')
            h.remove_folder_content(self.processing_dir)

        # --------------------------------------------
        # 2 Check if within SRTM coverage
        # set ellipsoid correction and force GTC production
        # when outside SRTM
        center_lat = loads(self.aoi).centroid.y
        if float(center_lat) > 59 or float(center_lat) < -59:
            logger.info(
                'Scene is outside SRTM coverage. Snap will therefore use '
                'the GETASSE30 DEM. Also consider using a stereographic '
                'projection, e.g. the NSIDC Sea Ice Polar Stereographic '
                'North projection (EPSG 3413).')
            epsg = input(
                'Please type the EPSG code to project the output data to, or '
                'just press enter to keep the Lat/Lon coordinate system '
                '(e.g. 3413 for NSIDC Sea Ice Polar Stereographic North '
                'projection, or 3976 for NSIDC Sea Ice Polar Stereographic '
                'South projection): ')
            if not epsg:
                epsg = 4326

            self.ard_parameters['single_ARD']['dem']['dem_name'] = 'GETASSE30'
            self.ard_parameters['single_ARD']['dem']['out_projection'] = int(
                epsg)

        # --------------------------------------------
        # 3 subset determination
        # we need a check function that checks
        self.config_dict.update(subset=vec.set_subset(self.aoi, inventory_df))

        # dump to json file
        with open(self.config_file, 'w') as outfile:
            json.dump(self.config_dict, outfile, indent=4)

        # --------------------------------------------
        # 4 Check ard parameters in case they have been updated,
        #   and write them to json file
        self.update_ard_parameters()

        # --------------------------------------------
        # 1 delete data in case of previous runs
        # delete data in temporary directory in case there is
        # something left from aborted previous runs
        h.remove_folder_content(self.config_dict['temp_dir'])

        # --------------------------------------------
        # 5 set resolution in degree
        # self.center_lat = loads(self.aoi).centroid.y
        # if float(self.center_lat) > 59 or float(self.center_lat) < -59:
        #   logger.info(
        #       'Scene is outside SRTM coverage. Will use 30m #
        #       'ASTER DEM instead.'
        #   )
        #   self.ard_parameters['dem'] = 'ASTER 1sec GDEM'

        # --------------------------------------------
        # 6 run the grd to ard batch routine
        processing_df = grd_batch.grd_to_ard_batch(inventory_df,
                                                   self.config_file)

        # time-series part
        if timeseries or timescan:
            grd_batch.ards_to_timeseries(inventory_df, self.config_file)

        if timescan:
            grd_batch.timeseries_to_timescan(inventory_df, self.config_file)

        if mosaic and timeseries:
            grd_batch.mosaic_timeseries(inventory_df, self.config_file)

        # --------------------------------------------
        # 9 mosaic the timescans
        if mosaic and timescan:
            grd_batch.mosaic_timescan(self.config_file)

        return processing_df
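
# Hypothetical call of the grds_to_ards method above. `project` is assumed to
# be an instance of the OST batch class this method belongs to, and
# `inventory_df` its scene inventory (e.g. a GeoDataFrame of search results);
# only the keyword arguments are taken from the signature above.
processing_df = project.grds_to_ards(
    inventory_df,
    timeseries=True,
    timescan=True,
    mosaic=False,
    overwrite=False,
    max_workers=2,
    executor_type='billiard',
)
print(processing_df.head())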
Example n. 11
    def bursts_to_ards(self,
                       timeseries=False,
                       timescan=False,
                       mosaic=False,
                       overwrite=False):
        """Batch processing function for full burst pre-processing workflow

        This function allows for the generation of the

        :param timeseries: if True, Time-series will be generated for
        each burst id
        :type timeseries: bool, optional
        :param timescan: if True, Timescans will be generated for each burst id
        type: timescan: bool, optional
        :param mosaic: if True, Mosaics will be generated from the
                       Time-Series/Timescans of each burst id
        :type mosaic: bool, optional
        :param overwrite: (if True, the processing folder will be
        emptied
        :type overwrite: bool, optional
        :param max_workers: number of parallel burst
        :type max_workers: int, default=1
        processing jobs
        :return:
        """

        # --------------------------------------------
        # 2 Check if within SRTM coverage
        # set ellipsoid correction and force GTC production
        # when outside SRTM
        center_lat = loads(self.aoi).centroid.y
        if float(center_lat) > 59 or float(center_lat) < -59:
            logger.info('Scene is outside SRTM coverage. Snap will therefore '
                        'use the Earth\'s geoid model.')
            self.ard_parameters['single_ARD']['dem'][
                'dem_name'] = 'Aster 1sec GDEM'

        # --------------------------------------------
        # 3 subset determination
        # we need a check function that checks
        self.config_dict['subset'] = False
        # This does not work at the moment, and maybe does not even make sense,
        # since for the co-registration we would need a sufficient
        # part of the image
        # self.config_dict['subset'] = vec.set_subset(
        #     self.aoi, self.burst_inventory
        # )

        # --------------------------------------------
        # 4 Check ard parameters in case they have been updated,
        #   and write them to json file
        self.update_ard_parameters()

        # --------------------------------------------
        # 1 delete data from previous runs
        # delete data in temporary directory in case there is
        # something left from previous runs
        h.remove_folder_content(self.config_dict['temp_dir'])

        # in case we start from scratch, delete all data
        # within processing folder
        if overwrite:
            logger.info('Deleting processing folder to start from scratch')
            h.remove_folder_content(self.config_dict['processing_dir'])

        # --------------------------------------------
        # 5 set resolution to degree
        # self.ard_parameters['resolution'] = h.resolution_in_degree(
        #    self.center_lat, self.ard_parameters['resolution'])

        if self.config_dict['max_workers'] > 1:
            self.pre_download_srtm()

        # --------------------------------------------
        # 6 run the burst to ard batch routine (up to 3 times if needed)
        i = 1
        while i < 4:
            processed_bursts_df = burst_batch.bursts_to_ards(
                self.burst_inventory, self.config_file)

            if False in processed_bursts_df.error.isnull().tolist():
                i += 1
            else:
                i = 5

        # write processed df to file
        processing_dir = Path(self.config_dict['processing_dir'])
        processed_bursts_df.to_pickle(
            processing_dir.joinpath('processed_bursts.pickle'))

        # if not all have been processed, raise an error to avoid
        # false time-series processing
        if i == 4:
            raise RuntimeError(
                'Not all bursts have been successfully processed')

        # --------------------------------------------
        # 6 run the timeseries creation
        if timeseries or timescan:
            tseries_df = burst_batch.ards_to_timeseries(
                self.burst_inventory, self.config_file)

        # --------------------------------------------
        # 7 run the timescan creation
        if timescan:
            burst_batch.timeseries_to_timescan(self.burst_inventory,
                                               self.config_file)

        # --------------------------------------------
        # 8 mosaic the time-series
        if mosaic and timeseries:
            burst_batch.mosaic_timeseries(self.burst_inventory,
                                          self.config_file)

        # --------------------------------------------
        # 9 mosaic the timescans
        if mosaic and timescan:
            burst_batch.mosaic_timescan(self.burst_inventory, self.config_file)
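
# Hypothetical call of bursts_to_ards on a burst-based project object; the
# `project` name is an assumption, only the keyword arguments and the raised
# RuntimeError come from the method above.
try:
    project.bursts_to_ards(timeseries=True, timescan=True,
                           mosaic=True, overwrite=False)
except RuntimeError as err:
    # raised when not all bursts could be processed within three attempts
    print(f'Burst processing incomplete: {err}')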
Example n. 12
def mosaic_timescan(burst_inventory, config_file):
    """

    :param burst_inventory:
    :param config_file:
    :return:
    """

    print(' -----------------------------------------------------------------')
    logger.info('Mosaicking time-scan layers.')
    print(' -----------------------------------------------------------------')

    # -------------------------------------
    # 1 load project config
    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)
        processing_dir = Path(config_dict['processing_dir'])
        metrics = config_dict['processing']['time-scan_ARD']['metrics']

    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])

    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    # create output folder
    ts_dir = processing_dir.joinpath('Mosaic/Timescan')
    ts_dir.mkdir(parents=True, exist_ok=True)

    temp_mosaic = processing_dir.joinpath('Mosaic/temp')
    temp_mosaic.mkdir(parents=True, exist_ok=True)
    # -------------------------------------
    # 2 create iterable
    # loop through each product
    iter_list, vrt_iter_list = [], []
    for product, metric in itertools.product(PRODUCT_LIST, metrics):

        for track in burst_inventory.Track.unique():

            filelist = list(
                processing_dir.glob(
                    f'[A,D]{track}_IW*/Timescan/*{product}.{metric}.tif'))

            if not filelist:
                continue

            temp_acq = temp_mosaic.joinpath(f'{track}.{product}.{metric}.tif')

            if temp_acq:
                iter_list.append(
                    [track, metric, product, temp_acq, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run vrt creation
    for task in executor.as_completed(func=mosaic.gd_mosaic_slc_acquisition,
                                      iterable=iter_list):
        task.result()

    iter_list, vrt_iter_list = [], []
    for product, metric in itertools.product(PRODUCT_LIST, metrics):

        list_of_files = list(temp_mosaic.glob(f'*{product}.{metric}.tif'))

        if not list_of_files:
            continue

        # turn to OTB readable format
        list_of_files = ' '.join([str(file) for file in list_of_files])

        # create namespace for outfile
        outfile = ts_dir.joinpath(f'{product}.{metric}.tif')
        check_file = outfile.parent.joinpath(f'.{outfile.name[:-4]}.processed')

        if check_file.exists():
            logger.info(f'Mosaic layer {outfile.name} already processed.')
            continue

        logger.info(f'Mosaicking layer {outfile.name}.')

        iter_list.append([list_of_files, outfile, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run mosaicking
    for task in executor.as_completed(func=mosaic.gd_mosaic,
                                      iterable=iter_list):
        task.result()

    ras.create_tscan_vrt(ts_dir, config_file)

    # remove temp folder
    h.remove_folder_content(temp_mosaic)
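
# Illustrative invocation of mosaic_timescan. The burst inventory is assumed
# to be a (Geo)DataFrame exposing a 'Track' column (as used in the loop
# above), and config_file the project's processing.json; values are placeholders.
import pandas as pd

burst_inventory = pd.DataFrame({'Track': ['117', '117', '44']})
mosaic_timescan(burst_inventory, '/data/project/processing.json')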
Example n. 13
def mosaic_timeseries(burst_inventory, config_file):
    print(' -----------------------------------------------------------------')
    logger.info('Mosaicking time-series layers.')
    print(' -----------------------------------------------------------------')

    # -------------------------------------
    # 1 load project config
    with open(config_file, 'r') as ard_file:
        config_dict = json.load(ard_file)
        processing_dir = Path(config_dict['processing_dir'])

    # create output folder
    ts_dir = processing_dir.joinpath('Mosaic/Timeseries')
    ts_dir.mkdir(parents=True, exist_ok=True)

    temp_mosaic = processing_dir.joinpath('Mosaic/temp')
    temp_mosaic.mkdir(parents=True, exist_ok=True)
    # -------------------------------------
    # 2 create iterable
    # loop through each product
    iter_list, vrt_iter_list = [], []
    for product in PRODUCT_LIST:

        for track in burst_inventory.Track.unique():

            dates = [
                date[2:] for date in sorted(burst_inventory.Date[
                    burst_inventory.Track == track].unique())
            ]

            for i, date in enumerate(dates):

                if 'coh' in product:
                    # we do the try, since for the last date
                    # there is no dates[i+1] for coherence
                    try:
                        temp_acq = temp_mosaic.joinpath(
                            f'{i}.{date}.{dates[i + 1]}.{track}.{product}.tif')
                    except IndexError:
                        temp_acq = None
                else:
                    temp_acq = temp_mosaic.joinpath(
                        f'{i}.{date}.{track}.{product}.tif')

                if temp_acq:
                    iter_list.append(
                        [track, date, product, temp_acq, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run vrt creation
    for task in executor.as_completed(func=mosaic.gd_mosaic_slc_acquisition,
                                      iterable=iter_list):
        task.result()

    # mosaic the acquisitions
    iter_list, vrt_iter_list = [], []
    for product in PRODUCT_LIST:

        outfiles = []
        for i in range(len(dates)):

            list_of_files = list(temp_mosaic.glob(f'{i}.*{product}.tif'))

            if not list_of_files:
                continue

            datelist = []
            for file in list_of_files:
                if 'coh' in product:
                    datelist.append(
                        f"{file.name.split('.')[2]}_{file.name.split('.')[1]}")
                else:
                    datelist.append(file.name.split('.')[1])

            # get start and endate of mosaic
            start, end = sorted(datelist)[0], sorted(datelist)[-1]
            list_of_files = ' '.join([str(file) for file in list_of_files])

            # create namespace for output file
            if start == end:
                outfile = ts_dir.joinpath(f'{i + 1:02d}.{start}.{product}.tif')

                # list_of_files was joined into a single string above; with
                # only one file it is just that file's path, so we can move it
                shutil.move(list_of_files, outfile)
                outfiles.append(outfile)
                continue

            else:
                outfile = ts_dir.joinpath(
                    f'{i + 1:02d}.{start}-{end}.{product}.tif')

            # create namespace for check_file
            check_file = outfile.parent.joinpath(
                f'.{outfile.name[:-4]}.processed')

            if check_file.exists():
                logger.info(f'Mosaic layer {outfile} already processed.')
                continue

            # append to list of outfile for vrt creation
            outfiles.append(outfile)
            iter_list.append([list_of_files, outfile, config_file])

        vrt_iter_list.append([ts_dir, product, outfiles])

    # now we run with godale, which works also with 1 worker
    executor = Executor(executor=config_dict['executor_type'],
                        max_workers=config_dict['max_workers'])

    # run mosaicking
    for task in executor.as_completed(func=mosaic.gd_mosaic,
                                      iterable=iter_list):
        task.result()

    # run mosaicking vrts
    for task in executor.as_completed(func=mosaic.create_timeseries_mosaic_vrt,
                                      iterable=vrt_iter_list):
        task.result()

    # remove temp folder
    h.remove_folder_content(temp_mosaic)
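
# Illustrative invocation of mosaic_timeseries. Here the inventory needs the
# 'Track' and 'Date' columns read by the loops above; all values are
# placeholders, not real acquisitions.
import pandas as pd

burst_inventory = pd.DataFrame({
    'Track': ['117', '117', '44', '44'],
    'Date': ['20200101', '20200113', '20200102', '20200114'],
})
mosaic_timeseries(burst_inventory, '/data/project/processing.json')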
Example n. 14
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False,
                 ncores=os.cpu_count()):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to the full master SLC scene
        swath (str): sub-swath of the burst
        master_burst_nr (int): index number of the master burst
        master_burst_id (str): unique id of the master burst
        proc_file (str): path to the processing parameters (json) file
        out_dir (str): directory for the final output
        temp_dir (str): directory for temporary products
        slave_file (str): path to the full slave SLC scene (for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to also calculate interferometric coherence
        remove_slave_import (bool): whether to delete the imported slave burst
        ncores (int): number of cpus used - useful for parallel processing
    '''
    if type(remove_slave_import) == str:
        if remove_slave_import == 'True':
            remove_slave_import = True
        elif remove_slave_import == 'False':
            remove_slave_import = False
    if type(coherence) == str:
        if coherence == 'True':
            coherence = True
        elif coherence == 'False':
            coherence = False
    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
     
    # ---------------------------------------------------------------------
    # 1 Import
    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            master_file, master_import, import_log,
            swath, master_burst_nr, polars, ncores
        )
        if return_code != 0:
            h.delete_dimap(master_import)
            return return_code

    imported = '{}.dim'.format(master_import)
    # ---------------------------------------------------------------------
    # 2 H-A-Alpha
    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = slc._ha_alpha(imported,
                                out_haa, haa_log, 
                                ard['remove pol speckle'], 
                                ard['pol speckle filter'],
                                ncores
        )

        # delete files in case of error
        if return_code != 0:
            h.delete_dimap(out_haa)
            h.delete_dimap(master_import)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(
            master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_haa), out_htc, haa_tc_log, 
            ard['resolution'], ard['dem'], ncores
        )

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)
        
        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.delete_dimap(out_htc)
            h.delete_dimap(master_import)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 3 Calibration
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = slc._calibration(
        imported, out_cal, cal_log, ard['product type'], ncores)

    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(out_cal)
        h.delete_dimap(master_import)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # ---------------------------------------------------------------------
    # 4 Speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(
            temp_dir, '{}_speckle_import'.format(master_burst_id)
        )
        speckle_log = opj(
            out_dir, '{}_speckle.err_log'.format(master_burst_id)
        )

        return_code = common._speckle_filter(
            '{}.dim'.format(out_cal), speckle_import, speckle_log,
            ard['speckle filter'], ncores
        )

        # remove input 
        h.delete_dimap(out_cal)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(speckle_import)
            h.delete_dimap(master_import)
            return return_code

        # point out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # ---------------------------------------------------------------------
    # 5 Terrain Flattening
    if ard['product type'] == 'RTC-gamma0':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(
            master_burst_id))
        # do the TF
        return_code = common._terrain_flattening(
            '{}.dim'.format(out_cal), out_rtc, rtc_log, ard['dem'], ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_rtc)
            h.delete_dimap(master_import)
            return return_code

        # point out_cal to the terrain-flattened product for further processing
        out_cal = out_rtc

    # ---------------------------------------------------------------------
    # 7 to dB scale
    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = common._linear_to_db('{}.dim'.format(out_cal), out_db, db_log, ncores)

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_db)
            h.delete_dimap(master_import)
            return return_code

        # set out_cal to out_db for further processing
        out_cal = out_db
 
    # ---------------------------------------------------------------------
    # 8 Geocode backscatter
    if ard['product type'] != "Coherence_only":
        out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_cal), out_tc, tc_log,
            ard['resolution'], ard['dem'], ncores)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # last check on backscatter data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # we move backscatter to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 9 Layover/Shadow mask
    if ard['create ls mask']:
        
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = common._ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                                      ard['resolution'], ard['dem'], ncores)

        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    if ard['product type'] != "Coherence_only":
        h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            slave_file, slave_import, import_log, swath, slave_burst_nr,
            polars, ncores
        )

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = slc._coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import),
                               out_coreg,
                               coreg_log, ard['dem'], ncores)

        # remove imports
        h.delete_dimap(master_import)
        
        if remove_slave_import is True:
            h.delete_dimap(slave_import)
        
        # delete output if command failed for some reason and return   
        if return_code != 0:
            h.delete_dimap(out_coreg)
            h.delete_dimap(slave_import)
            return return_code

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = slc._coherence('{}.dim'.format(out_coreg),
                                 out_coh, coh_log, coh_polars, ncores)

        # remove coreg tmp files
        h.delete_dimap(out_coreg)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_coh)
            h.delete_dimap(slave_import)
            return return_code

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_coh), out_tc, tc_log, 
            ard['resolution'], ard['dem'], ncores)
        
        # remove tmp files
        h.delete_dimap(out_coh)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            h.delete_dimap(slave_import)
            return return_code
        
        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

    # write out check file for tracking that it is processed
    with open(opj(out_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
    
    return return_code
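
# A hedged usage sketch for burst_to_ard, processing one master burst together
# with a slave burst for coherence; every path, id and burst number below is a
# placeholder, not a real Sentinel-1 product.
return_code = burst_to_ard(
    master_file='/data/slc/S1A_IW_SLC__master.zip',
    swath='IW1',
    master_burst_nr=3,
    master_burst_id='A117_IW1_7453_20200101',
    proc_file='/data/project/processing.json',
    out_dir='/data/project/processing/A117_IW1_7453',
    temp_dir='/data/project/temp',
    slave_file='/data/slc/S1A_IW_SLC__slave.zip',
    slave_burst_nr=3,
    slave_burst_id='A117_IW1_7453_20200113',
    coherence=True,
    remove_slave_import=True,
)
if return_code != 0:
    print('Burst processing failed with code', return_code)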