Example #1
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 polarimetry=False,
                 pol_speckle_filter=False,
                 resolution=20,
                 product_type='GTCgamma',
                 speckle_filter=False,
                 to_db=False,
                 ls_mask_create=False,
                 dem='SRTM 1sec HGT',
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the burst within the subswath
        master_burst_id (str): unique id of the burst, used for file naming
        out_dir (str): path to the output directory
        temp_dir (str): path to the temporary processing directory
        slave_file (str): path to full slave SLC scene (needed for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to calculate interferometric coherence
        polarimetry (bool): whether to create the H-A-Alpha decomposition
        pol_speckle_filter (bool): whether to apply a polarimetric speckle
            filter before the decomposition
        resolution (int): resolution of the output product in metres
        product_type (str): type of backscatter product
            ('GTCgamma' or 'RTC')
        speckle_filter (bool): whether to apply a speckle filter
        to_db (bool): whether to convert the backscatter to dB scale
        ls_mask_create (bool): whether to create a layover/shadow mask
        dem (str): DEM used for terrain-related processing steps
        remove_slave_import (bool): whether to delete the imported slave
            product after co-registration

    '''

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if polarimetry:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, pol_speckle_filter)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_ha_alpha'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc,
                     opj(out_dir, '{}_ha_alpha'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, product_type)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if speckle_filter:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if product_type == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    if to_db:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_BS'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_BS_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      resolution, dem)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_BS'.format(master_burst_id)))

    if ls_mask_create:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        # filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        # filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg2(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
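
A minimal usage sketch for this variant, assuming placeholder paths and burst identifiers; the helper module h, the opj alias for os.path.join and the underscore-prefixed processing wrappers must be importable from the surrounding package:

# Hypothetical call; all paths and burst identifiers are placeholders.
return_code = burst_to_ard(
    master_file='/data/S1A_IW_SLC__1SDV_20180101.zip',
    swath='IW1',
    master_burst_nr=3,
    master_burst_id='A117_IW1_3',
    out_dir='/data/out',
    temp_dir='/data/temp',
    product_type='RTC',      # triggers the terrain-flattening branch
    speckle_filter=True,
    to_db=True,
    ls_mask_create=True,
)
if return_code != 0:
    print('burst processing failed with return code', return_code)

A non-zero return code means one of the SNAP GPT steps failed; in that case the temporary directory has already been cleaned up.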
Example #2
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the burst within the subswath
        master_burst_id (str): unique id of the burst, used for file naming
        proc_file (str): path to the JSON file with the processing parameters
        out_dir (str): path to the output directory
        temp_dir (str): path to the temporary processing directory
        slave_file (str): path to full slave SLC scene (needed for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to calculate interferometric coherence
        remove_slave_import (bool): whether to delete the imported slave
            product after co-registration

    '''

    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, ard['remove pol speckle'])

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, ard['product type'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if ard['product type'] == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    if ard['create ls mask']:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               ard['resolution'], ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr, polars)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log,
                                 coh_polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
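
This variant reads its settings from a JSON file instead of keyword arguments. Below is a sketch of such a proc_file, inferred from the keys accessed above ('processing parameters' -> 'single ARD'); the real schema may contain additional fields:

import json

# Hypothetical parameter file; only the keys read by this function are
# shown, with plausible placeholder values.
proc_params = {
    'processing parameters': {
        'single ARD': {
            'polarisation': 'VV, VH',
            'H-A-Alpha': True,
            'remove pol speckle': False,
            'resolution': 20,
            'dem': 'SRTM 1sec HGT',
            'product type': 'RTC',
            'remove speckle': True,
            'to db': True,
            'create ls mask': True,
            'coherence bands': 'VV, VH',
        }
    }
}

with open('ard.json', 'w') as file:
    json.dump(proc_params, file, indent=2)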
Example #3
def grd_to_ard(filelist, config_file):
    """Main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The defaults can be overridden via the processing parameters in the
    configuration file. The function can also handle multiple inputs of the
    same acquisition, given that they are consecutive data takes.

    :param filelist: must be a list with one or more
                     absolute paths to GRD scene(s)
    :param config_file: path to the JSON configuration file
    :return: tuple of the input filelist, the path to the backscatter
             product, the path to the layover/shadow mask (or None) and
             an error (or None)
    """

    from ost.s1.s1scene import Sentinel1Scene

    # ----------------------------------------------------
    # 1 load relevant config parameters
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        ard = config_dict['processing']['single_ARD']
        processing_dir = Path(config_dict['processing_dir'])
        subset = config_dict['subset']

    # ----------------------------------------------------
    # 2 define final destination dir/file and ls mask

    # get acq data and track from first scene in list
    first = Sentinel1Scene(Path(filelist[0]).stem)
    acquisition_date = first.start_date
    track = first.rel_orbit

    logger.info(
        f'Processing acquisition from {acquisition_date} over track {track}.'
    )

    # construct namespace for out directory etc.
    out_dir = processing_dir.joinpath(f'{track}/{acquisition_date}')
    out_dir.mkdir(parents=True, exist_ok=True)
    file_id = f'{acquisition_date}_{track}'
    out_final = out_dir.joinpath(f'{file_id}_bs')
    out_ls_mask = out_dir.joinpath(f'{file_id}_LS')

    suf = '.tif' if ard['to_tif'] else '.dim'

    # ----------------------------------------------------
    # 3 check if already processed
    if out_dir.joinpath('.processed').exists() and \
            out_final.with_suffix(suf).exists():
        logger.info(
            f'Acquisition from {acquisition_date} of track {track} '
            f'already processed'
        )

        if out_ls_mask.with_suffix(suf).exists():
            out_ls = out_ls_mask.with_suffix(suf)
        else:
            out_ls = None

        return filelist, out_final.with_suffix(suf), out_ls, None

    # ----------------------------------------------------
    # 4 run the processing routine

    # this might happen in the create_ard from s1scene class
    if not config_dict['temp_dir']:
        temp_dir = processing_dir.joinpath('temp')
        temp_dir.mkdir(parents=True, exist_ok=True)
    else:
        temp_dir = config_dict['temp_dir']

    with TemporaryDirectory(prefix=f"{temp_dir}/") as temp:

        # convert temp directory to Path object
        temp = Path(temp)

        # ---------------------------------------------------------------------
        # 4.1 Import
        # slice assembly if more than one scene
        if len(filelist) > 1:

            # if more than one frame import all files
            for file in filelist:

                # unzip for faster import?
                unpack = None
                file = Path(file)
                if file.suffix == '.zip':
                    with zipfile.ZipFile(file, 'r') as zip_ref:
                        zip_ref.extractall(temp)

                    file = temp.joinpath(f'{file.stem}.SAFE')
                    unpack = True

                # create namespace for temporary imported product
                grd_import = temp.joinpath(f'{file.stem}_imported')

                # create namespace for import log
                logfile = out_dir.joinpath(f'{file.stem}.Import.errLog')

                # set subset temporarily to false for the import routine
                config_dict['subset'] = False
                # frame import
                try:
                    grd.grd_frame_import(
                        file, grd_import, logfile, config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                config_dict['subset'] = subset

                if unpack:
                    h.remove_folder_content(file)
                    file.rmdir()

            # create list of scenes for full acquisition in
            # preparation of slice assembly
            scenelist = ' '.join(
                [str(file) for file in list(temp.glob('*imported.dim'))]
            )

            # create namespace for temporary slice assembled import product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for slice assembled log
            logfile = out_dir.joinpath(f'{file_id}._slice_assembly.errLog')

            # run slice assembly
            try:
                grd.slice_assembly(scenelist, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete imported frames
            for file in filelist:
                h.delete_dimap(temp.joinpath(f'{Path(file).stem}_imported'))

            # subset mode after slice assembly
            if subset:

                # create namespace for temporary subset product
                grd_subset = temp.joinpath(f'{file_id}_imported_subset')

                # create namespace for subset log
                logfile = out_dir.joinpath(f'{file_id}._subset.errLog')

                # run subset routine
                try:
                    grd.grd_subset_georegion(
                        grd_import.with_suffix('.dim'), grd_subset, logfile,
                        config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                # delete slice assembly input to subset
                h.delete_dimap(grd_import)

                # set subset to import for subsequent functions
                grd_import = grd_subset

        # single scene case
        else:

            file = Path(filelist[0])

            # unzip for faster import
            unpack = None
            if Path(file).suffix == '.zip':
                with zipfile.ZipFile(file, 'r') as zip_ref:
                    zip_ref.extractall(temp)

                file = temp.joinpath(f'{file.stem}.SAFE')
                unpack = True

            # create namespace for temporary imported product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for import log
            logfile = out_dir.joinpath(f'{file_id}.Import.errLog')

            # run frame import
            try:
                grd.grd_frame_import(file, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            if unpack:
                h.remove_folder_content(file)
                file.rmdir()

        # set input for next step
        infile = grd_import.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.2 GRD Border Noise
        if ard['remove_border_noise'] and not subset:

            # loop through possible polarisations
            for polarisation in ['VV', 'VH', 'HH', 'HV']:

                # get input file
                file = list(temp.glob(
                    f'{file_id}_imported*data/Intensity_{polarisation}.img'
                ))

                # remove border noise
                if len(file) == 1:
                    # run grd Border Remove
                    grd.grd_remove_border(file[0])

        # ---------------------------------------------------------------------
        # 4.3 Calibration

        # create namespace for temporary calibrated product
        calibrated = temp.joinpath(f'{file_id}_cal')

        # create namespace for calibration log
        logfile = out_dir.joinpath(f'{file_id}.calibration.errLog')

        # run calibration
        try:
            grd.calibration(infile, calibrated, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input
        h.delete_dimap(infile.with_suffix(''))

        # input for next step
        infile = calibrated.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.4 Multi-looking
        if int(ard['resolution']) >= 20:

            # create namespace for temporary multi-looked product
            multi_looked = temp.joinpath(f'{file_id}_ml')

            # create namespace for multi-look log
            logfile = out_dir.joinpath(f'{file_id}.multilook.errLog')

            # run multi-looking
            try:
                grd.multi_look(infile, multi_looked, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = multi_looked.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.5 Layover shadow mask
        out_ls = None  # set to none for final return statement
        if ard['create_ls_mask'] is True:

            # create namespace for temporary ls mask product
            ls_mask = temp.joinpath(f'{file_id}_ls_mask')

            # create namespace for ls mask log
            logfile = out_dir.joinpath(f'{file_id}.ls_mask.errLog')

            # run ls mask routine
            try:
                common.ls_mask(infile, ls_mask, logfile, config_dict)
                out_ls = out_ls_mask.with_suffix('.dim')
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # polygonize
            ls_raster = list(ls_mask.with_suffix('.data').glob('*img'))[0]
            ras.polygonize_ls(ls_raster, ls_mask.with_suffix('.json'))

        # ---------------------------------------------------------------------
        # 4.6 Speckle filtering
        if ard['remove_speckle']:

            # create namespace for temporary speckle filtered product
            filtered = temp.joinpath(f'{file_id}_spk')

            # create namespace for speckle filter log
            logfile = out_dir.joinpath(f'{file_id}.Speckle.errLog')

            # run speckle filter
            try:
                common.speckle_filter(infile, filtered, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = filtered.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.7 Terrain flattening
        if ard['product_type'] == 'RTC-gamma0':

            # create namespace for temporary terrain flattened product
            flattened = temp.joinpath(f'{file_id}_flat')

            # create namespace for terrain flattening log
            logfile = out_dir.joinpath(f'{file_id}.tf.errLog')

            # run terrain flattening
            try:
                common.terrain_flattening(
                    infile, flattened, logfile, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = flattened.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.8 Linear to db
        if ard['to_db']:

            # create namespace for temporary db scaled product
            db_scaled = temp.joinpath(f'{file_id}_db')

            # create namespace for db scaled log
            logfile = out_dir.joinpath(f'{file_id}.db.errLog')

            # run db scaling routine
            try:
                common.linear_to_db(infile, db_scaled, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # set input for next step
            infile = db_scaled.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.9 Geocoding

        # create namespace for temporary geocoded product
        geocoded = temp.joinpath(f'{file_id}_bs')

        # create namespace for geocoding log
        logfile = out_dir.joinpath(f'{file_id}_bs.errLog')

        # run geocoding
        try:
            common.terrain_correction(infile, geocoded, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input file
        h.delete_dimap(infile.with_suffix(''))

        # define final destination
        out_final = out_dir.joinpath(f'{file_id}_bs')

        # ---------------------------------------------------------------------
        # 4.10 Create an outline
        ras.image_bounds(geocoded.with_suffix('.data'))

        # ---------------------------------------------------------------------
        # 4.11 Copy LS Mask vector to data dir
        if ard['create_ls_mask'] is True:
            ls_mask.with_suffix('.json').rename(
                geocoded.with_suffix('.data')
                .joinpath(ls_mask.name).with_suffix('.json')
            )

        # ---------------------------------------------------------------------
        # 4.12 Move to output directory
        h.move_dimap(geocoded, out_final, ard['to_tif'])

    # ---------------------------------------------------------------------
    # 5 write processed file to keep track of files already processed
    with open(out_dir.joinpath('.processed'), 'w') as file:
        file.write('passed all tests \n')

    return filelist, out_final.with_suffix('.dim'), out_ls, None
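
grd_to_ard reads a different, underscore-style configuration schema. Below is a sketch of a matching config_file, inferred from the keys accessed above; the actual OST schema may include more sections:

import json

# Hypothetical configuration; only the keys read by grd_to_ard are shown.
config = {
    'processing_dir': '/data/processing',
    'temp_dir': '/data/temp',
    'subset': False,
    'processing': {
        'single_ARD': {
            'resolution': 20,
            'product_type': 'RTC-gamma0',
            'remove_border_noise': True,
            'remove_speckle': True,
            'create_ls_mask': True,
            'to_db': True,
            'to_tif': False,
        }
    }
}

with open('config.json', 'w') as file:
    json.dump(config, file, indent=2)

# filelist entries are absolute paths to GRD scenes (placeholders here):
# filelist, out_bs, out_ls, error = grd_to_ard(
#     ['/data/S1A_IW_GRDH_1SDV_20200101.zip'], 'config.json'
# )

On success the function returns the input filelist, the path to the backscatter product, the path to the layover/shadow mask (or None) and None as error.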
Example #4
def create_backscatter_layers(
        import_file, out_dir, burst_prefix, config_dict
):
    """Pipeline for backscatter processing

    :param import_file:
    :param out_dir:
    :param burst_prefix:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']

    # temp dir for intermediate files
    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        temp = Path(temp)
        # ---------------------------------------------------------------------
        # 1 Calibration

        # create namespace for temporary calibrated product
        out_cal = temp.joinpath(f'{burst_prefix}_cal')

        # create namespace for calibrate log
        cal_log = out_dir.joinpath(f'{burst_prefix}_cal.err_log')

        # run calibration on imported scene
        try:
            slc.calibration(
                import_file, out_cal, cal_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, None, error

        # ---------------------------------------------------------------------
        # 2 Speckle filtering
        if ard['remove_speckle']:

            # create namespace for temporary speckle filtered product
            speckle_import = temp.joinpath(f'{burst_prefix}_speckle_import')

            # create namespace for speckle filter log
            speckle_log = out_dir.joinpath(f'{burst_prefix}_speckle.err_log')

            # run speckle filter on calibrated input
            try:
                common.speckle_filter(
                    out_cal.with_suffix('.dim'), speckle_import, speckle_log,
                    config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # remove input
            h.delete_dimap(out_cal)

            # reset out_cal to the speckle-filtered product for the
            # following steps
            out_cal = speckle_import

        # ---------------------------------------------------------------------
        # 3 dB scaling
        if ard['to_db']:

            # create namespace for temporary db scaled product
            out_db = temp.joinpath(f'{burst_prefix}_cal_db')

            # create namespace for db scaling log
            db_log = out_dir.joinpath(f'{burst_prefix}_cal_db.err_log')

            # run db scaling on calibrated/speckle filtered input
            try:
                common.linear_to_db(
                    out_cal.with_suffix('.dim'), out_db, db_log, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # remove tmp files
            h.delete_dimap(out_cal)

            # set out_cal to out_db for further processing
            out_cal = out_db

        # ---------------------------------------------------------------------
        # 4 Geocoding

        # create namespace for temporary geocoded product
        out_tc = temp.joinpath(f'{burst_prefix}_bs')

        # create namespace for geocoding log
        tc_log = out_dir.joinpath(f'{burst_prefix}_bs_tc.err_log')

        # run terrain correction on the calibrated/speckle-filtered/dB input
        try:
            common.terrain_correction(
                out_cal.with_suffix('.dim'), out_tc, tc_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, None, error

        # ---------------------------------------------------------------------
        # 5 Create an outline
        ras.image_bounds(out_tc.with_suffix('.data'))

        # ---------------------------------------------------------------------
        # 6 Layover/Shadow mask
        out_ls = None  # set to none for final return statement
        if ard['create_ls_mask'] is True:

            # create namespace for temporary ls mask product
            ls_mask = temp.joinpath(f'{burst_prefix}_ls_mask')

            # create namespace for ls mask log
            logfile = out_dir.joinpath(f'{burst_prefix}.ls_mask.errLog')

            # run ls mask routine
            try:
                common.ls_mask(
                    out_cal.with_suffix('.dim'), ls_mask, logfile, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # polygonize
            ls_raster = list(ls_mask.with_suffix('.data').glob('*img'))[0]
            ras.polygonize_ls(ls_raster, ls_mask.with_suffix('.json'))

            out_ls = out_tc.with_suffix('.data')\
                .joinpath(ls_mask.name).with_suffix('.json')

            # move to product folder
            ls_mask.with_suffix('.json').rename(out_ls)

        # move final backscatter product to actual output directory
        h.move_dimap(
            out_tc, out_dir.joinpath(f'{burst_prefix}_bs'), ard['to_tif']
        )

        # write out check file for tracking that it is processed
        with open(out_dir.joinpath('.bs.processed'), 'w+') as file:
            file.write('passed all tests \n')

        return (
            str(out_dir.joinpath(f'{burst_prefix}_bs').with_suffix('.dim')),
            str(out_ls),
            None
        )
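
A usage sketch, assuming a config_dict of the shape shown for grd_to_ard above and an already imported burst; paths and the burst prefix are placeholders:

from pathlib import Path

# Hypothetical inputs; import_file points to an imported burst in BEAM-DIMAP.
out_bs, out_ls, error = create_backscatter_layers(
    import_file=Path('/data/temp/A117_IW1_3_import.dim'),
    out_dir=Path('/data/out'),
    burst_prefix='A117_IW1_3',
    config_dict=config,  # as sketched for grd_to_ard above
)
if error is not None:
    print('backscatter pipeline failed:', error)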
Example #5
def create_coherence_layers(
        master_import, slave_import, out_dir,
        master_prefix, config_dict
):
    """Pipeline for Dual-polarimetric decomposition

    :param master_import:
    :param slave_import:
    :param out_dir:
    :param master_prefix:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']

    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        temp = Path(temp)
        # ---------------------------------------------------------------
        # 1 Co-registration
        # create namespace for temporary co-registered stack
        out_coreg = temp.joinpath(f'{master_prefix}_coreg')

        # create namespace for co-registration log
        coreg_log = out_dir.joinpath(f'{master_prefix}_coreg.err_log')

        # run co-registration
        try:
            slc.coreg2(
                master_import, slave_import, out_coreg, coreg_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            h.delete_dimap(out_coreg)

            # remove imports
            h.delete_dimap(master_import)
            return None, error

        # remove imports
        h.delete_dimap(master_import)
        h.delete_dimap(slave_import)

        # ---------------------------------------------------------------
        # 2 Coherence calculation

        # create namespace for temporary coherence product
        out_coh = temp.joinpath(f'{master_prefix}_coherence')

        # create namespace for coherence log
        coh_log = out_dir.joinpath(f'{master_prefix}_coh.err_log')

        # run coherence estimation
        try:
            slc.coherence(
                out_coreg.with_suffix('.dim'), out_coh, coh_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # ---------------------------------------------------------------
        # 3 Geocoding

        # create namespace for temporary geocoded product
        out_tc = temp.joinpath(f'{master_prefix}_coh')

        # create namespace for geocoded log
        tc_log = out_dir.joinpath(f'{master_prefix}_coh_tc.err_log')

        # run geocoding
        try:
            common.terrain_correction(
                out_coh.with_suffix('.dim'), out_tc, tc_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error

        # ---------------------------------------------------------------
        # 4 Checks and Clean-up

        # remove tmp files
        h.delete_dimap(out_coh)

        # ---------------------------------------------------------------------
        # 5 Create an outline
        ras.image_bounds(out_tc.with_suffix('.data'))

        # move to final destination
        h.move_dimap(
            out_tc, out_dir.joinpath(f'{master_prefix}_coh'), ard['to_tif']
        )

        # write out check file for tracking that it is processed
        with open(out_dir.joinpath('.coh.processed'), 'w+') as file:
            file.write('passed all tests \n')

        return (
            str(out_dir.joinpath(f'{master_prefix}_coh').with_suffix('.dim')),
            None
        )
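
A usage sketch; note that the co-registration consumes the master and slave imports, so both are deleted inside the function. All paths are placeholders:

from pathlib import Path

# Hypothetical inputs; both imports are burst products in BEAM-DIMAP format.
out_coh, error = create_coherence_layers(
    master_import=Path('/data/temp/A117_IW1_3_import.dim'),
    slave_import=Path('/data/temp/A117_IW1_3_slave_import.dim'),
    out_dir=Path('/data/out'),
    master_prefix='A117_IW1_3',
    config_dict=config,  # as sketched for grd_to_ard above
)
if error is not None:
    print('coherence pipeline failed:', error)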
Example #6
def create_polarimetric_layers(
        import_file, out_dir, burst_prefix, config_dict
):
    """Pipeline for Dual-polarimetric decomposition

    :param import_file:
    :param out_dir:
    :param burst_prefix:
    :param config_dict:
    :return:
    """

    # temp dir for intermediate files
    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:
        temp = Path(temp)
        # -------------------------------------------------------
        # 1 Polarimetric Decomposition

        # create namespace for temporary decomposed product
        out_haa = temp.joinpath(f'{burst_prefix}_h')

        # create namespace for decompose log
        haa_log = out_dir.joinpath(f'{burst_prefix}_haa.err_log')

        # run polarimetric decomposition
        try:
            slc.ha_alpha(import_file, out_haa, haa_log, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error
        # -------------------------------------------------------
        # 2 Geocoding

        # create namespace for temporary geocoded product
        out_htc = temp.joinpath(f'{burst_prefix}_pol')

        # create namespace for geocoding log
        haa_tc_log = out_dir.joinpath(f'{burst_prefix}_haa_tc.err_log')

        # run geocoding
        try:
            common.terrain_correction(
                out_haa.with_suffix('.dim'), out_htc, haa_tc_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error

        # set nans to 0 (issue from SNAP for polarimetric layers)
        for infile in list(out_htc.with_suffix('.data').glob('*.img')):

            with rasterio.open(str(infile), 'r') as src:
                meta = src.meta.copy()
                array = src.read()
                array[np.isnan(array)] = 0

            with rasterio.open(str(infile), 'w', **meta) as dest:
                dest.write(array)

        # ---------------------------------------------------------------------
        # 3 Create an outline
        ras.image_bounds(out_htc.with_suffix('.data'))

        # move to final destination
        ard = config_dict['processing']['single_ARD']
        h.move_dimap(
            out_htc, out_dir.joinpath(f'{burst_prefix}_pol'), ard['to_tif']
        )

        # write out check file for tracking that it is processed
        with open(out_dir.joinpath('.pol.processed'), 'w+') as file:
            file.write('passed all tests \n')

        return (
            str(out_dir.joinpath(f'{burst_prefix}_pol').with_suffix('.dim')),
            None
        )
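
A usage sketch analogous to the backscatter pipeline; rasterio and numpy must be importable at module level for the NaN fix above. Paths and the burst prefix are placeholders:

from pathlib import Path

# Hypothetical inputs; import_file points to an imported burst in BEAM-DIMAP.
out_pol, error = create_polarimetric_layers(
    import_file=Path('/data/temp/A117_IW1_3_import.dim'),
    out_dir=Path('/data/out'),
    burst_prefix='A117_IW1_3',
    config_dict=config,  # as sketched for grd_to_ard above
)
if error is not None:
    print('polarimetric pipeline failed:', error)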
Example #7
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False,
                 ncores=os.cpu_count()):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the burst within the subswath
        master_burst_id (str): unique id of the burst, used for file naming
        proc_file (str): path to the JSON file with the processing parameters
        out_dir (str): path to the output directory
        temp_dir (str): path to the temporary processing directory
        slave_file (str): path to full slave SLC scene (needed for coherence)
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to calculate interferometric coherence
        remove_slave_import (bool): whether to delete the imported slave
            product after co-registration
        ncores (int): number of cpus used - useful for parallel processing
    '''
    if isinstance(remove_slave_import, str):
        if remove_slave_import == 'True':
            remove_slave_import = True
        elif remove_slave_import == 'False':
            remove_slave_import = False
    if isinstance(coherence, str):
        if coherence == 'True':
            coherence = True
        elif coherence == 'False':
            coherence = False
    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
     
    # ---------------------------------------------------------------------
    # 1 Import
    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            master_file, master_import, import_log, swath, master_burst_nr,
            polars, ncores
        )
        if return_code != 0:
            h.delete_dimap(master_import)
            return return_code

    imported = '{}.dim'.format(master_import)
    # ---------------------------------------------------------------------
    # 2 H-A-Alpha
    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = slc._ha_alpha(
            imported, out_haa, haa_log, ard['remove pol speckle'],
            ard['pol speckle filter'], ncores
        )

        # delete files in case of error
        if return_code != 0:
            h.delete_dimap(out_haa)
            h.delete_dimap(master_import)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(
            master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_haa), out_htc, haa_tc_log, 
            ard['resolution'], ard['dem'], ncores
        )

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)
        
        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.delete_dimap(out_htc)
            h.delete_dimap(master_import)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 3 Calibration
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = slc._calibration(
        imported, out_cal, cal_log, ard['product type'], ncores)

    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(out_cal)
        h.delete_dimap(master_import)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # ---------------------------------------------------------------------
    # 4 Speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(
            temp_dir, '{}_speckle_import'.format(master_burst_id)
        )
        speckle_log = opj(
            out_dir, '{}_speckle.err_log'.format(master_burst_id)
        )

        return_code = common._speckle_filter(
            '{}.dim'.format(out_cal), speckle_import, speckle_log,
            ard['speckle filter'], ncores
        )

        # remove input 
        h.delete_dimap(out_cal)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(speckle_import)
            h.delete_dimap(master_import)
            return return_code

        # reset out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # ---------------------------------------------------------------------
    # 5 Terrain Flattening
    if ard['product type'] == 'RTC-gamma0':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(
            master_burst_id))
        # do the TF
        return_code = common._terrain_flattening(
            '{}.dim'.format(out_cal), out_rtc, rtc_log, ard['dem'], ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_rtc)
            h.delete_dimap(master_import)
            return return_code

        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    # ---------------------------------------------------------------------
    # 7 to dB scale
    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = common._linear_to_db(
            '{}.dim'.format(out_cal), out_db, db_log, ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_db)
            h.delete_dimap(master_import)
            return return_code

        # set out_cal to out_db for further processing
        out_cal = out_db
 
    # ---------------------------------------------------------------------
    # 8 Geocode backscatter
    if ard['product type'] != "Coherence_only":
        out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_cal), out_tc, tc_log,
            ard['resolution'], ard['dem'], ncores)

        # last check on backscatter data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # we move backscatter to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 9 Layover/Shadow mask
    if ard['create ls mask']:
        
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = common._ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                                      ard['resolution'], ard['dem'], ncores)

        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    if ard['product type'] != "Coherence_only":
        h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            slave_file, slave_import, import_log, swath, slave_burst_nr,
            polars, ncores
        )

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = slc._coreg2(
            '{}.dim'.format(master_import), '{}.dim'.format(slave_import),
            out_coreg, coreg_log, ard['dem'], ncores
        )

        # remove imports
        h.delete_dimap(master_import)
        
        if remove_slave_import is True:
            h.delete_dimap(slave_import)
        
        # delete output if command failed for some reason and return   
        if return_code != 0:
            h.delete_dimap(out_coreg)
            h.delete_dimap(slave_import)
            return return_code

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = slc._coherence(
            '{}.dim'.format(out_coreg), out_coh, coh_log, coh_polars, ncores
        )

        # remove coreg tmp files
        h.delete_dimap(out_coreg)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_coh)
            h.delete_dimap(slave_import)
            return return_code

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_coh), out_tc, tc_log, 
            ard['resolution'], ard['dem'], ncores)
        
        # remove tmp files
        h.delete_dimap(out_coh)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            h.delete_dimap(slave_import)
            return return_code
        
        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

    # write out check file for tracking that it is processed
    with open(opj(out_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
    
    return return_code
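
A usage sketch for this parallel-aware variant; note that coherence and remove_slave_import may also arrive as the strings 'True'/'False' (e.g. from a command line), which the function normalises at the top. All paths and identifiers are placeholders:

import os

# Hypothetical call pairing a master and a slave burst for coherence.
return_code = burst_to_ard(
    master_file='/data/S1A_IW_SLC__1SDV_20180101.zip',
    swath='IW1',
    master_burst_nr=3,
    master_burst_id='A117_IW1_3',
    proc_file='ard.json',    # as sketched for Example #2
    out_dir='/data/out',
    temp_dir='/data/temp',
    slave_file='/data/S1A_IW_SLC__1SDV_20180113.zip',
    slave_burst_nr=3,
    slave_burst_id='A117_IW1_3_slave',
    coherence='True',
    remove_slave_import=True,
    ncores=os.cpu_count(),
)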