Example 1
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the burst
        master_burst_id (str): unique id of the burst
        proc_file (str): path to the processing parameters file
        out_dir (str): path to the output directory
        temp_dir (str): path to the temporary directory
        slave_file (str): path to full slave SLC scene
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to additionally calculate coherence
        remove_slave_import (bool): whether to delete the imported slave scene

    '''

    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, ard['remove pol speckle'])

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, ard['resolution'],
                                          ard['dem'])

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, ard['product type'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle filtered product for the following routine
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if ard['product type'] == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_rtc for further processing
        out_cal = out_rtc

    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      ard['resolution'], ard['dem'])

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    if ard['create ls mask']:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               ard['resolution'], ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr, polars)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log,
                                 coh_polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, ard['resolution'],
                                          ard['dem'])

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
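
Below is a minimal sketch of the proc_file structure that burst_to_ard reads above; the key names follow the lookups in the code ('processing parameters' -> 'single ARD'), while the file name, the values and the commented call are illustrative assumptions only.

import json

# hypothetical minimal proc_file matching the keys read by burst_to_ard above;
# values are placeholders, not the canonical OST defaults
example_proc = {
    'processing parameters': {
        'single ARD': {
            'polarisation': 'VV, VH',     # stripped of spaces before _import
            'H-A-Alpha': True,            # triggers the polarimetric branch
            'remove pol speckle': False,
            'product type': 'RTC',        # 'RTC' also triggers terrain flattening
            'remove speckle': True,
            'to db': True,
            'create ls mask': True,
            'coherence bands': 'VV, VH',
            'resolution': 20,
            'dem': 'SRTM 1sec HGT'
        }
    }
}

with open('ard.json', 'w') as f:
    json.dump(example_proc, f, indent=2)

# illustrative call (paths and burst identifiers are placeholders)
# return_code = burst_to_ard('S1A_..._SLC.zip', 'IW1', 3, 'T100_IW1_burst_3',
#                            'ard.json', 'out_dir', 'temp_dir')
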
Example 2
def grd_to_ard(filelist,
               output_dir,
               file_id,
               temp_dir,
               processing_dict,
               subset=None,
               polar='VV,VH,HH,HV'):
    '''The main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The output can be adapted by changing the processing parameters.
    The function can handle multiple inputs of the same acquisition,
    provided they are consecutive data takes.

    Args:
        filelist (list): must be a list with one or more absolute
                  paths to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the temporary folder
        processing_dict (dict): processing parameters (resolution,
                    product_type, ls_mask, speckle_filter, border_noise,
                    dem, to_db)
        subset: path to a vector file used to subset the imported data
                    (optional)
        polar (str): polarisations to process as comma-separated string

    Returns:
        nothing

    Notes:
        no explicit return value, since output file is our actual return
    '''

    # get processing parameters from dict
    resolution = processing_dict['resolution']
    product_type = processing_dict['product_type']
    ls_mask = processing_dict['ls_mask']
    speckle_filter = processing_dict['speckle_filter']
    border_noise = processing_dict['border_noise']
    dem = processing_dict['dem']
    to_db = processing_dict['to_db']

    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir,
                             '{}_imported'.format(os.path.basename(file)[:-5]))
            logfile = opj(
                output_dir,
                '{}.Import.errLog'.format(os.path.basename(file)[:-5]))
            _grd_frame_import(file, grd_import, logfile)

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}._slice_assembly.errLog'.format(file_id))
        _slice_assembly(scenelist, grd_import, logfile, polar)

        for file in filelist:
            h.delete_dimap(
                opj(temp_dir,
                    '{}_imported'.format(os.path.basename(str(file))[:-5])))

        if subset is not None:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            georegion = vec.shp_to_wkt(subset, buffer=0.1, envelope=True)
            _grd_subset_georegion('{}.dim'.format(grd_import), grd_subset,
                                  logfile, georegion)

            # delete slice assembly
            h.delete_dimap(grd_import)
            glob.glob('{}/{}*imported*.data'.format(temp_dir, file_id))

    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            _grd_frame_import(filelist[0], grd_import, logfile, polar)
        else:
            georegion = vec.shp_to_wkt(subset, buffer=0.1, envelope=True)
            _grd_frame_import_subset(filelist[0], grd_import, georegion,
                                     logfile, polar)
    # ---------------------------------------------------------------------
    # Remove the grd border noise from existing channels (OST routine)

    if border_noise:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(
                opj(temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # -------------------------------------------
    # in case we want to apply Speckle filtering
    if speckle_filter:
        infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
        logfile = opj(temp_dir, '{}.Speckle.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_imported_spk'.format(file_id))

        # run processing
        _grd_speckle_filter(infile, outfile, logfile)

        # define infile for next processing step
        infile = opj(temp_dir, '{}_imported_spk.dim'.format(file_id))
        data_dir = glob.glob(opj(temp_dir,
                                 '{}*imported*.data'.format(file_id)))
        h.delete_dimap(str(data_dir[0])[:-5])

    else:
        # let's calibrate the data
        infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]

    # ----------------------
    # do the calibration
    outfile = opj(temp_dir, '{}.{}'.format(file_id, product_type))
    logfile = opj(output_dir, '{}.Backscatter.errLog'.format(file_id))
    _grd_backscatter(infile, outfile, logfile, product_type, dem)

    data_dir = glob.glob(opj(temp_dir, '{}*imported*.data'.format(file_id)))
    h.delete_dimap(str(data_dir[0])[:-5])

    # input file for the following step
    infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))

    # to db
    if to_db:
        logfile = opj(output_dir, '{}.linToDb.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_{}_db'.format(file_id, product_type))
        _grd_to_db(infile, outfile, logfile)
        # delete
        h.delete_dimap(infile[:-4])
        # re-define infile
        infile = opj(temp_dir, '{}_{}_db.dim'.format(file_id, product_type))

    # -----------------------
    # let's geocode the data
    # infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))
    outfile = opj(temp_dir, '{}.{}.TC'.format(file_id, product_type))
    logfile = opj(output_dir, '{}.TC.errLog'.format(file_id))
    _grd_terrain_correction_deg(infile, outfile, logfile, resolution, dem)

    # move to final destination
    out_final = opj(output_dir, '{}.{}.TC'.format(file_id, product_type))

    # remove file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)

    shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(outfile), '{}.data'.format(out_final))

    # ----------------------------------------------
    # let's create a Layover shadow mask if needed
    if ls_mask is True:
        outfile = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        _grd_ls_mask(infile, outfile, logfile, resolution, dem)

        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(outfile), '{}.data'.format(out_ls_mask))

    # remove calibrated files
    h.delete_dimap(infile[:-4])
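
For this processing_dict-driven variant, a hedged usage sketch follows; the dictionary keys mirror the ones unpacked at the top of grd_to_ard, while all values and paths are placeholders.

# illustrative processing_dict with exactly the keys unpacked above
processing_dict = {
    'resolution': 20,            # output resolution in metres
    'product_type': 'GTCgamma',  # passed to _grd_backscatter
    'ls_mask': True,             # layover/shadow mask generation
    'speckle_filter': False,
    'border_noise': True,        # GRD border noise removal
    'dem': 'SRTM 1sec HGT',
    'to_db': True
}

# hypothetical call on a single GRD scene without subsetting
# grd_to_ard(['/data/S1A_IW_GRDH_..._0001.zip'], '/data/out', 'S1A_20180101',
#            '/data/temp', processing_dict, subset=None, polar='VV,VH')
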
Example 3
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 polarimetry=False,
                 pol_speckle_filter=False,
                 resolution=20,
                 product_type='GTCgamma',
                 speckle_filter=False,
                 to_db=False,
                 ls_mask_create=False,
                 dem='SRTM 1sec HGT',
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the burst
        master_burst_id (str): unique id of the burst
        out_dir (str):
        temp_dir (str):
        slave_file (str):
        slave_burst_nr (str):
        slave_burst_id (str):
        coherence (bool):
        polarimetry (bool):
        pol_speckle_filter (bool):
        resolution (int):
        product_type (str):
        speckle_filter (bool):
        to_db (bool):
        ls_mask_create (bool):
        dem (str):
        remove_slave_import (bool):

    '''

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if polarimetry:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, pol_speckle_filter)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_ha_alpha'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, resolution, dem)

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc,
                     opj(out_dir, '{}_ha_alpha'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, product_type)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if speckle_filter:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle filtered product for the following routine
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if product_type == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_rtc for further processing
        out_cal = out_rtc

    if to_db:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_BS'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_BS_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      resolution, dem)

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_BS'.format(master_burst_id)))

    if ls_mask_create:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        # filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        # filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg2(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import is True:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, resolution, dem)
        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
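
A possible invocation of this keyword-argument variant for a coherence pair; every path, burst number and id below is a placeholder, and only the keyword names come from the signature above.

# hypothetical arguments for a master/slave burst pair (placeholders throughout)
burst_kwargs = dict(
    master_file='S1A_..._20180101_SLC.zip', swath='IW2',
    master_burst_nr=5, master_burst_id='T100_IW2_b5_20180101',
    out_dir='out_dir', temp_dir='temp_dir',
    slave_file='S1A_..._20180113_SLC.zip',
    slave_burst_nr=5, slave_burst_id='T100_IW2_b5_20180113',
    coherence=True, polarimetry=True, product_type='RTC',
    to_db=True, ls_mask_create=True, remove_slave_import=True
)
# return_code = burst_to_ard(**burst_kwargs)  # requires the SLC data on disk
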
Example 4
def grd_to_ard(filelist,
               output_dir,
               file_id,
               temp_dir,
               proc_file,
               subset=None):
    '''The main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The output can be adapted by changing the processing parameters.
    The function can handle multiple inputs of the same acquisition,
    provided they are consecutive data takes.

    Args:
        filelist (list): must be a list with one or more absolute
                  paths to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the temporary folder
        proc_file (str): path to the processing parameters file
        subset (str): WKT geometry used to subset the imported data
                    (optional)

    Returns:
        nothing

    Notes:
        no explicit return value, since output file is our actual return
    '''

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        polars = ard['polarisation'].replace(' ', '')

    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir,
                             '{}_imported'.format(os.path.basename(file)[:-5]))
            logfile = opj(
                output_dir,
                '{}.Import.errLog'.format(os.path.basename(file)[:-5]))

            return_code = _grd_frame_import(file, grd_import, logfile, polars)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}._slice_assembly.errLog'.format(file_id))
        return_code = _slice_assembly(scenelist, grd_import, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        for file in filelist:
            h.delete_dimap(
                opj(temp_dir,
                    '{}_imported'.format(os.path.basename(str(file))[:-5])))

        if subset:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            return_code = _grd_subset_georegion('{}.dim'.format(grd_import),
                                                grd_subset, logfile, subset)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

            # delete slice assembly
            h.delete_dimap(grd_import)

    # single scene case
    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            return_code = _grd_frame_import(filelist[0], grd_import, logfile,
                                            polars)
        else:
            return_code = _grd_frame_import_subset(filelist[0], grd_import,
                                                   subset, logfile, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code
    # ---------------------------------------------------------------------
    # Remove the grd border noise from existing channels (OST routine)

    if ard['remove border noise'] and not subset:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(
                opj(temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # set new infile
    infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
    # -------------------------------------------
    # in case we want to apply Speckle filtering
    if ard['remove speckle']:

        logfile = opj(temp_dir, '{}.Speckle.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_spk'.format(file_id))

        # run processing
        return_code = _grd_speckle_filter(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete input
        h.delete_dimap(infile[:-4])
        # define infile for next processing step
        infile = '{}.dim'.format(outfile)

    # ----------------------
    # do the calibration
    outfile = opj(temp_dir, '{}.{}'.format(file_id, ard['product type']))
    logfile = opj(output_dir, '{}.Backscatter.errLog'.format(file_id))
    return_code = _grd_backscatter(infile, outfile, logfile, ard['dem'],
                                   ard['product type'])

    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # delete input file
    h.delete_dimap(infile[:-4])

    # input file for the following step
    infile = '{}.dim'.format(outfile)

    # ----------------------------------------------
    # let's create a Layover shadow mask if needed
    if ard['create ls mask'] is True:
        outfile = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        return_code = _grd_ls_mask(infile, outfile, logfile, ard['resolution'],
                                   ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(outfile, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(outfile), '{}.data'.format(out_ls_mask))

    # to db
    if ard['to db']:
        logfile = opj(output_dir, '{}.linToDb.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_{}_db'.format(file_id,
                                                  ard['product type']))
        return_code = _grd_to_db(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete
        h.delete_dimap(infile[:-4])
        # re-define infile
        infile = opj(temp_dir, '{}_{}_db.dim'.format(file_id,
                                                     ard['product type']))

    # -----------------------
    # let's geocode the data
    # infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))
    outfile = opj(temp_dir, '{}.bs'.format(file_id))
    logfile = opj(output_dir, '{}.bs.errLog'.format(file_id))
    return_code = _grd_terrain_correction(infile, outfile, logfile,
                                          ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # remove calibrated files
    h.delete_dimap(infile[:-4])

    # move to final destination
    out_final = opj(output_dir, '{}.bs'.format(file_id))

    # remove file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)

    return_code = h.check_out_dimap(outfile)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(outfile), '{}.data'.format(out_final))

    # write file, so we know this scene has been successfully processed
    if return_code == 0:
        check_file = opj(output_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(output_dir)
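
In this proc_file-driven GRD variant the subset argument is handed straight to _grd_subset_georegion and _grd_frame_import_subset, so a pre-built georegion (for example a WKT polygon) rather than a shapefile path appears to be expected; the sketch below is an assumption built only on the keys and calls visible above.

# assumed 'single ARD' block covering the keys read by this GRD variant
single_ard_grd = {
    'polarisation': 'VV, VH',
    'remove border noise': True,   # skipped automatically when a subset is given
    'remove speckle': False,
    'product type': 'GTCgamma',
    'create ls mask': True,
    'to db': True,
    'resolution': 20,
    'dem': 'SRTM 1sec HGT'
}

# hypothetical area of interest as WKT and the corresponding call
aoi_wkt = 'POLYGON ((11.1 46.0, 11.6 46.0, 11.6 46.4, 11.1 46.4, 11.1 46.0))'
# grd_to_ard(['/data/S1B_IW_GRDH_..._0001.zip'], '/data/out', 'S1B_20190705',
#            '/data/temp', 'ard.json', subset=aoi_wkt)  # ard.json wraps single_ard_grd
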
Example 5
def ard_to_ts(list_of_files, burst, product, pol, config_file):

    # -------------------------------------------
    # 1 unpack list of args
    # convert list of files readable for snap
    list_of_files = f"\'{','.join(str(x) for x in list_of_files)}\'"

    # -------------------------------------------
    # 2 read config file
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict['processing_dir'])
        ard = config_dict['processing']['single_ARD']
        ard_mt = config_dict['processing']['time-series_ARD']

    # -------------------------------------------
    # 3 get namespace of directories and check if already processed
    # get the burst directory
    burst_dir = processing_dir.joinpath(burst)

    # get timeseries directory and create if non existent
    out_dir = burst_dir.joinpath('Timeseries')
    Path.mkdir(out_dir, parents=True, exist_ok=True)

    # in case some processing has been done before, check if already processed
    check_file = out_dir.joinpath(f'.{product}.{pol}.processed')
    if Path.exists(check_file):
        logger.info(
            f'Timeseries of {burst} for {product} in {pol} '
            f'polarisation already processed.'
        )

        out_files = 'already_processed'
        out_vrt = 'already_processed'

        return (
            burst, list_of_files, out_files, out_vrt, f'{product}.{pol}', None
        )

    # -------------------------------------------
    # 4 adjust processing parameters according to config
    # get the db scaling right
    to_db = ard['to_db']
    if to_db or product != 'bs':
        to_db = False
        logger.debug(f'Not converting to dB for {product}')
    else:
        to_db = ard_mt['to_db']
        logger.debug(f'Converting to dB for {product}')

    if ard_mt['apply_ls_mask']:
        extent = burst_dir.joinpath(f'{burst}.valid.json')
    else:
        extent = burst_dir.joinpath(f'{burst}.min_bounds.json')

    # -------------------------------------------
    # 5 SNAP processing
    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        # turn to Path object
        temp = Path(temp)

        # create namespaces
        temp_stack = temp.joinpath(f'{burst}_{product}_{pol}')
        out_stack = temp.joinpath(f'{burst}_{product}_{pol}_mt')
        stack_log = out_dir.joinpath(f'{burst}_{product}_{pol}_stack.err_log')

        # run stacking routine
        if pol in ['Alpha', 'Anisotropy', 'Entropy']:
            logger.info(
                f'Creating multi-temporal stack of images of burst/track '
                f'{burst} for the {pol} band of the polarimetric '
                f'H-A-Alpha decomposition.'
            )
            try:
                create_stack(
                    list_of_files, temp_stack, stack_log, config_dict,
                    pattern=pol
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error
        else:
            logger.info(
                f'Creating multi-temporal stack of images of burst/track '
                f'{burst} for {product} product in {pol} polarization.'
            )
            try:
                create_stack(
                    list_of_files, temp_stack, stack_log, config_dict,
                    polarisation=pol
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error

        # run mt speckle filter
        if ard_mt['remove_mt_speckle'] is True:

            speckle_log = out_dir.joinpath(
                f'{burst}_{product}_{pol}_mt_speckle.err_log'
            )

            logger.debug('Applying multi-temporal speckle filter')
            try:
                mt_speckle_filter(
                    temp_stack.with_suffix('.dim'), out_stack, speckle_log,
                    config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error

            # remove tmp files
            h.delete_dimap(temp_stack)
        else:
            out_stack = temp_stack

        # -----------------------------------------------
        # 6 Conversion to GeoTiff

        # min max dict for stretching in case of 16 or 8 bit datatype
        mm_dict = {'bs': {'min': -30, 'max': 5},
                   'coh': {'min': 0.000001, 'max': 1},
                   'Alpha': {'min': 0.000001, 'max': 90},
                   'Anisotropy': {'min': 0.000001, 'max': 1},
                   'Entropy': {'min': 0.000001, 'max': 1}
                   }
        stretch = pol if pol in ['Alpha', 'Anisotropy', 'Entropy'] else product

        if product == 'coh':

            # get master and slave dates from file names and sort them
            mst_dates = sorted([
                dt.strptime(
                    file.name.split('_')[3].split('.')[0], SNAP_DATEFORMAT
                )
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            slv_dates = sorted([
                dt.strptime(
                    file.name.split('_')[4].split('.')[0], SNAP_DATEFORMAT
                )
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            # write them back to string for following loop
            mst_dates = [dt.strftime(ts, SNAP_DATEFORMAT) for ts in mst_dates]
            slv_dates = [dt.strftime(ts, SNAP_DATEFORMAT) for ts in slv_dates]

            out_files = []
            for i, (mst, slv) in enumerate(zip(mst_dates, slv_dates)):

                # re-construct namespace for input file
                infile = list(
                    out_stack.with_suffix('.data').glob(
                        f'*{pol}*{mst}_{slv}*img'
                    )
                )[0]

                # rename dates to YYMMDD format
                mst = dt.strftime(dt.strptime(mst, SNAP_DATEFORMAT), '%y%m%d')
                slv = dt.strftime(dt.strptime(slv, SNAP_DATEFORMAT), '%y%m%d')

                # create namespace for output file with renamed dates
                outfile = out_dir.joinpath(
                    f'{i+1:02d}.{mst}.{slv}.{product}.{pol}.tif'
                )

                # fill internal values if any
                #with rasterio.open(str(infile), 'r') as src:
                #    meta = src.meta.copy()
                #    filled = ras.fill_internal_nans(src.read())

                #with rasterio.open(str(infile), 'w', **meta) as dest:
                #    dest.write(filled)

                #print('filled')
                # produce final outputfile,
                # including dtype conversion and ls mask
                ras.mask_by_shape(
                    infile, outfile, extent, to_db=to_db,
                    datatype=ard_mt['dtype_output'],
                    min_value=mm_dict[stretch]['min'],
                    max_value=mm_dict[stretch]['max'],
                    ndv=0.0, description=True)

                # add to a list for subsequent vrt creation
                out_files.append(str(outfile))

        else:
            # get the dates of the files
            dates = sorted([dt.strptime(
                file.name.split('_')[-1][:-4], SNAP_DATEFORMAT)
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            # write them back to string for following loop
            dates = [dt.strftime(ts, "%d%b%Y") for ts in dates]

            out_files = []
            for i, date in enumerate(dates):

                # re-construct namespace for input file
                infile = list(
                    out_stack.with_suffix('.data').glob(f'*{pol}*{date}*img')
                )[0]

                # restructure date to YYMMDD
                date = dt.strftime(
                    dt.strptime(date, SNAP_DATEFORMAT), '%y%m%d'
                )

                # create namespace for output file
                outfile = out_dir.joinpath(
                    f'{i+1:02d}.{date}.{product}.{pol}.tif'
                )

                # fill internal nodata
                #if ard['image_type'] == 'SLC':
                    #with rasterio.open(str(infile), 'r') as src:
                    #    meta = src.meta.copy()
                        #filled = ras.fill_internal_nans(src.read())

                    #with rasterio.open(str(infile), 'w', **meta) as dest:
                    #    dest.write(filled)
                    #print('filledbs')
                # run conversion routine
                ras.mask_by_shape(infile, outfile, extent,
                                  to_db=to_db,
                                  datatype=ard_mt['dtype_output'],
                                  min_value=mm_dict[stretch]['min'],
                                  max_value=mm_dict[stretch]['max'],
                                  ndv=0.0)

                # add to a list for subsequent vrt creation
                out_files.append(str(outfile))

    # -----------------------------------------------
    # 7 Filechecks
    for file in out_files:
        return_code = h.check_out_tiff(file)
        if return_code != 0:

            for file_ in out_files:
                Path(file_).unlink()
                if Path(f'{file_}.xml').exists():
                    Path(f'{file_}.xml').unlink()

            return (
                burst, list_of_files, None, None,
                f'{product}.{pol}', return_code
            )

    # write file, so we know this ts has been successfully processed
    with open(str(check_file), 'w') as file:
        file.write('passed all tests \n')

    # -----------------------------------------------
    # 8 Create vrts
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    out_vrt = str(out_dir.joinpath(f'Timeseries.{product}.{pol}.vrt'))
    gdal.BuildVRT(
        out_vrt,
        out_files,
        options=vrt_options
    )

    return burst, list_of_files, out_files, out_vrt, f'{product}.{pol}', None
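
To make the configuration lookups in ard_to_ts easier to follow, here is a hedged reconstruction of the config_file fragment it reads; the key names are taken from the code above, while the values and the commented call are assumptions.

# assumed config fragment with the keys accessed by ard_to_ts
config_example = {
    'processing_dir': '/data/processing',
    'temp_dir': '/data/temp',
    'processing': {
        'single_ARD': {'to_db': False},
        'time-series_ARD': {
            'to_db': True,              # used only for 'bs' when single_ARD did not scale
            'apply_ls_mask': False,     # chooses .valid.json vs .min_bounds.json extent
            'remove_mt_speckle': True,  # enables the multi-temporal speckle filter
            'dtype_output': 'float32'   # value assumed; drives the mm_dict stretch
        }
    }
}

# hypothetical call for a VV backscatter time series of one burst/track
# result = ard_to_ts(list_of_files, 'T100_IW2_b5', 'bs', 'VV', 'config.json')
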
Example 6
def grd_to_ard(filelist, 
               output_dir, 
               file_id, 
               temp_dir, 
               proc_file,
               subset=None):
    '''The main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The output can be adapted by changing the processing parameters.
    The function can handle multiple inputs of the same acquisition,
    provided they are consecutive data takes.

    Args:
        filelist (list): must be a list with one or more absolute
                  paths to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the temporary folder
        proc_file (str): path to the processing parameters file
        subset (str): WKT geometry used to subset the imported data
                    (optional)

    Returns:
        nothing

    Notes:
        no explicit return value, since output file is our actual return
    '''

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        polars = ard['polarisation'].replace(' ', '')
        
    # ---------------------------------------------------------------------
    # 1 Import
    
    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir, '{}_imported'.format(
                os.path.basename(file)[:-5]))
            logfile = opj(output_dir, '{}.Import.errLog'.format(
                os.path.basename(file)[:-5]))
            
            return_code = _grd_frame_import(file, grd_import, logfile, polars)
            if return_code != 0:
                h.delete_dimap(grd_import)
                return return_code

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}._slice_assembly.errLog'.format(file_id))
        return_code = _slice_assembly(scenelist, grd_import, logfile)
        
        # delete inputs
        for file in filelist:
            h.delete_dimap(opj(temp_dir, '{}_imported'.format(
                os.path.basename(str(file))[:-5])))
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(grd_import)
            return return_code

        # subset mode after slice assembly
        if subset:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            return_code = _grd_subset_georegion('{}.dim'.format(grd_import), 
                                                grd_subset, logfile, subset)
            
            # delete slice assembly input to subset
            h.delete_dimap(grd_import)
            
            # delete output if command failed for some reason and return
            if return_code != 0:
                h.delete_dimap(grd_subset)
                return return_code
            
    # single scene case
    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            return_code = _grd_frame_import(filelist[0], grd_import, logfile, 
                                            polars)
        else:
            return_code = _grd_frame_import_subset(filelist[0], grd_import, 
                                                   subset, logfile, 
                                                   polars)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(grd_import)
            return return_code
    
    # ---------------------------------------------------------------------
    # 2 GRD Border Noise
    if ard['remove border noise'] and not subset:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(opj(
                    temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # set input for next step
    infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
    
    # ---------------------------------------------------------------------
    # 3 Calibration
    if ard['product type'] == 'GTC-sigma0':
        calibrate_to = 'sigma0'
    elif ard['product type'] == 'GTC-gamma0':
        calibrate_to = 'gamma0'
    elif ard['product type'] == 'RTC-gamma0':
        calibrate_to = 'beta0'
       
    calibrated = opj(temp_dir, '{}_cal'.format(file_id))
    logfile = opj(output_dir, '{}.Calibration.errLog'.format(file_id))
    return_code = common._calibration(infile, calibrated, logfile, calibrate_to)
    
    # delete input
    h.delete_dimap(infile[:-4])
    
    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(calibrated)
        return return_code
    
    # input for next step
    infile = '{}.dim'.format(calibrated)
    
    # ---------------------------------------------------------------------
    # 4 Multi-looking
    if int(ard['resolution']) >= 20:
        # calculate the multi-look factor
        ml_factor = int(int(ard['resolution']) / 10)
        
        multi_looked = opj(temp_dir, '{}_ml'.format(file_id))
        logfile = opj(output_dir, '{}.multilook.errLog'.format(file_id))
        return_code = common._multi_look(infile, multi_looked, logfile,
                                        ml_factor, ml_factor)

        # delete input
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(multi_looked)
            return return_code
            
        # define input for next step
        infile = '{}.dim'.format(multi_looked)
    
    # ---------------------------------------------------------------------
    # 5 Layover shadow mask
    if ard['create ls mask'] is True:
        ls_mask = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        return_code = common._ls_mask(infile, ls_mask, logfile, ard['resolution'],
                                      ard['dem'])

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(ls_mask)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(ls_mask, test_stats=False)
        if return_code != 0:
            h.delete_dimap(ls_mask)
            return return_code
        
        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(ls_mask), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(ls_mask), '{}.data'.format(out_ls_mask))
        
    # ---------------------------------------------------------------------
    # 6 Speckle filtering
    if ard['remove speckle']:
        
        logfile = opj(output_dir, '{}.Speckle.errLog'.format(file_id))
        filtered = opj(temp_dir, '{}_spk'.format(file_id))

        # run processing
        return_code = common._speckle_filter(infile, filtered, logfile,
                                             ard['speckle filter'])

        # delete input
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(filtered)
            return return_code
       
        # define input for next step
        infile = '{}.dim'.format(filtered)
        
    # ---------------------------------------------------------------------
    # 7 Terrain flattening
    if ard['product type'] == 'RTC-gamma0':
        flattened = opj(temp_dir, '{}_flat'.format(file_id))
        logfile = opj(output_dir, '{}.tf.errLog'.format(file_id))
        return_code = common._terrain_flattening(infile, flattened, logfile,
                                                 ard['dem'])

        # delete input file
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(flattened)
            return return_code
        
        # define input for next step
        infile = '{}.dim'.format(flattened)

    # ---------------------------------------------------------------------
    # 8 Linear to db
    if ard['to db']:
        db_scaled = opj(temp_dir, '{}_db'.format(file_id))
        logfile = opj(output_dir, '{}.db.errLog'.format(file_id))
        return_code = common._linear_to_db(infile, db_scaled, logfile)
        
        # delete input file
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(db_scaled)
            return return_code
        
        # set input for next step
        infile = '{}.dim'.format(db_scaled)

    # ---------------------------------------------------------------------
    # 9 Geocoding
    geocoded = opj(temp_dir, '{}_bs'.format(file_id))
    logfile = opj(output_dir, '{}_bs.errLog'.format(file_id))
    return_code = common._terrain_correction(
        infile, geocoded, logfile, ard['resolution'], ard['dem']
    )
    
    # delete input file
    h.delete_dimap(infile[:-4])
    
    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(geocoded)
        return return_code

    # define final destination
    out_final = opj(output_dir, '{}.bs'.format(file_id))

    # ---------------------------------------------------------------------
    # 10 Checks and move to output directory
    # remove output file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)   
    
    # check final output
    return_code = h.check_out_dimap(geocoded)
    if return_code != 0:
        h.delete_dimap(geocoded)
        return return_code
    
    # move to final destination
    shutil.move('{}.dim'.format(geocoded), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(geocoded), '{}.data'.format(out_final))

    # write processed file to keep track of files already processed
    with open(opj(output_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
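
Two derived settings drive the branches above: the calibration convention (an RTC product is calibrated to beta0 because terrain flattening in step 7 turns beta0 into terrain-flattened gamma0) and the multi-look factor relative to the roughly 10 m native pixel spacing of IW GRDH scenes. The helper below is an assumed illustration of that logic, not part of the module.

# assumed helper illustrating the product-type mapping and multi-look factor above
def derive_grd_settings(product_type, resolution):
    # RTC needs beta0 as input to terrain flattening; GTC products are calibrated directly
    calibrate_to = {'GTC-sigma0': 'sigma0',
                    'GTC-gamma0': 'gamma0',
                    'RTC-gamma0': 'beta0'}[product_type]

    # IW GRDH scenes come at roughly 10 m pixel spacing, so a 20 m target
    # resolution corresponds to a 2 x 2 multi-look
    ml_factor = int(int(resolution) / 10) if int(resolution) >= 20 else 1
    return calibrate_to, ml_factor

print(derive_grd_settings('RTC-gamma0', 20))  # ('beta0', 2)
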
Example 7
def create_backscatter_layers(
        import_file, out_dir, burst_prefix, config_dict
):
    """Pipeline for backscatter processing

    :param import_file:
    :param out_dir:
    :param burst_prefix:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']

    # temp dir for intermediate files
    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        temp = Path(temp)
        # ---------------------------------------------------------------------
        # 1 Calibration

        # create namespace for temporary calibrated product
        out_cal = temp.joinpath(f'{burst_prefix}_cal')

        # create namespace for calibrate log
        cal_log = out_dir.joinpath(f'{burst_prefix}_cal.err_log')

        # run calibration on imported scene
        try:
            slc.calibration(
                import_file, out_cal, cal_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, None, error

        # ---------------------------------------------------------------------
        # 2 Speckle filtering
        if ard['remove_speckle']:

            # create namespace for temporary speckle filtered product
            speckle_import = temp.joinpath(f'{burst_prefix}_speckle_import')

            # create namespace for speckle filter log
            speckle_log = out_dir.joinpath(f'{burst_prefix}_speckle.err_log')

            # run speckle filter on calibrated input
            try:
                common.speckle_filter(
                    out_cal.with_suffix('.dim'), speckle_import, speckle_log,
                    config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # remove input
            h.delete_dimap(out_cal)

            # reset out_cal to the speckle filtered product for the following routine
            out_cal = speckle_import

        # ---------------------------------------------------------------------
        # 3 dB scaling
        if ard['to_db']:

            # create namespace for temporary db scaled product
            out_db = temp.joinpath(f'{burst_prefix}_cal_db')

            # create namespace for db scaling log
            db_log = out_dir.joinpath(f'{burst_prefix}_cal_db.err_log')

            # run db scaling on calibrated/speckle filtered input
            try:
                common.linear_to_db(
                    out_cal.with_suffix('.dim'), out_db, db_log, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # remove tmp files
            h.delete_dimap(out_cal)

            # set out_cal to out_db for further processing
            out_cal = out_db

        # ---------------------------------------------------------------------
        # 4 Geocoding

        # create namespace for temporary geocoded product
        out_tc = temp.joinpath(f'{burst_prefix}_bs')

        # create namespace for geocoding log
        tc_log = out_dir.joinpath(f'{burst_prefix}_bs_tc.err_log')

        # run terrain correction on the calibrated/speckle filtered/dB scaled input
        try:
            common.terrain_correction(
                out_cal.with_suffix('.dim'), out_tc, tc_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, None, error

        # ---------------------------------------------------------------------
        # 5 Create an outline
        ras.image_bounds(out_tc.with_suffix('.data'))

        # ---------------------------------------------------------------------
        # 6 Layover/Shadow mask
        out_ls = None  # set to none for final return statement
        if ard['create_ls_mask'] is True:

            # create namespace for temporary ls mask product
            ls_mask = temp.joinpath(f'{burst_prefix}_ls_mask')

            # create namespace for ls mask log
            logfile = out_dir.joinpath(f'{burst_prefix}.ls_mask.errLog')

            # run ls mask routine
            try:
                common.ls_mask(
                    out_cal.with_suffix('.dim'), ls_mask, logfile, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, error

            # polygonize
            ls_raster = list(ls_mask.with_suffix('.data').glob('*img'))[0]
            ras.polygonize_ls(ls_raster, ls_mask.with_suffix('.json'))

            out_ls = out_tc.with_suffix('.data')\
                .joinpath(ls_mask.name).with_suffix('.json')

            # move to product folder
            ls_mask.with_suffix('.json').rename(out_ls)

        # move final backscatter product to actual output directory
        h.move_dimap(
            out_tc, out_dir.joinpath(f'{burst_prefix}_bs'), ard['to_tif']
        )

        # write out check file for tracking that it is processed
        with open(out_dir.joinpath('.bs.processed'), 'w+') as file:
            file.write('passed all tests \n')

        return (
            str(out_dir.joinpath(f'{burst_prefix}_bs').with_suffix('.dim')),
            str(out_ls),
            None
        )
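
For orientation, a hedged usage sketch of create_backscatter_layers. The paths, burst prefix and config_dict entries are assumptions derived from the keys the function reads (temp_dir, processing/single_ARD); a real OST configuration carries many more parameters consumed by the slc/common wrappers.

from pathlib import Path

# assumed minimal configuration (placeholder values only)
config_dict = {
    'temp_dir': '/tmp/ost_temp',
    'processing': {
        'single_ARD': {
            'remove_speckle': True,
            'to_db': False,
            'create_ls_mask': True,
            'to_tif': False
        }
    }
}

out_bs, out_ls, error = create_backscatter_layers(
    import_file=Path('/data/bursts/A123_IW1/A123_IW1_import.dim'),  # hypothetical
    out_dir=Path('/data/bursts/A123_IW1'),
    burst_prefix='A123_IW1',
    config_dict=config_dict
)

if error is not None:
    print(f'Backscatter processing failed: {error}')
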
Example no. 8
0
def grd_to_ard(filelist, config_file):
    """Main function for the grd to ard generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD defintion for Sentinel-1 backcsatter products.

    By changing the parameters, taking care of all parameters
    that can be given. The function can handle multiple inputs of the same
    acquisition, given that there are consecutive data takes.

    :param filelist: must be a list with one or more
                     absolute paths to GRD scene(s)
    :param config_file:
    :return:
    """

    from ost.s1.s1scene import Sentinel1Scene

    # ----------------------------------------------------
    # 1 load relevant config parameters
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        ard = config_dict['processing']['single_ARD']
        processing_dir = Path(config_dict['processing_dir'])
        subset = config_dict['subset']

    # ----------------------------------------------------
    # 2 define final destination dir/file and ls mask

    # get acq data and track from first scene in list
    first = Sentinel1Scene(Path(filelist[0]).stem)
    acquisition_date = first.start_date
    track = first.rel_orbit

    logger.info(
        f'Processing acquisition from {acquisition_date} over track {track}.'
    )

    # construct namespace for out directory etc.
    out_dir = processing_dir.joinpath(f'{track}/{acquisition_date}')
    out_dir.mkdir(parents=True, exist_ok=True)
    file_id = f'{acquisition_date}_{track}'
    out_final = out_dir.joinpath(f'{file_id}_bs')
    out_ls_mask = out_dir.joinpath(f'{file_id}_LS')

    suf = '.tif' if ard['to_tif'] else '.dim'

    # ----------------------------------------------------
    # 3 check if already processed
    if out_dir.joinpath('.processed').exists() and \
            out_final.with_suffix(suf).exists():
        logger.info(
            f'Acquisition from {acquisition_date} of track {track} '
            f'already processed'
        )

        if out_ls_mask.with_suffix(suf).exists():
            out_ls = out_ls_mask.with_suffix(suf)
        else:
            out_ls = None

        return filelist, out_final.with_suffix(suf), out_ls, None

    # ----------------------------------------------------
    # 4 run the processing routine

    # this might happen in the create_ard from s1scene class
    if not config_dict['temp_dir']:
        temp_dir = processing_dir.joinpath('temp')
        temp_dir.mkdir(parents=True, exist_ok=True)
    else:
        temp_dir = config_dict['temp_dir']

    with TemporaryDirectory(prefix=f"{temp_dir}/") as temp:

        # convert temp directory to Path object
        temp = Path(temp)

        # ---------------------------------------------------------------------
        # 4.1 Import
        # slice assembly if more than one scene
        if len(filelist) > 1:

            # if more than one frame import all files
            for file in filelist:

                # make sure we work on a Path object
                file = Path(file)

                # unzip for faster import
                unpack = None
                if file.suffix == '.zip':
                    with zipfile.ZipFile(file, 'r') as zip_ref:
                        zip_ref.extractall(temp)

                    file = temp.joinpath(f'{file.stem}.SAFE')
                    unpack = True

                # create namespace for temporary imported product
                grd_import = temp.joinpath(f'{file.stem}_imported')

                # create namespace for import log
                logfile = out_dir.joinpath(f'{file.stem}.Import.errLog')

                # temporarily set subset to False for the import routine
                config_dict['subset'] = False
                # frame import
                try:
                    grd.grd_frame_import(
                        file, grd_import, logfile, config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                config_dict['subset'] = subset

                if unpack:
                    h.remove_folder_content(file)
                    file.rmdir()

            # create list of scenes for full acquisition in
            # preparation of slice assembly
            scenelist = ' '.join(
                [str(file) for file in list(temp.glob('*imported.dim'))]
            )

            # create namespace for temporary slice assembled import product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for slice assembled log
            logfile = out_dir.joinpath(f'{file_id}.slice_assembly.errLog')

            # run slice assembly
            try:
                grd.slice_assembly(scenelist, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete imported frames
            for file in filelist:
                h.delete_dimap(temp.joinpath(f'{Path(file).stem}_imported'))

            # subset mode after slice assembly
            if subset:

                # create namespace for temporary subset product
                grd_subset = temp.joinpath(f'{file_id}_imported_subset')

                # create namespace for subset log
                logfile = out_dir.joinpath(f'{file_id}.subset.errLog')

                # run subset routine
                try:
                    grd.grd_subset_georegion(
                        grd_import.with_suffix('.dim'), grd_subset, logfile,
                        config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    logger.info(error)
                    return filelist, None, None, error

                # delete slice assembly input to subset
                h.delete_dimap(grd_import)

                # set subset to import for subsequent functions
                grd_import = grd_subset

        # single scene case
        else:

            file = Path(filelist[0])

            # unzip for faster import
            unpack = None
            if file.suffix == '.zip':
                with zipfile.ZipFile(file, 'r') as zip_ref:
                    zip_ref.extractall(temp)

                file = temp.joinpath(f'{file.stem}.SAFE')
                unpack = True

            # create namespace for temporary imported product
            grd_import = temp.joinpath(f'{file_id}_imported')

            # create namespace for import log
            logfile = out_dir.joinpath(f'{file_id}.Import.errLog')

            # run frame import
            try:
                grd.grd_frame_import(file, grd_import, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            if unpack:
                h.remove_folder_content(file)
                file.rmdir()

        # set input for next step
        infile = grd_import.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.2 GRD Border Noise
        if ard['remove_border_noise'] and not subset:

            # loop through possible polarisations
            for polarisation in ['VV', 'VH', 'HH', 'HV']:

                # get input file
                file = list(temp.glob(
                    f'{file_id}_imported*data/Intensity_{polarisation}.img'
                ))

                # remove border noise
                if len(file) == 1:
                    # run grd Border Remove
                    grd.grd_remove_border(file[0])

        # ---------------------------------------------------------------------
        # 4.3 Calibration

        # create namespace for temporary calibrated product
        calibrated = temp.joinpath(f'{file_id}_cal')

        # create namespace for calibration log
        logfile = out_dir.joinpath(f'{file_id}.calibration.errLog')

        # run calibration
        try:
            grd.calibration(infile, calibrated, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input
        h.delete_dimap(infile.with_suffix(''))

        # input for next step
        infile = calibrated.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.4 Multi-looking
        if int(ard['resolution']) >= 20:

            # create namespace for temporary multi-looked product
            multi_looked = temp.joinpath(f'{file_id}_ml')

            # create namespace for multi-look log
            logfile = out_dir.joinpath(f'{file_id}.multilook.errLog')

            # run multi-looking
            try:
                grd.multi_look(infile, multi_looked, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = multi_looked.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.5 Layover shadow mask
        out_ls = None  # set to none for final return statement
        if ard['create_ls_mask'] is True:

            # create namespace for temporary ls mask product
            ls_mask = temp.joinpath(f'{file_id}_ls_mask')

            # create namespace for ls mask log
            logfile = out_dir.joinpath(f'{file_id}.ls_mask.errLog')

            # run ls mask routine
            try:
                common.ls_mask(infile, ls_mask, logfile, config_dict)
                out_ls = out_ls_mask.with_suffix('.dim')
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # polygonize
            ls_raster = list(ls_mask.with_suffix('.data').glob('*img'))[0]
            ras.polygonize_ls(ls_raster, ls_mask.with_suffix('.json'))

        # ---------------------------------------------------------------------
        # 4.6 Speckle filtering
        if ard['remove_speckle']:

            # create namespace for temporary speckle filtered product
            filtered = temp.joinpath(f'{file_id}_spk')

            # create namespace for speckle filter log
            logfile = out_dir.joinpath(f'{file_id}.Speckle.errLog')

            # run speckle filter
            try:
                common.speckle_filter(infile, filtered, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = filtered.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.7 Terrain flattening
        if ard['product_type'] == 'RTC-gamma0':

            # create namespace for temporary terrain flattened product
            flattened = temp.joinpath(f'{file_id}_flat')

            # create namespace for terrain flattening log
            logfile = out_dir.joinpath(f'{file_id}.tf.errLog')

            # run terrain flattening
            try:
                common.terrain_flattening(
                    infile, flattened, logfile, config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # define input for next step
            infile = flattened.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.8 Linear to db
        if ard['to_db']:

            # create namespace for temporary db scaled product
            db_scaled = temp.joinpath(f'{file_id}_db')

            # create namespace for db scaled log
            logfile = out_dir.joinpath(f'{file_id}.db.errLog')

            # run db scaling routine
            try:
                common.linear_to_db(infile, db_scaled, logfile, config_dict)
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return filelist, None, None, error

            # delete input file
            h.delete_dimap(infile.with_suffix(''))

            # set input for next step
            infile = db_scaled.with_suffix('.dim')

        # ---------------------------------------------------------------------
        # 4.9 Geocoding

        # create namespace for temporary geocoded product
        geocoded = temp.joinpath(f'{file_id}_bs')

        # create namespace for geocoding log
        logfile = out_dir.joinpath(f'{file_id}_bs.errLog')

        # run geocoding
        try:
            common.terrain_correction(infile, geocoded, logfile, config_dict)
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return filelist, None, None, error

        # delete input file
        h.delete_dimap(infile.with_suffix(''))

        # define final destination
        out_final = out_dir.joinpath(f'{file_id}_bs')

        # ---------------------------------------------------------------------
        # 4.10 Create an outline
        ras.image_bounds(geocoded.with_suffix('.data'))

        # ---------------------------------------------------------------------
        # 4.11 Copy LS Mask vector to data dir
        if ard['create_ls_mask'] is True:
            ls_mask.with_suffix('.json').rename(
                geocoded.with_suffix('.data')
                .joinpath(ls_mask.name).with_suffix('.json')
            )

        # ---------------------------------------------------------------------
        # 4.12 Move to output directory
        h.move_dimap(geocoded, out_final, ard['to_tif'])

    # ---------------------------------------------------------------------
    # 5 write processed file to keep track of files already processed
    with open(out_dir.joinpath('.processed'), 'w') as file:
        file.write('passed all tests \n')

    return filelist, out_final.with_suffix('.dim'), out_ls, None
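
A hedged sketch of a configuration file holding only the keys grd_to_ard itself reads (processing_dir, temp_dir, subset and the single_ARD block); the values are placeholders, and an actual OST configuration includes further parameters used by the grd/common wrappers (DEM, speckle filter settings, etc.).

import json

config = {
    "processing_dir": "/data/processing",   # placeholder
    "temp_dir": "/data/temp",               # placeholder
    "subset": False,
    "processing": {
        "single_ARD": {
            "resolution": 20,
            "product_type": "RTC-gamma0",
            "remove_border_noise": True,
            "remove_speckle": True,
            "to_db": True,
            "create_ls_mask": True,
            "to_tif": False
        }
    }
}

with open("config.json", "w") as file:
    json.dump(config, file, indent=2)

# hypothetical call on a single GRD scene
# filelist, out_bs, out_ls, error = grd_to_ard(['/data/S1_GRD_scene.zip'], 'config.json')
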
Example no. 9
0
def burst_to_ard(burst, config_file):

    # when run via godale, burst may arrive as an (index, burst) tuple
    if isinstance(burst, tuple):
        i, burst = burst

    # load relevant config parameters
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        ard = config_dict['processing']['single_ARD']
        temp_dir = Path(config_dict['temp_dir'])

    # creation of out_directory
    out_dir = Path(burst.out_directory)
    out_dir.mkdir(parents=True, exist_ok=True)

    # existence of processed files
    pol_file = out_dir.joinpath('.pol.processed').exists()
    bs_file = out_dir.joinpath('.bs.processed').exists()
    coh_file = out_dir.joinpath('.coh.processed').exists()

    # set all return values initially to None
    out_bs, out_ls, out_pol, out_coh = None, None, None, None

    # check if we need to produce coherence
    if ard['coherence']:
        # we check if there is actually a slave file or
        # if it is the end of the time-series
        coherence = True if burst.slave_file else False
    else:
        coherence = False

    # get info on master from GeoSeries
    master_prefix = burst['master_prefix']
    master_file = burst['file_location']
    master_burst_nr = burst['BurstNr']
    swath = burst['SwathID']

    logger.info(f'Processing burst {burst.bid} acquired at {burst.Date}')
    # check if anything still needs to be processed
    if (
            (ard['H-A-Alpha'] and not pol_file) or
            (ard['backscatter'] and not bs_file) or
            (coherence and not coh_file)
    ):

        # ---------------------------------------------------------------------
        # 1 Master import
        # create namespace for master import
        master_import = temp_dir.joinpath(f'{master_prefix}_import')

        if not master_import.with_suffix('.dim').exists():

            # create namespace for log file
            import_log = out_dir.joinpath(f'{master_prefix}_import.err_log')

            # run import
            try:
                slc.burst_import(
                    master_file,
                    master_import,
                    import_log,
                    swath,
                    master_burst_nr,
                    config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                if master_import.with_suffix('.dim').exists():
                    h.delete_dimap(master_import)

                logger.info(error)
                return burst.bid, burst.Date, None, None, None, None, error

        # ---------------------------------------------------------------------
        # 2 Product Generation
        if ard['H-A-Alpha'] and not pol_file:
            out_pol, error = create_polarimetric_layers(
                master_import.with_suffix('.dim'),
                out_dir,
                master_prefix,
                config_dict
            )
        elif ard['H-A-Alpha'] and pol_file:
            # construct namespace for existing pol layer
            out_pol = str(
                out_dir.joinpath(f'{master_prefix}_pol').with_suffix('.dim')
            )

        if ard['backscatter'] and not bs_file:
            out_bs, out_ls, error = create_backscatter_layers(
                master_import.with_suffix('.dim'),
                out_dir,
                master_prefix,
                config_dict
            )
        elif ard['backscatter'] and bs_file:
            out_bs = str(
                out_dir.joinpath(f'{master_prefix}_bs').with_suffix('.dim')
            )

            if ard['create_ls_mask'] and bs_file:
                out_ls = str(
                    out_dir.joinpath(f'{master_prefix}_LS').with_suffix('.dim')
                )

        if coherence and not coh_file:

            # get info on slave from GeoSeries
            slave_prefix = burst['slave_prefix']
            slave_file = burst['slave_file']
            slave_burst_nr = burst['slave_burst_nr']

            with TemporaryDirectory(prefix=f"{str(temp_dir)}/") as temp:

                # convert temp to Path object
                temp = Path(temp)

                # import slave burst
                slave_import = temp.joinpath(f'{slave_prefix}_import')
                import_log = out_dir.joinpath(f'{slave_prefix}_import.err_log')

                try:
                    slc.burst_import(
                        slave_file,
                        slave_import,
                        import_log,
                        swath,
                        slave_burst_nr,
                        config_dict
                    )
                except (GPTRuntimeError, NotValidFileError) as error:
                    if slave_import.with_suffix('.dim').exists():
                        h.delete_dimap(slave_import)

                    logger.info(error)
                    return burst.bid, burst.Date, None, None, None, None, error

                out_coh, error = create_coherence_layers(
                    master_import.with_suffix('.dim'),
                    slave_import.with_suffix('.dim'),
                    out_dir,
                    master_prefix,
                    config_dict
                )

                # remove master import
                h.delete_dimap(master_import)

        elif coherence and coh_file:
            out_coh = str(
                out_dir.joinpath(f'{master_prefix}_coh').with_suffix('.dim')
            )

            # remove master import
            h.delete_dimap(master_import)
        else:
            # remove master import
            h.delete_dimap(master_import)

    # in case everything has already been processed,
    # we re-construct the output names for a proper return value
    else:
        if ard['H-A-Alpha'] and pol_file:
            out_pol = str(
                out_dir.joinpath(f'{master_prefix}_pol').with_suffix('.dim')
            )

        if ard['backscatter'] and bs_file:
            out_bs = str(
                out_dir.joinpath(f'{master_prefix}_bs').with_suffix('.dim')
            )

        if ard['create_ls_mask'] and bs_file:
            out_ls = str(
                out_dir.joinpath(f'{master_prefix}_LS').with_suffix('.dim')
            )

        if coherence and coh_file:
            out_coh = str(
                out_dir.joinpath(f'{master_prefix}_coh').with_suffix('.dim')
            )

    return burst.bid, burst.Date, out_bs, out_ls, out_pol, out_coh, None
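
A hedged sketch of driving burst_to_ard over a burst inventory. The inventory columns mirror the attributes accessed above (bid, Date, out_directory, master_prefix, file_location, ...); reading it from a GeoPackage and the file name 'config.json' are assumptions.

import geopandas as gpd

# hypothetical burst inventory with one row per burst
burst_inventory = gpd.read_file('burst_inventory.gpkg')

results = []
for _, burst in burst_inventory.iterrows():
    # each call returns (bid, date, out_bs, out_ls, out_pol, out_coh, error)
    results.append(burst_to_ard(burst, 'config.json'))

failed = [result for result in results if result[-1] is not None]
print(f'{len(failed)} of {len(results)} bursts failed')
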
Example no. 10
0
def create_coherence_layers(
        master_import, slave_import, out_dir,
        master_prefix, config_dict
):
    """Pipeline for Dual-polarimetric decomposition

    :param master_import:
    :param slave_import:
    :param out_dir:
    :param master_prefix:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']

    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        temp = Path(temp)
        # ---------------------------------------------------------------
        # 1 Co-registration
        # create namespace for temporary co-registered stack
        out_coreg = temp.joinpath(f'{master_prefix}_coreg')

        # create namespace for co-registration log
        coreg_log = out_dir.joinpath(f'{master_prefix}_coreg.err_log')

        # run co-registration
        try:
            slc.coreg2(
                master_import, slave_import, out_coreg, coreg_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            h.delete_dimap(out_coreg)

            # remove imports
            h.delete_dimap(master_import)
            return None, error

        # remove imports
        h.delete_dimap(master_import)
        h.delete_dimap(slave_import)

        # ---------------------------------------------------------------
        # 2 Coherence calculation

        # create namespace for temporary coherence product
        out_coh = temp.joinpath(f'{master_prefix}_coherence')

        # create namespace for coherence log
        coh_log = out_dir.joinpath(f'{master_prefix}_coh.err_log')

        # run coherence estimation
        try:
            slc.coherence(
                out_coreg.with_suffix('.dim'), out_coh, coh_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # ---------------------------------------------------------------
        # 3 Geocoding

        # create namespace for temporary geocoded product
        out_tc = temp.joinpath(f'{master_prefix}_coh')

        # create namespace for geocoded log
        tc_log = out_dir.joinpath(f'{master_prefix}_coh_tc.err_log')

        # run geocoding
        try:
            common.terrain_correction(
                out_coh.with_suffix('.dim'), out_tc, tc_log, config_dict
            )
        except (GPTRuntimeError, NotValidFileError) as error:
            logger.info(error)
            return None, error

        # ---------------------------------------------------------------
        # 4 Checks and Clean-up

        # remove tmp files
        h.delete_dimap(out_coh)

        # ---------------------------------------------------------------------
        # 5 Create an outline
        ras.image_bounds(out_tc.with_suffix('.data'))

        # move to final destination
        h.move_dimap(
            out_tc, out_dir.joinpath(f'{master_prefix}_coh'), ard['to_tif']
        )

        # write out check file for tracking that it is processed
        with open(out_dir.joinpath('.coh.processed'), 'w+') as file:
            file.write('passed all tests \n')

        return (
            str(out_dir.joinpath(f'{master_prefix}_coh').with_suffix('.dim')),
            None
        )
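
A brief, hedged usage sketch of create_coherence_layers; the paths and prefix are placeholders, and only the configuration keys the function itself reads are sketched here (the slc/common wrappers consume further parameters).

from pathlib import Path

# assumed minimal configuration (placeholder values only)
config_dict = {
    'temp_dir': '/tmp/ost_temp',
    'processing': {'single_ARD': {'to_tif': False}}
}

out_coh, error = create_coherence_layers(
    master_import=Path('/tmp/A123_IW1_master_import.dim'),  # hypothetical
    slave_import=Path('/tmp/A123_IW1_slave_import.dim'),    # hypothetical
    out_dir=Path('/data/bursts/A123_IW1'),
    master_prefix='A123_IW1',
    config_dict=config_dict
)

if error is None:
    print(f'Coherence product written to {out_coh}')
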
Example no. 11
0
def ard_to_ts(list_of_files,
              processing_dir,
              temp_dir,
              burst,
              proc_file,
              product,
              pol,
              ncores=os.cpu_count()):
    if type(list_of_files) == str:
        list_of_files = list_of_files.replace("'", '').strip('][').split(', ')

    # get the burst directory
    burst_dir = opj(processing_dir, burst)

    # check routine if timeseries has already been processed
    check_file = opj(burst_dir, 'Timeseries',
                     '.{}.{}.processed'.format(product, pol))
    if os.path.isfile(check_file):
        print(' INFO: Timeseries of {} for {} in {} polarisation already'
              ' processed'.format(burst, product, pol))
        return

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        ard_mt = ard_params['time-series ARD']
        if ard_mt['remove mt speckle'] is True:
            ard_mt_speck = ard_params['time-series ARD']['mt speckle filter']
    # get the db scaling right
    to_db = ard['to db']
    if to_db or product != 'bs':
        to_db = False
        print('INFO: Not converting to dB for {}'.format(product))
    else:
        to_db = ard_mt['to db']
        print('INFO: Converting to dB for {}'.format(product))

    if ard['apply ls mask']:
        extent = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
    else:
        extent = opj(burst_dir, '{}.extent.shp'.format(burst))

    # min max dict for stretching in case of 16 or 8 bit datatype
    mm_dict = {
        'bs': {
            'min': -30,
            'max': 5
        },
        'coh': {
            'min': 0.000001,
            'max': 1
        },
        'Alpha': {
            'min': 0.000001,
            'max': 90
        },
        'Anisotropy': {
            'min': 0.000001,
            'max': 1
        },
        'Entropy': {
            'min': 0.000001,
            'max': 1
        }
    }

    stretch = pol if pol in ['Alpha', 'Anisotropy', 'Entropy'] else product

    # define out_dir for stacking routine
    out_dir = opj(processing_dir, '{}'.format(burst), 'Timeseries')
    os.makedirs(out_dir, exist_ok=True)

    # create namespaces
    temp_stack = opj(temp_dir, '{}_{}_{}'.format(burst, product, pol))
    out_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, product, pol))
    stack_log = opj(out_dir,
                    '{}_{}_{}_stack.err_log'.format(burst, product, pol))

    # run stacking routines
    # convert list of files readable for snap
    list_of_files = '\'{}\''.format(','.join(list_of_files))

    if pol in ['Alpha', 'Anisotropy', 'Entropy']:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' the {} band of the polarimetric H-A-Alpha'
            ' decomposition.'.format(burst, pol))
        create_stack(list_of_files, temp_stack, stack_log, pattern=pol)
    else:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' {} product in {} polarization.'.format(burst, product, pol))
        create_stack(list_of_files, temp_stack, stack_log, polarisation=pol)

    # run mt speckle filter
    if ard_mt['remove mt speckle'] is True:
        speckle_log = opj(
            out_dir, '{}_{}_{}_mt_speckle.err_log'.format(burst, product, pol))

        print(' INFO: Applying multi-temporal speckle filter')
        mt_speckle_filter('{}.dim'.format(temp_stack),
                          out_stack,
                          speckle_log,
                          speckle_dict=ard_mt_speck,
                          ncores=ncores)
        # remove tmp files
        h.delete_dimap(temp_stack)
    else:
        out_stack = temp_stack

    if product == 'coh':

        # get slave and master Date
        mstDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[3].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]

        slvDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[4].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        mstDates.sort()
        slvDates.sort()
        # write them back to string for following loop
        sortedMstDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in mstDates
        ]
        sortedSlvDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in slvDates
        ]

        i, outfiles = 1, []
        for mst, slv in zip(sortedMstDates, sortedSlvDates):

            inMst = datetime.datetime.strptime(mst, '%d%b%Y')
            inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

            outMst = datetime.datetime.strftime(inMst, '%y%m%d')
            outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')
            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}_{}*img'.format(pol, mst, slv)))[0]

            outfile = opj(
                out_dir,
                '{:02d}.{}.{}.{}.{}.tif'.format(i, outMst, outSlv, product,
                                                pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0,
                              description=True)
            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    else:
        # get the dates of the files
        dates = [
            datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        dates.sort()
        # write them back to string for following loop
        sortedDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
        ]

        i, outfiles = 1, []
        for date in sortedDates:

            # restructure date to YYMMDD
            inDate = datetime.datetime.strptime(date, '%d%b%Y')
            outDate = datetime.datetime.strftime(inDate, '%y%m%d')

            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}*img'.format(pol, date)))[0]

            # create outfile
            outfile = opj(
                out_dir, '{:02d}.{}.{}.{}.tif'.format(i, outDate, product,
                                                      pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0)

            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    # check all output files; clean up and return on error
    return_code = 0
    for file in outfiles:
        return_code = h.check_out_tiff(file)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            os.remove(file)
            return return_code

    # write check file, so we know this timeseries has been successfully processed
    if return_code == 0:
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')

    # build vrt of timeseries
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.{}.vrt'.format(product, pol)),
                  outfiles,
                  options=vrt_options)

    # remove tmp files
    h.delete_dimap(out_stack)
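
A hedged sketch of a proc_file containing just the keys ard_to_ts reads. The entries under 'mt speckle filter' are passed straight through to mt_speckle_filter as speckle_dict, so they are left empty here; the other values are placeholders.

import json

proc = {
    "processing parameters": {
        "single ARD": {
            "to db": False,
            "apply ls mask": False
        },
        "time-series ARD": {
            "to db": True,
            "remove mt speckle": True,
            "dtype output": "float32",   # placeholder value
            "mt speckle filter": {}      # forwarded as speckle_dict
        }
    }
}

with open("proc_file.json", "w") as file:
    json.dump(proc, file, indent=2)
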
Example no. 12
0
def _ard_to_ts(burst_inventory, processing_dir, temp_dir, burst, to_db,
               ls_mask_create, ls_mask_apply, mt_speckle_filter, datatype):

    burst_dir = opj(processing_dir, burst)

    # get common burst extent
    list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
    list_of_scenes = [x for x in list_of_scenes if 'layover' not in x]
    extent = opj(burst_dir, '{}.extent.shp'.format(burst))
    ts.mt_extent(list_of_scenes, extent, temp_dir, buffer=-0.0018)

    # remove initial extent
    for file in glob.glob(opj(burst_dir, 'tmp*')):
        os.remove(file)

    # layover/shadow mask
    if ls_mask_create is True:
        list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
        list_of_layover = [x for x in list_of_scenes if 'layover' in x]
        out_ls = opj(burst_dir, '{}.ls_mask.tif'.format(burst))
        ts.mt_layover(list_of_layover, out_ls, temp_dir, extent=extent)
        print(' INFO: Our common layover mask is located at {}'.format(out_ls))

    if ls_mask_apply:
        print(' INFO: Calculating symmetrical difference of extent and ls_mask')
        ras.polygonize_raster(out_ls, '{}.shp'.format(out_ls[:-4]))
        extent_ls_masked = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
        vec.difference(extent, '{}.shp'.format(out_ls[:-4]), extent_ls_masked)
        extent = extent_ls_masked

    list_of_product_types = {'BS': 'Gamma0', 'coh': 'coh', 'ha_alpha': 'Alpha'}

    # we loop through each possible product
    for p, product_name in list_of_product_types.items():

        # we loop through each polarisation
        for pol in ['VV', 'VH', 'HH', 'HV']:

            # see if there is actually any imagery
            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*', '*data*',
                        '{}*{}*img'.format(product_name, pol))))

            if len(list_of_ts_bursts) > 1:

                # check for all datafiles of this product type
                list_of_ts_bursts = sorted(
                    glob.glob(
                        opj(processing_dir, burst, '20*/',
                            '*{}*dim'.format(p))))
                list_of_ts_bursts = '\'{}\''.format(
                    ','.join(list_of_ts_bursts))

                # define out_dir for stacking routine

                out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
                os.makedirs(out_dir, exist_ok=True)

                # create namespaces

                temp_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                out_stack = opj(out_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                stack_log = opj(out_dir,
                                '{}_{}_{}_stack.err_log'.format(burst, p, pol))

                # run stacking routines
                ts.create_stack(list_of_ts_bursts,
                                temp_stack,
                                stack_log,
                                polarisation=pol)

                # run mt speckle filter
                if mt_speckle_filter is True:
                    speckle_log = opj(
                        out_dir,
                        '{}_{}_{}_mt_speckle.err_log'.format(burst, p, pol))

                    ts.mt_speckle_filter('{}.dim'.format(temp_stack),
                                         out_stack, speckle_log)
                    # remove tmp files
                    h.delete_dimap(temp_stack)
                else:
                    out_stack = temp_stack

                # convert to GeoTiffs
                if p == 'BS':
                    # get the dates of the files
                    dates = [
                        datetime.datetime.strptime(
                            x.split('_')[-1][:-4], '%d%b%Y')
                        for x in glob.glob(
                            opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    dates.sort()
                    # write them back to string for following loop
                    sortedDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in dates
                    ]

                    i, outfiles = 1, []
                    for date in sortedDates:

                        # restructure date to YYMMDD
                        inDate = datetime.datetime.strptime(date, '%d%b%Y')
                        outDate = datetime.datetime.strftime(inDate, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}*img'.format(pol, date)))[0]

                        # create outfile
                        outfile = opj(
                            out_dir,
                            '{}.{}.{}.{}.tif'.format(i, outDate, p, pol))

                        # mask by extent
                        ras.mask_by_shape(infile,
                                          outfile,
                                          extent,
                                          to_db=to_db,
                                          datatype=datatype,
                                          min_value=-30,
                                          max_value=5,
                                          ndv=0)
                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                if p == 'coh':

                    # get slave and master Date
                    mstDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[3].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]

                    slvDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[4].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    mstDates.sort()
                    slvDates.sort()
                    # write them back to string for following loop
                    sortedMstDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in mstDates
                    ]
                    sortedSlvDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in slvDates
                    ]

                    i, outfiles = 1, []
                    for mst, slv in zip(sortedMstDates, sortedSlvDates):

                        inMst = datetime.datetime.strptime(mst, '%d%b%Y')
                        inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

                        outMst = datetime.datetime.strftime(inMst, '%y%m%d')
                        outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}_{}*img'.format(pol, mst, slv)))[0]
                        outfile = opj(
                            out_dir, '{}.{}.{}.{}.{}.tif'.format(
                                i, outMst, outSlv, p, pol))

                        ras.mask_by_shape(infile,
                                          outfile,
                                          extent,
                                          to_db=False,
                                          datatype=datatype,
                                          min_value=0.000001,
                                          max_value=1,
                                          ndv=0)

                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                # remove tmp files
                h.delete_dimap(out_stack)

    # the polarimetric decomposition bands are taken from the H-A-Alpha product
    p = 'ha_alpha'
    for pol in ['Alpha', 'Entropy', 'Anisotropy']:

        list_of_ts_bursts = sorted(
            glob.glob(
                opj(processing_dir, burst, '20*', '*{}*'.format(p),
                    '*{}.img'.format(pol))))

        if len(list_of_ts_bursts) > 1:

            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*/', '*{}*dim'.format(p))))
            list_of_ts_bursts = '\'{}\''.format(','.join(list_of_ts_bursts))

            # print(list_of_ts_bursts)

            out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
            os.makedirs(out_dir, exist_ok=True)

            temp_stack = opj(temp_dir, '{}_{}_mt'.format(burst, pol))
            out_stack = opj(out_dir, '{}_{}_mt'.format(burst, pol))

            stack_log = opj(out_dir, '{}_{}_stack.err_log'.format(burst, pol))
            # processing routines
            ts.create_stack(list_of_ts_bursts,
                            temp_stack,
                            stack_log,
                            pattern=pol)

            if mt_speckle_filter is True:
                speckle_log = opj(
                    out_dir, '{}_{}_mt_speckle.err_log'.format(burst, pol))
                ts.mt_speckle_filter('{}.dim'.format(temp_stack), out_stack,
                                     speckle_log)
                # remove tmp files
                h.delete_dimap(temp_stack)
            else:
                out_stack = temp_stack

            # get the dates of the files
            dates = [
                datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
                for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
            ]
            # sort them
            dates.sort()
            # write them back to string for following loop
            sortedDates = [
                datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
            ]

            i, outfiles = 1, []
            for date in sortedDates:

                # restructure date to YYMMDD
                inDate = datetime.datetime.strptime(date, '%d%b%Y')
                outDate = datetime.datetime.strftime(inDate, '%y%m%d')

                infile = glob.glob(
                    opj('{}.data'.format(out_stack),
                        '*{}*{}*img'.format(pol, date)))[0]
                # create outfile
                outfile = opj(out_dir,
                              '{}.{}.{}.{}.tif'.format(i, outDate, p, pol))
                # mask by extent
                max_value = 90 if pol == 'Alpha' else 1
                ras.mask_by_shape(infile,
                                  outfile,
                                  extent,
                                  to_db=False,
                                  datatype=datatype,
                                  min_value=0.000001,
                                  max_value=max_value,
                                  ndv=0)

                # add to a list for subsequent vrt creation
                outfiles.append(outfile)
                i += 1

            # build vrt of timeseries
            vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
            gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.vrt'.format(pol)),
                          outfiles,
                          options=vrt_options)

            # remove tmp files
            h.delete_dimap(out_stack)
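
Both time-series helpers above recover acquisition dates directly from the stacked band file names. A small sketch of that convention (the file name is hypothetical): band files inside the stacked .data folder end with a '%d%b%Y' date, which is rewritten to 'YYMMDD' for the numbered GeoTiff outputs.

import datetime

name = 'Gamma0_VV_05Jan2020.img'  # hypothetical band file inside <stack>.data
date = datetime.datetime.strptime(name.split('_')[-1][:-4], '%d%b%Y')
print(datetime.datetime.strftime(date, '%y%m%d'))  # -> 200105
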
Example no. 13
0
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False,
                 ncores=os.cpu_count()):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (): index number of the burst
        master_burst_id ():
        out_dir (str):
        temp_dir (str):
        slave_file (str):
        slave_burst_nr (str):
        slave_burst_id (str):
        proc_file (str):
        remove_slave_import (bool):
        ncores (int): number of cpus used - useful for parallel processing
    '''
    if type(remove_slave_import) == str:
        if remove_slave_import == 'True':
            remove_slave_import = True
        elif remove_slave_import == 'False':
            remove_slave_import = False
    if type(coherence) == str:
        if coherence == 'True':
            coherence = True
        elif coherence == 'False':
            coherence = False
    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
     
    # ---------------------------------------------------------------------
    # 1 Import
    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            master_file, master_import, import_log, swath,
            master_burst_nr, polars, ncores
        )
        if return_code != 0:
            h.delete_dimap(master_import)
            return return_code

    imported = '{}.dim'.format(master_import)
    # ---------------------------------------------------------------------
    # 2 H-A-Alpha
    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = slc._ha_alpha(
            imported, out_haa, haa_log,
            ard['remove pol speckle'], ard['pol speckle filter'], ncores
        )

        # delete files in case of error
        if return_code != 0:
            h.delete_dimap(out_haa)
            h.delete_dimap(master_import)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(
            master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_haa), out_htc, haa_tc_log, 
            ard['resolution'], ard['dem'], ncores
        )

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)
        
        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.delete_dimap(out_htc)
            h.delete_dimap(master_import)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 3 Calibration
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = slc._calibration(
        imported, out_cal, cal_log, ard['product type'], ncores)

    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(out_cal)
        h.delete_dimap(master_import)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # ---------------------------------------------------------------------
    # 4 Speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(
            temp_dir, '{}_speckle_import'.format(master_burst_id)
        )
        speckle_log = opj(
            out_dir, '{}_speckle.err_log'.format(master_burst_id)
        )

        return_code = common._speckle_filter(
            '{}.dim'.format(out_cal), speckle_import, speckle_log,
            ard['speckle filter'], ncores
        )

        # remove input 
        h.delete_dimap(out_cal)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(speckle_import)
            h.delete_dimap(master_import)
            return return_code

        # reset out_cal to the speckle filtered product for the following routine
        out_cal = speckle_import

    # ---------------------------------------------------------------------
    # 5 Terrain Flattening
    if ard['product type'] == 'RTC-gamma0':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(
            master_burst_id))
        # do the TF
        return_code = common._terrain_flattening(
            '{}.dim'.format(out_cal), out_rtc, rtc_log, ard['dem'], ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_rtc)
            h.delete_dimap(master_import)
            return return_code

        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    # ---------------------------------------------------------------------
    # 6 Linear to dB scale
    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = common._linear_to_db(
            '{}.dim'.format(out_cal), out_db, db_log, ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_db)
            h.delete_dimap(master_import)
            return return_code

        # set out_cal to out_db for further processing
        out_cal = out_db
 
    # ---------------------------------------------------------------------
    # 8 Geocode backscatter
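    # terrain correction (geocoding) of the backscatter to map geometry
    # at the configured resolution, based on the selected DEM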
    if ard['product type'] != "Coherence_only":
        out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_cal), out_tc, tc_log,
            ard['resolution'], ard['dem'], ncores)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # last check on backscatter data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # we move backscatter to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 9 Layover/Shadow mask
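    # derive a geocoded layover/shadow mask from the DEM and the
    # acquisition geometry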
    if ard['create ls mask']:
        
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = common._ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                                      ard['resolution'], ard['dem'], ncores)

        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    if ard['product type'] != "Coherence_only":
        h.delete_dimap(out_cal)

    if coherence:
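        # this branch imports the matching slave burst, co-registers it to
        # the master geometry, computes the interferometric coherence and
        # geocodes the result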

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            slave_file, slave_import, import_log, swath, slave_burst_nr,
            polars, ncores
        )

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        return_code = slc._coreg2(
            '{}.dim'.format(master_import), '{}.dim'.format(slave_import),
            out_coreg, coreg_log, ard['dem'], ncores
        )

        # remove imports
        h.delete_dimap(master_import)
        
        if remove_slave_import:
            h.delete_dimap(slave_import)
        
        # delete output if command failed for some reason and return   
        if return_code != 0:
            h.delete_dimap(out_coreg)
            h.delete_dimap(slave_import)
            return return_code

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = slc._coherence(
            '{}.dim'.format(out_coreg), out_coh, coh_log, coh_polars, ncores
        )

        # remove coreg tmp files
        h.delete_dimap(out_coreg)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_coh)
            h.delete_dimap(slave_import)
            return return_code

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_coh), out_tc, tc_log, 
            ard['resolution'], ard['dem'], ncores)
        
        # remove tmp files
        h.delete_dimap(out_coh)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            h.delete_dimap(slave_import)
            return return_code
        
        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

    # write out a check file to flag that this burst has been processed
    with open(opj(out_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
    
    return return_code
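

# ----------------------------------------------------------------------
# Minimal usage sketch (illustration only, not part of the routine above):
# every path, burst index/ID and the ncores value below is a hypothetical
# placeholder and would need to be replaced by a real Sentinel-1 SLC pair
# and an OST processing-parameter JSON file. The keyword names mirror the
# variables used inside the routine and may differ in other versions.
if __name__ == '__main__':

    return_code = burst_to_ard(
        master_file='/data/slc/S1_master.zip',    # placeholder path
        swath='IW1',
        master_burst_nr=3,
        master_burst_id='master_burst_example',
        proc_file='/data/processing.json',        # placeholder ARD config
        out_dir='/data/out',
        temp_dir='/data/temp',
        slave_file='/data/slc/S1_slave.zip',      # placeholder path
        slave_burst_nr=3,
        slave_burst_id='slave_burst_example',
        coherence=True,
        remove_slave_import=False,
        ncores=2
    )

    print('burst_to_ard finished with return code {}'.format(return_code))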