def ls_mask(infile, outfile, logfile, config_dict):
    """Wrapper function of a Snap graph for Layover/Shadow mask creation

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    dem_dict = ard['dem']
    cpus = config_dict['snap_cpu_parallelism']

    # auto projections of snap
    if 42001 <= dem_dict['out_projection'] <= 97002:
        projection = f"AUTO:{dem_dict['out_projection']}"
    # epsg codes
    else:
        projection = f"EPSG:{dem_dict['out_projection']}"

    logger.debug('Creating the Layover/Shadow mask')

    # get path to workflow xml
    graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/3_LSmap.xml')

    command = (
        f'{GPT_FILE} {graph} -x -q {2 * cpus} '
        f'-Pinput=\'{str(infile)}\' '
        f'-Presol={ard["resolution"]} '
        f'-Pdem=\'{dem_dict["dem_name"]}\' '
        f'-Pdem_file=\'{dem_dict["dem_file"]}\' '
        f'-Pdem_nodata=\'{dem_dict["dem_nodata"]}\' '
        f'-Pdem_resampling=\'{dem_dict["dem_resampling"]}\' '
        f'-Pimage_resampling=\'{dem_dict["image_resampling"]}\' '
        f'-Pegm_correction=\'{str(dem_dict["egm_correction"]).lower()}\' '
        f'-Pprojection=\'{projection}\' '
        f'-Poutput=\'{str(outfile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully created a Layover/Shadow mask')
    else:
        raise GPTRuntimeError(
            f'Layover/Shadow mask creation exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile, test_stats=False)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
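
# Usage sketch (illustrative, not part of the module): a minimal call to
# ls_mask. The shape of config_dict mirrors the keys read above; all values
# and paths are hypothetical.
#
# from pathlib import Path
#
# config_dict = {
#     'snap_cpu_parallelism': 4,
#     'processing': {
#         'single_ARD': {
#             'resolution': 20,
#             'dem': {
#                 'dem_name': 'SRTM 1Sec HGT',
#                 'dem_file': '',
#                 'dem_nodata': 0,
#                 'dem_resampling': 'BILINEAR_INTERPOLATION',
#                 'image_resampling': 'BILINEAR_INTERPOLATION',
#                 'egm_correction': True,
#                 'out_projection': 4326,
#             },
#         },
#     },
# }
#
# ls_dim = ls_mask(
#     infile=Path('scene_import.dim'),
#     outfile=Path('scene_LS'),
#     logfile=Path('scene_LS.errLog'),
#     config_dict=config_dict,
# )
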
def create_stack(
    file_list,
    out_stack,
    logfile,
    config_dict,
    polarisation=None,
    pattern=None,
):
    """

    :param file_list:
    :param out_stack:
    :param logfile:
    :param config_dict:
    :param polarisation:
    :param pattern:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Creating multi-temporal stack.')

    if pattern:
        graph = OST_ROOT.joinpath('graphs/S1_TS/1_BS_Stacking_HAalpha.xml')

        command = (f'{GPT_FILE} {graph} -x -q {2*cpus} '
                   f'-Pfilelist={file_list} '
                   f'-PbandPattern=\'{pattern}.*\' '
                   f'-Poutput={out_stack}')

    else:
        graph = OST_ROOT.joinpath('graphs/S1_TS/1_BS_Stacking.xml')

        command = (f'{GPT_FILE} {graph} -x -q {2*cpus} '
                   f'-Pfilelist={file_list} '
                   f'-Ppol={polarisation} '
                   f'-Poutput={out_stack}')

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created multi-temporal stack')
    else:
        raise GPTRuntimeError(
            f'Multi-temporal stack creation exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_msg = h.check_out_dimap(out_stack)
    if return_msg == 0:
        logger.debug('Product passed validity check.')
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_msg}')
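
# Usage sketch (hypothetical paths, config_dict as sketched after ls_mask
# above): stacking per-date backscatter products for one polarisation. The
# separator of file_list is dictated by the SNAP graph; a comma-separated
# string is assumed here.
#
# from pathlib import Path
#
# file_list = ','.join(['/data/ts/20200101_bs.dim', '/data/ts/20200113_bs.dim'])
# create_stack(file_list, Path('/data/ts/stack'), '/data/ts/stack.errLog',
#              config_dict, polarisation='VV')
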
def coreg(master, slave, outfile, logfile, config_dict):
    """A wrapper around SNAP's back-geocoding co-registration routine

    This function takes 2 OST imported Sentinel-1 SLC products
    (master and slave) and co-registers them properly.
    This routine is sufficient for coherence estimation,
    but not for InSAR, since the ESD refinement is not applied.

    :param master:
    :param slave:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    logger.debug(f'Co-registering {master} and {slave}')

    # construct command
    command = (
        f'{GPT_FILE} Back-Geocoding -x -q {2*cpus} '
        f'-PdemName=\'{dem_dict["dem_name"]}\' '
        f'-PdemResamplingMethod=\'{dem_dict["dem_resampling"]}\' '
        f'-PexternalDEMFile=\'{dem_dict["dem_file"]}\' '
        f'-PexternalDEMNoDataValue=\'{dem_dict["dem_nodata"]}\' '
        f'-PmaskOutAreaWithoutElevation=false '
        f'-PresamplingType=BILINEAR_INTERPOLATION '
        f'-t \'{str(outfile)}\''
        f' "{master}" "{slave}"'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully co-registered product.')
    else:
        raise GPTRuntimeError(
            f'Co-registration exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def slice_assembly(filelist, outfile, logfile, config_dict):
    """Wrapper function around SNAP's slice assembly routine

    :param filelist: a string of a space separated list of OST imported
                     Sentinel-1 GRD product frames to be assembled
    :type filelist: str
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """
    '''A wrapper of SNAP's slice assembly routine

    This function assembles consecutive frames acquired at the same date.
    Can be either GRD or SLC products

    Args:
        filelist (str): a string of a space separated list of OST imported
                        Sentinel-1 product slices to be assembled
        outfile: string or os.path object for the output
                 file written in BEAM-Dimap format
        logfile: string or os.path object for the file
                 where SNAP'S STDOUT/STDERR is written to
    '''

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['polarisation'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Assembling consecutive frames:')

    # construct command
    command = (f'{GPT_FILE} SliceAssembly -x -q {2*cpus} '
               f'-PselectedPolarisations={polars} '
               f'-t \'{str(outfile)}\' {filelist}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully assembled products')
    else:
        raise GPTRuntimeError(
            f'Slice Assembly exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
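
# Usage sketch (hypothetical paths): filelist is a single space-separated
# string of imported frames, as documented above.
#
# from pathlib import Path
#
# filelist = '/tmp/frame1_imported.dim /tmp/frame2_imported.dim'
# assembled = slice_assembly(filelist, Path('/tmp/assembled'),
#                            '/tmp/assembly.errLog', config_dict)
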
def speckle_filter(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's Speckle Filter function

    This function takes OST imported Sentinel-1 product and applies
    the Speckle Filter as defind within the config dictionary.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    speckle_dict = config_dict['processing']['single_ARD']['speckle_filter']

    logger.debug('Applying speckle filtering.')

    # construct command string
    command = (
        f"{GPT_FILE} Speckle-Filter -x -q {2*cpus} "
        f"-PestimateENL=\'{speckle_dict['estimate_ENL']}\' "
        f"-PanSize=\'{speckle_dict['pan_size']}\' "
        f"-PdampingFactor=\'{speckle_dict['damping']}\' "
        f"-Penl=\'{speckle_dict['ENL']}\' "
        f"-Pfilter=\'{speckle_dict['filter']}\' "
        f"-PfilterSizeX=\'{speckle_dict['filter_x_size']}\' "
        f"-PfilterSizeY=\'{speckle_dict['filter_y_size']}\' "
        f"-PnumLooksStr=\'{speckle_dict['num_of_looks']}\' "
        f"-PsigmaStr=\'{speckle_dict['sigma']}\' "
        f"-PtargetWindowSizeStr=\"{speckle_dict['target_window_size']}\" "
        f"-PwindowSize=\"{speckle_dict['window_size']}\" "
        f"-t \'{str(outfile)}\' \'{str(infile)}\' ")

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully applied speckle filtering.')
    else:
        raise GPTRuntimeError(
            f'Speckle filtering exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
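
# For reference, the speckle_filter config block read above. Only the key
# names are taken from the command string; the values shown are hypothetical.
#
# config_dict['processing']['single_ARD']['speckle_filter'] = {
#     'estimate_ENL': True, 'pan_size': 50, 'damping': 2, 'ENL': 1.0,
#     'filter': 'Refined Lee', 'filter_x_size': 3, 'filter_y_size': 3,
#     'num_of_looks': 1, 'sigma': 0.9, 'target_window_size': '3x3',
#     'window_size': '7x7',
# }
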
def coreg2(master, slave, outfile, logfile, config_dict):
    """A wrapper around SNAP's back-geocoding co-registration routine

    This function takes 2 OST imported Sentinel-1 SLC products
    (master and slave) and co-registers them properly.
    This routine is sufficient for coherence estimation,
    but not for InSAR, since the ESD refinement is not applied.

    :param master:
    :param slave:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_Coreg.xml')

    logger.debug(f'Co-registering {master} and {slave}')
    command = (
        f"{GPT_FILE} {graph} -x -q {2*cpus} "
        f" -Pmaster={master} "
        f" -Pslave={slave} "
        f" -Pdem_name=\'{dem_dict['dem_name']}\' "
        f" -Pdem_file=\'{dem_dict['dem_file']}\' "
        f" -Pdem_nodata=\'{dem_dict['dem_nodata']}\' "
        f" -Pdem_resampling=\'{dem_dict['dem_resampling']}\' "
        f" -Poutput={str(outfile)}"
    )

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully co-registered product.')
    else:
        raise GPTRuntimeError(
            f'Co-registration exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def calibration(infile, outfile, logfile, config_dict):
    """

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    product_type = config_dict['processing']['single_ARD']['product_type']
    cpus = config_dict['snap_cpu_parallelism']

    # transform calibration parameter to snap readable
    sigma0, beta0, gamma0 = 'false', 'false', 'false'

    if product_type == 'GTC-sigma0':
        sigma0 = 'true'
    elif product_type == 'GTC-gamma0':
        gamma0 = 'true'
    elif product_type == 'RTC-gamma0':
        beta0 = 'true'
    else:
        raise TypeError('Wrong product type selected.')

    logger.debug(f'Calibrating the product to {product_type}.')

    # construct command string
    command = (f'{GPT_FILE} Calibration -x -q {2*cpus} '
               f' -PoutputBetaBand=\'{beta0}\' '
               f' -PoutputGammaBand=\'{gamma0}\' '
               f' -PoutputSigmaBand=\'{sigma0}\' '
               f' -t \'{str(outfile)}\' \'{str(infile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug(f'Calibration to {product_type} successful.')
    else:
        raise GPTRuntimeError(f'Calibration exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
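
# Note on the RTC case above: 'RTC-gamma0' deliberately calibrates to beta0,
# which terrain_flattening (defined further below) then converts to
# terrain-flattened gamma0. A hypothetical call chain (paths illustrative):
#
# from pathlib import Path
#
# cal_dim = calibration(Path('scene.dim'), Path('scene_cal'),
#                       'scene_cal.errLog', config_dict)
# flat_dim = terrain_flattening(Path(cal_dim), Path('scene_flat'),
#                               'scene_flat.errLog', config_dict)
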
def coherence(infile, outfile, logfile, config_dict):
    """A wrapper around SNAP's coherence routine

    This function takes a co-registered stack of 2 Sentinel-1 SLC products
    and calculates the coherence.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['coherence_bands'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_Coh_Deb.xml')

    logger.debug('Coherence estimation')

    command = (
        f"{GPT_FILE} {graph} -x -q {2 * cpus} "
        f"-Pazimuth_window={ard['coherence_azimuth']} "
        f"-Prange_window={ard['coherence_range']} "
        f'-Ppolar=\'{polars}\' '
        f'-Pinput="{str(infile)}" '
        f'-Poutput="{str(outfile)}"'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created coherence product.')
    else:
        raise GPTRuntimeError(
            f'Coherence estimation exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def mt_speckle_filter(in_stack, out_stack, logfile, config_dict):
    """Wrapper function around SNAP's Multi-Temporal-Speckle-Filter operator

    :param in_stack: path to a multi-temporal stack in BEAM-Dimap format
    :param out_stack: path to the output stack (without extension)
    :param logfile: path to the file where SNAP's STDOUT/STDERR is written
    :param config_dict: an OST configuration dictionary
    :return: path to the output file with the '.dim' suffix
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    speckle_dict = (
        config_dict['processing']['time-series_ARD']['mt_speckle_filter'])

    # debug message
    logger.debug('Applying multi-temporal speckle filtering.')

    # construct command string
    command = (
        f"{GPT_FILE} Multi-Temporal-Speckle-Filter -x -q {2*cpus} "
        f"-PestimateENL=\'{speckle_dict['estimate_ENL']}\' "
        f"-PanSize=\'{speckle_dict['pan_size']}\' "
        f"-PdampingFactor=\'{speckle_dict['damping']}\' "
        f"-Penl=\'{speckle_dict['ENL']}\' "
        f"-Pfilter=\'{speckle_dict['filter']}\' "
        f"-PfilterSizeX=\'{speckle_dict['filter_x_size']}\' "
        f"-PfilterSizeY=\'{speckle_dict['filter_y_size']}\' "
        f"-PnumLooksStr=\'{speckle_dict['num_of_looks']}\' "
        f"-PsigmaStr=\'{speckle_dict['sigma']}\' "
        f"-PtargetWindowSizeStr=\"{speckle_dict['target_window_size']}\" "
        f"-PwindowSize=\"{speckle_dict['window_size']}\" "
        f"-t \'{out_stack}\' \'{in_stack}\' ")

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully applied multi-temporal speckle filtering')
    else:
        raise GPTRuntimeError(
            f'Multi-temporal Speckle Filter exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(out_stack)
    if return_code == 0:
        return str(out_stack.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')


def grd_subset_georegion(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's subset routine

    This function takes an OST imported/slice assembled frame and
    subsets it according to the coordinates given in the region

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    try:
        aoi = config_dict['aoi']
    except KeyError:
        aoi = ''

    logger.debug('Subsetting imported imagery.')

    # extract window from scene
    command = (f'{GPT_FILE} Subset -x -q {2*cpus} '
               f'-PcopyMetadata=true '
               f'-PgeoRegion=\'{aoi}\' '
               f'-Ssource=\'{str(infile)}\' '
               f'-t \'{str(outfile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully subset the product.')
    else:
        raise GPTRuntimeError(f'Subsetting exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
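
# The aoi value passed to -PgeoRegion is a WKT geometry string; a
# hypothetical example for a small bounding box:
#
# config_dict['aoi'] = (
#     'POLYGON ((9.5 45.5, 9.8 45.5, 9.8 45.7, 9.5 45.7, 9.5 45.5))'
# )
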
def multi_look(infile, outfile, logfile, config_dict):
    """

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    ard = config_dict['processing']['single_ARD']
    cpus = config_dict['snap_cpu_parallelism']
    ml_factor = int(int(ard['resolution']) / 10)

    logger.debug(f'Multi-looking the image with {ml_factor} looks in '
                 f'azimuth and {ml_factor} looks in range.')

    # construct command string
    command = (f'{GPT_FILE} Multilook -x -q {2*cpus} '
               f'-PnAzLooks={ml_factor} '
               f'-PnRgLooks={ml_factor} '
               f'-t \'{str(outfile)}\' {str(infile)}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully multi-looked product.')
    else:
        raise GPTRuntimeError(
            f'Multi-look exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
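
# Worked example for the multi-look factor above, which assumes a 10 m
# native pixel spacing: a target resolution of 30 m gives
# ml_factor = int(30 / 10) = 3 looks in both azimuth and range.
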
def terrain_flattening(infile, outfile, logfile, config_dict):
    """Wrapper function to Snap's Terrain Flattening routine

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    logger.debug('Applying terrain flattening to calibrated product.')

    command = (f"{GPT_FILE} Terrain-Flattening -x -q {2*cpus} "
               f"-PdemName=\'{dem_dict['dem_name']}\' "
               f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
               f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
               f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
               f"-t \'{str(outfile)}\' \'{str(infile)}\'")

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully terrain-flattened product')
    else:
        raise GPTRuntimeError(
            f'Terrain Flattening exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')


def linear_to_db(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's LinearToFromdB routine

    This function takes an OST-calibrated Sentinel-1 product
    and converts it to dB.

    :param infile: path to a calibrated product in BEAM-Dimap format
    :param outfile: path to the output product (without extension)
    :param logfile: path to the file where SNAP's STDOUT/STDERR is written
    :param config_dict: an OST configuration dictionary
    :return: path to the output file with the '.dim' suffix
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Converting calibrated power image to dB scale.')

    # construct command string
    command = (f'{GPT_FILE} LinearToFromdB -x -q {2*cpus} '
               f'-t \'{str(outfile)}\' {str(infile)}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully converted product to dB scale.')
    else:
        raise GPTRuntimeError(f'dB Scaling exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
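
# For reference, SNAP's LinearToFromdB applies the standard power-to-dB
# conversion. A pure-Python sketch of the forward direction (the SNAP
# operator also supports the inverse); added for illustration only:
import math


def to_db(linear_power):
    """Convert linear backscatter power to dB: 10 * log10(x)."""
    return 10 * math.log10(linear_power)


# to_db(0.1) evaluates to -10.0
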
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False,
                 ncores=os.cpu_count()):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to full master SLC scene
        swath (str): subswath
        master_burst_nr (int): index number of the master burst
        master_burst_id (str): unique id of the master burst
        proc_file (str): path to the ARD processing parameters file
        out_dir (str): directory for the final output products
        temp_dir (str): directory for temporary products
        slave_file (str): path to full slave SLC scene
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique id of the slave burst
        coherence (bool): whether to calculate coherence
        remove_slave_import (bool): whether to delete the imported slave
        ncores (int): number of cpus used - useful for parallel processing
    '''
    # coerce string flags (e.g. when called from the command line) to bool
    if isinstance(remove_slave_import, str):
        if remove_slave_import == 'True':
            remove_slave_import = True
        elif remove_slave_import == 'False':
            remove_slave_import = False
    if isinstance(coherence, str):
        if coherence == 'True':
            coherence = True
        elif coherence == 'False':
            coherence = False
    # load ards
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
     
    # ---------------------------------------------------------------------
    # 1 Import
    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            master_file, master_import, import_log,
            swath, master_burst_nr, polars, ncores
        )
        if return_code != 0:
            h.delete_dimap(master_import)
            return return_code

    imported = '{}.dim'.format(master_import)
    # ---------------------------------------------------------------------
    # 2 H-A-Alpha
    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = slc._ha_alpha(
            imported, out_haa, haa_log,
            ard['remove pol speckle'],
            ard['pol speckle filter'],
            ncores
        )

        # delete files in case of error
        if return_code != 0:
            h.delete_dimap(out_haa)
            h.delete_dimap(master_import)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(
            master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_haa), out_htc, haa_tc_log, 
            ard['resolution'], ard['dem'], ncores
        )

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)
        
        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.delete_dimap(out_htc)
            h.delete_dimap(master_import)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 3 Calibration
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = slc._calibration(
        imported, out_cal, cal_log, ard['product type'], ncores)

    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(out_cal)
        h.delete_dimap(master_import)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # ---------------------------------------------------------------------
    # 4 Speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(
            temp_dir, '{}_speckle_import'.format(master_burst_id)
        )
        speckle_log = opj(
            out_dir, '{}_speckle.err_log'.format(master_burst_id)
        )

        return_code = common._speckle_filter(
            '{}.dim'.format(out_cal), speckle_import, speckle_log,
            ard['speckle filter'], ncores
        )

        # remove input 
        h.delete_dimap(out_cal)

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(speckle_import)
            h.delete_dimap(master_import)
            return return_code

        # reset out_cal to the speckle-filtered product for the next step
        out_cal = speckle_import

    # ---------------------------------------------------------------------
    # 5 Terrain Flattening
    if ard['product type'] == 'RTC-gamma0':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(
            master_burst_id))
        # do the TF
        return_code = common._terrain_flattening(
            '{}.dim'.format(out_cal), out_rtc, rtc_log, ard['dem'], ncores
        )

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_rtc)
            h.delete_dimap(master_import)
            return return_code

        # set out_cal to out_rtc for further processing
        out_cal = out_rtc

    # ---------------------------------------------------------------------
    # 7 to dB scale
    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = common._linear_to_db(
            '{}.dim'.format(out_cal), out_db, db_log, ncores)

        # remove tmp files
        h.delete_dimap(out_cal)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_db)
            h.delete_dimap(master_import)
            return return_code

        # set out_cal to out_db for further processing
        out_cal = out_db
 
    # ---------------------------------------------------------------------
    # 8 Geocode backscatter
    if ard['product type'] != "Coherence_only":
        out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_cal), out_tc, tc_log,
            ard['resolution'], ard['dem'], ncores)

        # last check on backscatter data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # we move backscatter to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    # ---------------------------------------------------------------------
    # 9 Layover/Shadow mask
    if ard['create ls mask']:
        
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = common._ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                                      ard['resolution'], ard['dem'], ncores)

        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.delete_dimap(out_ls)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    if ard['product type'] != "Coherence_only":
        h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = slc._import(
            slave_file, slave_import, import_log, swath, slave_burst_nr,
            polars, ncores
        )

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        return_code = slc._coreg2('{}.dim'.format(master_import),
                                  '{}.dim'.format(slave_import),
                                  out_coreg, coreg_log,
                                  ard['dem'], ncores)

        # remove imports
        h.delete_dimap(master_import)
        
        if remove_slave_import is True:
            h.delete_dimap(slave_import)
        
        # delete output if command failed for some reason and return   
        if return_code != 0:
            h.delete_dimap(out_coreg)
            h.delete_dimap(slave_import)
            return return_code

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = slc._coherence('{}.dim'.format(out_coreg),
                                     out_coh, coh_log, coh_polars, ncores)

        # remove coreg tmp files
        h.delete_dimap(out_coreg)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_coh)
            h.delete_dimap(slave_import)
            return return_code

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = common._terrain_correction(
            '{}.dim'.format(out_coh), out_tc, tc_log, 
            ard['resolution'], ard['dem'], ncores)
        
        # remove tmp files
        h.delete_dimap(out_coh)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(out_tc)
            h.delete_dimap(slave_import)
            return return_code
        
        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.delete_dimap(out_tc)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

    # write out check file for tracking that it is processed
    with open(opj(out_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
    
    return return_code
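
# Usage sketch (hypothetical paths and burst indices): processing a master
# burst together with its repeat-pass slave burst for coherence.
#
# return_code = burst_to_ard(
#     master_file='/data/S1A_master.zip',
#     swath='IW1',
#     master_burst_nr=3,
#     master_burst_id='A117_IW1_7915_3',
#     proc_file='/data/ard.json',
#     out_dir='/data/out',
#     temp_dir='/data/tmp',
#     slave_file='/data/S1A_slave.zip',
#     slave_burst_nr=3,
#     slave_burst_id='A117_IW1_7915_3_slave',
#     coherence=True,
#     remove_slave_import=True,
# )
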
def ha_alpha(infile, outfile, logfile, config_dict):
    """A wrapper of SNAP H-A-alpha polarimetric decomposition

    This function takes an OST imported Sentinel-1 scene/burst
    and calulates the polarimetric decomposition parameters for
    the H-A-alpha decomposition.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    remove_pol_speckle = ard['remove_pol_speckle']
    pol_speckle_dict = ard['pol_speckle_filter']
    cpus = config_dict['snap_cpu_parallelism']

    if remove_pol_speckle:
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_Deb_Spk_Halpha.xml'
        )
        logger.debug(
            'Applying the polarimetric speckle filter and'
            ' calculating the H-alpha dual-pol decomposition'
        )

        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Pinput={str(infile)} '
            f'-Poutput={str(outfile)} '
            f"-Pfilter=\'{pol_speckle_dict['polarimetric_filter']}\' "
            f'-Pfilter_size=\'{pol_speckle_dict["filter_size"]}\' '
            f'-Pnr_looks={pol_speckle_dict["num_of_looks"]} '
            f'-Pwindow_size={pol_speckle_dict["window_size"]} '
            f'-Ptarget_window_size={pol_speckle_dict["target_window_size"]} '
            f'-Ppan_size={pol_speckle_dict["pan_size"]} '
            f'-Psigma={pol_speckle_dict["sigma"]}'
        )
    else:
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_Deb_Halpha.xml'
        )

        logger.debug('Calculating the H-alpha dual-pol decomposition')
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created H-A-Alpha product')
    else:
        raise GPTRuntimeError(
            f'H-A-Alpha exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def calibration(
        infile,
        outfile,
        logfile,
        config_dict
):
    """A wrapper around SNAP's radiometric calibration

    This function takes OST imported Sentinel-1 product and generates
    it to calibrated backscatter.
    3 different calibration modes are supported.
        - Radiometrically terrain corrected Gamma nought (RTC)
          NOTE: that the routine actually calibrates to bet0 and needs to
          be used together with _terrain_flattening routine
        - ellipsoid based Gamma nought (GTCgamma)
        - Sigma nought (GTCsigma).

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :param region:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = ard['dem']
    region = ''
    # calculate Multi-Look factors
    azimuth_looks = 1  # int(np.floor(ard['resolution'] / 10 ))
    range_looks = 5  # int(azimuth_looks * 5)

    # construct command dependent on selected product type
    if ard['product_type'] == 'RTC-gamma0':
        logger.debug('Calibrating the product to a RTC product.')

        # get graph for RTC generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalBeta_Deb_ML_TF_Sub.xml'
        )

        # construct command
        command = (
            f"{GPT_FILE} {graph} -x -q {2 * cpus} "
            f"-Prange_looks={range_looks} "
            f"-Pazimuth_looks={azimuth_looks} "
            f"-Pdem=\'{dem_dict['dem_name']}\' "
            f"-Pdem_file=\'{dem_dict['dem_file']}\' "
            f"-Pdem_nodata={dem_dict['dem_nodata']} "
            f"-Pdem_resampling={dem_dict['dem_resampling']} "
            f"-Pregion=\'{region}\' "
            f"-Pinput={str(infile)} "
            f"-Poutput={str(outfile)}"
        )

    elif ard['product_type'] == 'GTC-gamma0':
        logger.debug('Calibrating the product to a GTC product (Gamma0).')

        # get graph for GTC-gamma0 generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalGamma_Deb_ML_Sub.xml'
        )

        # construct command
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Prange_looks={range_looks} '
            f'-Pazimuth_looks={azimuth_looks} '
            f'-Pregion="{region}" '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )

    elif ard['product_type'] == 'GTC-sigma0':
        logger.debug('Calibrating the product to a GTC product (Sigma0).')

        # get graph for GTC-sigma0 generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalSigma_Deb_ML_Sub.xml'
        )

        # construct command
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Prange_looks={range_looks} '
            f'-Pazimuth_looks={azimuth_looks} '
            f'-Pregion="{region}" '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )
    else:
        raise TypeError('Wrong product type selected.')

    logger.debug(f'Command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully calibrated product')
    else:
        raise GPTRuntimeError(
            f'Calibration exited with error {return_code}. '
            f'See {logfile} for Snap\'s error output.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def terrain_correction(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's terrain or ellipsoid correction

    Based on the configuration parameters, either the Range-Doppler
    terrain correction or an ellipsoid correction is applied for
    geocoding a calibrated Sentinel-1 product.

    :param infile: path to a calibrated product in BEAM-Dimap format
    :param outfile: path to the output product (without extension)
    :param logfile: path to the file where SNAP's STDOUT/STDERR is written
    :param config_dict: an OST configuration dictionary
    :return: path to the output file with the '.dim' suffix
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    dem_dict = ard['dem']
    cpus = config_dict['snap_cpu_parallelism']

    # auto projections of snap
    if 42001 <= dem_dict['out_projection'] <= 97002:
        projection = f"AUTO:{dem_dict['out_projection']}"
    # epsg codes
    else:
        projection = f"EPSG:{dem_dict['out_projection']}"

    logger.debug('Geocoding product.')

    if ard['geocoding'] == 'terrain':
        command = (f"{GPT_FILE} Terrain-Correction -x -q {2*cpus} "
                   f"-PdemName=\'{dem_dict['dem_name']}\' "
                   f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
                   f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
                   f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
                   f"-PexternalDEMApplyEGM="
                   f"\'{str(dem_dict['egm_correction']).lower()}\' "
                   f"-PimgResamplingMethod=\'{dem_dict['image_resampling']}\' "
                   f"-PpixelSpacingInMeter={ard['resolution']} "
                   f"-PalignToStandardGrid=true "
                   f"-PmapProjection=\'{projection}\' "
                   f"-t \'{str(outfile)}\' \'{str(infile)}\' ")
    elif ard['geocoding'] == 'ellipsoid':
        command = (f"{GPT_FILE} Ellipsoid-Correction-RD -x -q {2*cpus} "
                   f"-PdemName=\'{dem_dict['dem_name']}\' "
                   f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
                   f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
                   f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
                   f"-PexternalDEMApplyEGM="
                   f"\'{str(dem_dict['egm_correction']).lower()}\' "
                   f"-PimgResamplingMethod=\'{dem_dict['image_resampling']}\' "
                   f"-PpixelSpacingInMeter={ard['resolution']} "
                   f"-PalignToStandardGrid=true "
                   f"-PmapProjection=\'{projection}\' "
                   f"-t \'{str(outfile)}\' \'{str(infile)}\' ")
    else:
        raise ValueError(
            'Geocoding method should be either \'terrain\' or \'ellipsoid\'.')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully geocoded product')
    else:
        raise GPTRuntimeError(f'Geocoding exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
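
# The projection selection above distinguishes SNAP's AUTO codes
# (42001-97002) from EPSG codes. A standalone sketch of the rule, added
# here only for illustration:
def snap_projection(out_projection: int) -> str:
    """Return the SNAP map projection string for a numeric code."""
    if 42001 <= out_projection <= 97002:
        return f'AUTO:{out_projection}'
    return f'EPSG:{out_projection}'


assert snap_projection(4326) == 'EPSG:4326'
assert snap_projection(42001) == 'AUTO:42001'
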
def grd_frame_import(infile, outfile, logfile, config_dict):
    """A wrapper of SNAP import of a single Sentinel-1 GRD product

    This function takes an original Sentinel-1 scene (either zip or
    SAFE format), updates the orbit information (does not fail if not
    available), removes the thermal noise and stores it as a SNAP
    compatible BEAM-Dimap format.

    :param infile: Sentinel-1 GRD product in zip or SAFE format
    :type infile: str/Path
    :param outfile: path to the output product (without extension)
    :type outfile: str/Path
    :param logfile: path to the file where SNAP's STDOUT/STDERR is written
    :param config_dict: an OST configuration dictionary
    :type config_dict: dict
    :return: path to the output file with the '.dim' suffix
    """

    if isinstance(infile, str):
        infile = Path(infile)

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['polarisation'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']
    subset = config_dict['subset']

    try:
        aoi = config_dict['aoi']
    except KeyError:
        aoi = ''

    logger.debug(f'Importing {infile.name} by applying precise orbit file and '
                 f'removing thermal noise')

    # get path to graph
    if subset:
        graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/1_AO_TNR_SUB.xml')
        # construct command
        command = (f'{GPT_FILE} {graph} -x -q {2 * cpus} '
                   f'-Pinput=\'{str(infile)}\' '
                   f'-Pregion=\'{aoi}\' '
                   f'-Ppolarisation={polars} '
                   f'-Poutput=\'{str(outfile)}\'')
    else:
        # construct path to graph
        graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/1_AO_TNR.xml')
        # construct command
        command = (f'{GPT_FILE} {graph} -x -q {2 * cpus} '
                   f'-Pinput=\'{str(infile)}\' '
                   f'-Ppolarisation={polars} '
                   f'-Poutput=\'{str(outfile)}\'')

    # run command
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully imported GRD product')
    else:
        raise GPTRuntimeError(
            f'GRD frame import exited with error {return_code}. '
            f'See {logfile} for Snap\'s error output.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')


def burst_import(
        infile,
        outfile,
        logfile,
        swath,
        burst,
        config_dict
):
    """A wrapper of SNAP import of a single Sentinel-1 SLC burst

    This function takes an original Sentinel-1 scene (either zip or
    SAFE format), updates the orbit information (does not fail if not
    available), and extracts a single burst based on the
    given input parameters.

    :param infile: path to the Sentinel-1 SLC scene (zip or SAFE format)
    :param outfile: path to the output product (without extension)
    :param logfile: path to the file where SNAP's STDOUT/STDERR is written
    :param swath: subswath of the burst (e.g. IW1)
    :param burst: burst number within the subswath
    :param config_dict: an OST configuration dictionary
    :return: path to the output file with the '.dim' suffix
    """

    # get polarisations to import
    ard = config_dict['processing']['single_ARD']
    bs_polar = ard['polarisation'].replace(' ', ',')
    coh_polar = ard['coherence_bands'].replace(' ', ',')
    subset = config_dict['subset']

    region = config_dict['aoi'] if subset else ''

    if ard['coherence']:
        polars = bs_polar if len(bs_polar) >= len(coh_polar) else coh_polar
    else:
        polars = bs_polar

    # get cpus
    cpus = config_dict['snap_cpu_parallelism']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_BurstSplit_AO.xml')

    logger.debug(
        f'Importing Burst {burst} from Swath {swath} from scene {infile.name}'
    )

    command = (
        f'{GPT_FILE} {graph} -x -q {2 * cpus} '
        f'-Pinput={str(infile)} '
        f'-Ppolar={polars} '
        f'-Pswath={swath} '
        f'-Pburst={burst} '
        f'-Pregion=\'{region}\' '
        f'-Poutput={str(outfile)}'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully imported burst.')
    else:
        raise GPTRuntimeError(
            f'Frame import exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )


def grd_to_ard(filelist, 
               output_dir, 
               file_id, 
               temp_dir, 
               proc_file,
               subset=None):
    '''The main function for the GRD to ARD generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The function can handle multiple inputs of the same acquisition,
    given that they are consecutive data takes.

    Args:
        filelist (list): must be a list with one or more absolute
                  paths to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the folder
                  where temporary files should be written
        proc_file: os.path object or string pointing to the ARD
                   processing parameters file
        subset: subset area as a geoRegion string (optional)

    Returns:
        nothing

    Notes:
        no explicit return value, since the output file is our actual return
    '''

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        polars = ard['polarisation'].replace(' ', '')
        
    # ---------------------------------------------------------------------
    # 1 Import
    
    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir, '{}_imported'.format(
                os.path.basename(file)[:-5]))
            logfile = opj(output_dir, '{}.Import.errLog'.format(
                os.path.basename(file)[:-5]))
            
            return_code = _grd_frame_import(file, grd_import, logfile, polars)
            if return_code != 0:
                h.delete_dimap(grd_import)
                return return_code

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}._slice_assembly.errLog'.format(file_id))
        return_code = _slice_assembly(scenelist, grd_import, logfile)
        
        # delete inputs
        for file in filelist:
            h.delete_dimap(opj(temp_dir, '{}_imported'.format(
                os.path.basename(str(file))[:-5])))
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(grd_import)
            return return_code

        # subset mode after slice assembly
        if subset:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            return_code = _grd_subset_georegion('{}.dim'.format(grd_import), 
                                                grd_subset, logfile, subset)
            
            # delete slice assembly input to subset
            h.delete_dimap(grd_import)
            
            # delete output if command failed for some reason and return
            if return_code != 0:
                h.delete_dimap(grd_subset)
                return return_code
            
    # single scene case
    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            return_code = _grd_frame_import(filelist[0], grd_import, logfile, 
                                            polars)
        else:
            return_code = _grd_frame_import_subset(filelist[0], grd_import, 
                                                   subset, logfile, 
                                                   polars)
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(grd_import)
            return return_code
    
    # ---------------------------------------------------------------------
    # 2 GRD Border Noise
    if ard['remove border noise'] and not subset:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(opj(
                    temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # set input for next step
    infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
    
    # ---------------------------------------------------------------------
    # 3 Calibration
    if ard['product type'] == 'GTC-sigma0':
        calibrate_to = 'sigma0'
    elif ard['product type'] == 'GTC-gamma0':
        calibrate_to = 'gamma0'
    elif ard['product type'] == 'RTC-gamma0':
        calibrate_to = 'beta0'
    else:
        raise TypeError('Wrong product type selected.')
       
    calibrated = opj(temp_dir, '{}_cal'.format(file_id))
    logfile = opj(output_dir, '{}.Calibration.errLog'.format(file_id))
    return_code = common._calibration(infile, calibrated, logfile, calibrate_to)
    
    # delete input
    h.delete_dimap(infile[:-4])
    
    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(calibrated)
        return return_code
    
    # input for next step
    infile = '{}.dim'.format(calibrated)
    
    # ---------------------------------------------------------------------
    # 4 Multi-looking
    if int(ard['resolution']) >= 20:
        # calculate the multi-look factor
        ml_factor = int(int(ard['resolution']) / 10)
        
        multi_looked = opj(temp_dir, '{}_ml'.format(file_id))
        logfile = opj(output_dir, '{}.multilook.errLog'.format(file_id))
        return_code = common._multi_look(infile, multi_looked, logfile,
                                         ml_factor, ml_factor)

        # delete input
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(multi_looked)
            return return_code
            
        # define input for next step
        infile = '{}.dim'.format(multi_looked)
    
    # ---------------------------------------------------------------------
    # 5 Layover shadow mask
    if ard['create ls mask']:
        ls_mask = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        return_code = common._ls_mask(infile, ls_mask, logfile,
                                      ard['resolution'], ard['dem'])

        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(ls_mask)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(ls_mask, test_stats=False)
        if return_code != 0:
            h.delete_dimap(ls_mask)
            return return_code
        
        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(ls_mask), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(ls_mask), '{}.data'.format(out_ls_mask))
        
    # ---------------------------------------------------------------------
    # 6 Speckle filtering
    if ard['remove speckle']:
        
        logfile = opj(output_dir, '{}.Speckle.errLog'.format(file_id))
        filtered = opj(temp_dir, '{}_spk'.format(file_id))

        # run processing
        return_code = common._speckle_filter(infile, filtered, logfile,
                                             ard['speckle filter'])

        # delete input
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(filtered)
            return return_code
       
        # define input for next step
        infile = '{}.dim'.format(filtered)
        
    # ---------------------------------------------------------------------
    # 7 Terrain flattening
    if ard['product type'] == 'RTC-gamma0':
        flattened = opj(temp_dir, '{}_flat'.format(file_id))
        logfile = opj(output_dir, '{}.tf.errLog'.format(file_id))
        return_code = common._terrain_flattening(infile, flattened, logfile,
                                                 ard['dem'])

        # delete input file
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(flattened)
            return return_code
        
        # define input for next step
        infile = '{}.dim'.format(flattened)

    # ---------------------------------------------------------------------
    # 8 Linear to db
    if ard['to db']:
        db_scaled = opj(temp_dir, '{}_db'.format(file_id))
        logfile = opj(output_dir, '{}.db.errLog'.format(file_id))
        return_code = common._linear_to_db(infile, db_scaled, logfile)
        
        # delete input file
        h.delete_dimap(infile[:-4])
        
        # delete output if command failed for some reason and return
        if return_code != 0:
            h.delete_dimap(db_scaled)
            return return_code
        
        # set input for next step
        infile = '{}.dim'.format(db_scaled)

    # ---------------------------------------------------------------------
    # 9 Geocoding
    geocoded = opj(temp_dir, '{}_bs'.format(file_id))
    logfile = opj(output_dir, '{}_bs.errLog'.format(file_id))
    return_code = common._terrain_correction(
        infile, geocoded, logfile, ard['resolution'], ard['dem']
    )
    
    # delete input file
    h.delete_dimap(infile[:-4])
    
    # delete output if command failed for some reason and return
    if return_code != 0:
        h.delete_dimap(geocoded)
        return return_code

    # define final destination
    out_final = opj(output_dir, '{}.bs'.format(file_id))

    # ---------------------------------------------------------------------
    # 10 Checks and move to output directory
    # remove output file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)   
    
    # check final output
    return_code = h.check_out_dimap(geocoded)
    if return_code != 0:
        h.delete_dimap(geocoded)
        return return_code
    
    # move to final destination
    shutil.move('{}.dim'.format(geocoded), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(geocoded), '{}.data'.format(out_final))

    # write processed file to keep track of files already processed
    with open(opj(output_dir, '.processed'), 'w') as file:
        file.write('passed all tests \n')
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 proc_file,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to the full master SLC scene
        swath (str): subswath the burst belongs to (e.g. IW1)
        master_burst_nr (int): index number of the master burst
        master_burst_id (str): unique identifier of the master burst
        proc_file (str): path to the JSON file with the ARD parameters
        out_dir (str): directory for final products and log files
        temp_dir (str): directory for intermediate products
        slave_file (str): path to the full slave SLC scene
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique identifier of the slave burst
        coherence (bool): whether to calculate coherence with the slave burst
        remove_slave_import (bool): whether to delete the imported slave

    '''

    # load ARD parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
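
    # the JSON config is expected to look roughly like this (keys as used
    # throughout this routine; the values shown are hypothetical):
    # {"processing parameters": {"single ARD": {
    #     "polarisation": "VV VH", "resolution": 20, "product type": "RTC",
    #     "H-A-Alpha": false, "remove pol speckle": false,
    #     "remove speckle": false, "to db": false, "create ls mask": true,
    #     "coherence bands": "VV VH", "dem": "SRTM 1sec HGT"}}}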

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if ard['H-A-Alpha']:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, ard['remove pol speckle'])

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_pol'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc, opj(out_dir, '{}_pol'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, ard['product type'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if ard['remove speckle']:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if ard['product type'] == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    if ard['to db']:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_bs'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_bs_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_bs'.format(master_burst_id)))

    if ard['create ls mask']:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               ard['resolution'], ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        polars = ard['polarisation'].replace(' ', '')
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr, polars)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        #filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        #filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        coh_polars = ard['coherence bands'].replace(' ', '')
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log,
                                 coh_polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, ard['resolution'],
                                          ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
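
# Minimal usage sketch for the routine above, assuming it is run from this
# module. All paths, the burst index and the burst ID below are hypothetical
# placeholders; proc_file points at the JSON configuration the routine loads
# itself.
if __name__ == '__main__':
    rc = burst_to_ard(
        master_file='/data/S1_SLC_scene.zip',   # hypothetical SLC scene
        swath='IW1',
        master_burst_nr=3,                      # hypothetical burst index
        master_burst_id='A044_IW1_B0003',       # hypothetical burst ID
        proc_file='/data/ard_parameters.json',  # hypothetical ARD config
        out_dir='/data/out',
        temp_dir='/data/tmp',
    )
    print('burst_to_ard finished with return code', rc)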
def grd_to_ard(filelist,
               output_dir,
               file_id,
               temp_dir,
               proc_file,
               subset=None):
    '''The main function for the GRD to ARD generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    The workflow can be adapted by changing the parameters in the
    processing file. The function can handle multiple inputs of the same
    acquisition, provided they are consecutive data takes.

    Args:
        filelist (list): one or more absolute paths to GRD scene(s)
        output_dir: os.path object or string of the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string of the folder for
                  intermediate products
        proc_file (str): path to the JSON file with the ARD parameters
        subset: optional geo-region to subset the scene(s) to

    Returns:
        nothing on success; a non-zero return code if a processing
        step fails

    Notes:
        no explicit return value on success, since the output file is
        our actual return
    '''

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        polars = ard['polarisation'].replace(' ', '')

    # slice assembly if more than one scene
    if len(filelist) > 1:
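        # each GRD frame is imported separately first and then assembled
        # into a single product covering the full, consecutive data take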

        for file in filelist:

            grd_import = opj(temp_dir,
                             '{}_imported'.format(os.path.basename(file)[:-5]))
            logfile = opj(
                output_dir,
                '{}.Import.errLog'.format(os.path.basename(file)[:-5]))

            return_code = _grd_frame_import(file, grd_import, logfile, polars)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.SliceAssembly.errLog'.format(file_id))
        return_code = _slice_assembly(
            scenelist,
            grd_import,
            logfile,
        )
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        for file in filelist:
            h.delete_dimap(
                opj(temp_dir,
                    '{}_imported'.format(os.path.basename(str(file))[:-5])))

        if subset:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            return_code = _grd_subset_georegion('{}.dim'.format(grd_import),
                                                grd_subset, logfile, subset)
            if return_code != 0:
                h.remove_folder_content(temp_dir)
                return return_code

            # delete slice assembly
            h.delete_dimap(grd_import)

    # single scene case
    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            return_code = _grd_frame_import(filelist[0], grd_import, logfile,
                                            polars)
        else:
            return_code = _grd_frame_import_subset(filelist[0], grd_import,
                                                   subset, logfile, polars)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code
    # ---------------------------------------------------------------------
    # Remove the grd border noise from existent channels (OST routine)

    if ard['remove border noise'] and not subset:
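        # note: skipped for subsets, since the original image borders
        # may no longer be part of the subset product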
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(
                opj(temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Removing border noise for the {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # set new infile
    infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
    # -------------------------------------------
    # in case we want to apply Speckle filtering
    if ard['remove speckle']:

        logfile = opj(temp_dir, '{}.Speckle.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_spk'.format(file_id))

        # run processing
        return_code = _grd_speckle_filter(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete input
        h.delete_dimap(infile[:-4])
        # define infile for next processing step
        infile = '{}.dim'.format(outfile)

    # ----------------------
    # do the calibration
    outfile = opj(temp_dir, '{}.{}'.format(file_id, ard['product type']))
    logfile = opj(output_dir, '{}.Backscatter.errLog'.format(file_id))
    return_code = _grd_backscatter(infile, outfile, logfile, ard['dem'],
                                   ard['product type'])

    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # delete input file
    h.delete_dimap(infile[:-4])

    # input file for the following step
    infile = '{}.dim'.format(outfile)

    # ----------------------------------------------
    # let's create a Layover shadow mask if needed
    if ard['create ls mask']:
        outfile = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        return_code = _grd_ls_mask(infile, outfile, logfile, ard['resolution'],
                                   ard['dem'])
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(outfile, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(outfile), '{}.data'.format(out_ls_mask))

    # to db
    if ard['to db']:
        logfile = opj(output_dir, '{}.linToDb.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_{}_db'.format(file_id,
                                                  ard['product type']))
        return_code = _grd_to_db(infile, outfile, logfile)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # delete
        h.delete_dimap(infile[:-4])
        # re-define infile
        infile = opj(temp_dir, '{}_{}_db.dim'.format(file_id,
                                                     ard['product type']))

    # -----------------------
    # let's geocode the data
    # infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))
    outfile = opj(temp_dir, '{}.bs'.format(file_id))
    logfile = opj(output_dir, '{}.bs.errLog'.format(file_id))
    return_code = _grd_terrain_correction(infile, outfile, logfile,
                                          ard['resolution'], ard['dem'])
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # remove calibrated files
    h.delete_dimap(infile[:-4])

    # move to final destination
    out_final = opj(output_dir, '{}.bs'.format(file_id))

    # remove file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)

    return_code = h.check_out_dimap(outfile)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(outfile), '{}.data'.format(out_final))

    # write file, so we know this scene has been successfully processed
    if return_code == 0:
        check_file = opj(output_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(output_dir)
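
# Minimal usage sketch for grd_to_ard. Every path, the file_id prefix and
# the subset value below are hypothetical placeholders.
if __name__ == '__main__':
    grd_to_ard(
        filelist=['/data/S1_GRD_scene.zip'],    # hypothetical GRD scene
        output_dir='/data/out',
        file_id='S1_GRD_scene',                 # hypothetical output prefix
        temp_dir='/data/tmp',
        proc_file='/data/ard_parameters.json',  # hypothetical ARD config
        subset=None,                            # or a geo-region to subset to
    )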
def burst_to_ard(master_file,
                 swath,
                 master_burst_nr,
                 master_burst_id,
                 out_dir,
                 temp_dir,
                 slave_file=None,
                 slave_burst_nr=None,
                 slave_burst_id=None,
                 coherence=False,
                 polarimetry=False,
                 pol_speckle_filter=False,
                 resolution=20,
                 product_type='GTCgamma',
                 speckle_filter=False,
                 to_db=False,
                 ls_mask_create=False,
                 dem='SRTM 1sec HGT',
                 remove_slave_import=False):
    '''The main routine to turn a burst into an ARD product

    Args:
        master_file (str): path to the full master SLC scene
        swath (str): subswath the burst belongs to (e.g. IW1)
        master_burst_nr (int): index number of the master burst
        master_burst_id (str): unique identifier of the master burst
        out_dir (str): directory for final products and log files
        temp_dir (str): directory for intermediate products
        slave_file (str): path to the full slave SLC scene
        slave_burst_nr (int): index number of the slave burst
        slave_burst_id (str): unique identifier of the slave burst
        coherence (bool): whether to calculate coherence with the slave burst
        polarimetry (bool): whether to create an H-A-Alpha decomposition
        pol_speckle_filter (bool): whether to apply polarimetric speckle
                                   filtering before the decomposition
        resolution (int): resolution of the output product in metres
        product_type (str): calibration product type, e.g. GTCgamma or RTC
        speckle_filter (bool): whether to apply speckle filtering
        to_db (bool): whether to scale the backscatter to dB
        ls_mask_create (bool): whether to create a layover/shadow mask
        dem (str): name of the DEM to use
        remove_slave_import (bool): whether to delete the imported slave

    '''

    # import master
    master_import = opj(temp_dir, '{}_import'.format(master_burst_id))

    if not os.path.exists('{}.dim'.format(master_import)):
        import_log = opj(out_dir, '{}_import.err_log'.format(master_burst_id))
        return_code = _import(master_file, master_import, import_log, swath,
                              master_burst_nr)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

    if polarimetry:
        # create HAalpha file
        out_haa = opj(temp_dir, '{}_h'.format(master_burst_id))
        haa_log = opj(out_dir, '{}_haa.err_log'.format(master_burst_id))
        return_code = _ha_alpha('{}.dim'.format(master_import), out_haa,
                                haa_log, pol_speckle_filter)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # geo code HAalpha
        out_htc = opj(temp_dir, '{}_ha_alpha'.format(master_burst_id))
        haa_tc_log = opj(out_dir, '{}_haa_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_haa), out_htc,
                                          haa_tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on the output files
        return_code = h.check_out_dimap(out_htc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_htc,
                     opj(out_dir, '{}_ha_alpha'.format(master_burst_id)))

        # remove HAalpha tmp files
        h.delete_dimap(out_haa)

    # calibrate
    out_cal = opj(temp_dir, '{}_cal'.format(master_burst_id))
    cal_log = opj(out_dir, '{}_cal.err_log'.format(master_burst_id))
    return_code = _calibration('{}.dim'.format(master_import), out_cal,
                               cal_log, product_type)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    if not coherence:
        #  remove imports
        h.delete_dimap(master_import)

    # speckle filtering
    if speckle_filter:
        speckle_import = opj(temp_dir,
                             '{}_speckle_import'.format(master_burst_id))
        speckle_log = opj(out_dir,
                          '{}_speckle.err_log'.format(master_burst_id))
        return_code = _speckle_filter('{}.dim'.format(out_cal), speckle_import,
                                      speckle_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove temp file
        h.delete_dimap(out_cal)

        # reset out_cal to the speckle-filtered product for the following steps
        out_cal = speckle_import

    # do terrain flattening in case it is selected
    if product_type == 'RTC':
        # define outfile
        out_rtc = opj(temp_dir, '{}_rtc'.format(master_burst_id))
        rtc_log = opj(out_dir, '{}_rtc.err_log'.format(master_burst_id))
        # do the TF
        return_code = _terrain_flattening('{}.dim'.format(out_cal), out_rtc,
                                          rtc_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_rtc to out_cal for further processing
        out_cal = out_rtc

    if to_db:
        out_db = opj(temp_dir, '{}_cal_db'.format(master_burst_id))
        db_log = opj(out_dir, '{}_cal_db.err_log'.format(master_burst_id))
        return_code = _linear_to_db('{}.dim'.format(out_cal), out_db, db_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove tmp files
        h.delete_dimap(out_cal)
        # set out_cal to out_db for further processing
        out_cal = out_db

    # geo code backscatter products
    out_tc = opj(temp_dir, '{}_BS'.format(master_burst_id))
    tc_log = opj(out_dir, '{}_BS_tc.err_log'.format(master_burst_id))
    return_code = _terrain_correction('{}.dim'.format(out_cal), out_tc, tc_log,
                                      resolution, dem)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # last check on backscatter data
    return_code = h.check_out_dimap(out_tc)
    if return_code != 0:
        h.remove_folder_content(temp_dir)
        return return_code

    # we move backscatter to final destination
    h.move_dimap(out_tc, opj(out_dir, '{}_BS'.format(master_burst_id)))

    if ls_mask_create:
        # create LS map
        out_ls = opj(temp_dir, '{}_LS'.format(master_burst_id))
        ls_log = opj(out_dir, '{}_LS.err_log'.format(master_burst_id))
        return_code = _ls_mask('{}.dim'.format(out_cal), out_ls, ls_log,
                               resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on ls data
        return_code = h.check_out_dimap(out_ls, test_stats=False)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move ls data to final destination
        h.move_dimap(out_ls, opj(out_dir, '{}_LS'.format(master_burst_id)))

    # remove calibrated files
    h.delete_dimap(out_cal)

    if coherence:

        # import slave
        slave_import = opj(temp_dir, '{}_import'.format(slave_burst_id))
        import_log = opj(out_dir, '{}_import.err_log'.format(slave_burst_id))
        return_code = _import(slave_file, slave_import, import_log, swath,
                              slave_burst_nr)

        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # co-registration
        # filelist = ['{}.dim'.format(master_import),
        #            '{}.dim'.format(slave_import)]
        # filelist = '\'{}\''.format(','.join(filelist))
        out_coreg = opj(temp_dir, '{}_coreg'.format(master_burst_id))
        coreg_log = opj(out_dir, '{}_coreg.err_log'.format(master_burst_id))
        # return_code = _coreg2(filelist, out_coreg, coreg_log, dem)
        return_code = _coreg2('{}.dim'.format(master_import),
                              '{}.dim'.format(slave_import), out_coreg,
                              coreg_log, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        #  remove imports
        h.delete_dimap(master_import)

        if remove_slave_import:
            h.delete_dimap(slave_import)

        # calculate coherence and deburst
        out_coh = opj(temp_dir, '{}_c'.format(master_burst_id))
        coh_log = opj(out_dir, '{}_coh.err_log'.format(master_burst_id))
        return_code = _coherence('{}.dim'.format(out_coreg), out_coh, coh_log)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # remove coreg tmp files
        h.delete_dimap(out_coreg)

        # geocode
        out_tc = opj(temp_dir, '{}_coh'.format(master_burst_id))
        tc_log = opj(out_dir, '{}_coh_tc.err_log'.format(master_burst_id))
        return_code = _terrain_correction('{}.dim'.format(out_coh), out_tc,
                                          tc_log, resolution, dem)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # last check on coherence data
        return_code = h.check_out_dimap(out_tc)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            return return_code

        # move to final destination
        h.move_dimap(out_tc, opj(out_dir, '{}_coh'.format(master_burst_id)))

        # remove tmp files
        h.delete_dimap(out_coh)

    # write file, so we know this burst has been successfully processed
    if return_code == 0:
        check_file = opj(out_dir, '.processed')
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
    else:
        h.remove_folder_content(temp_dir)
        h.remove_folder_content(out_dir)

    return return_code
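
# Minimal usage sketch for this parameterised variant, here with a coherence
# pair; every path, index and ID below is a hypothetical placeholder.
if __name__ == '__main__':
    rc = burst_to_ard(
        master_file='/data/S1_SLC_master.zip',  # hypothetical master scene
        swath='IW1',
        master_burst_nr=3,                      # hypothetical burst index
        master_burst_id='A044_IW1_B0003',       # hypothetical burst ID
        out_dir='/data/out',
        temp_dir='/data/tmp',
        slave_file='/data/S1_SLC_slave.zip',    # hypothetical slave scene
        slave_burst_nr=3,                       # hypothetical burst index
        slave_burst_id='A044_IW1_B0003_S',      # hypothetical slave ID
        coherence=True,
        product_type='RTC',
        to_db=True,
        ls_mask_create=True,
    )
    print('burst_to_ard finished with return code', rc)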