Example #1
def ls_mask(infile, outfile, logfile, config_dict):
    """Wrapper function of a Snap graph for Layover/Shadow mask creation

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    dem_dict = ard['dem']
    cpus = config_dict['snap_cpu_parallelism']

    # auto projections of snap
    if 42001 <= dem_dict['out_projection'] <= 97002:
        projection = f"AUTO:{dem_dict['out_projection']}"
    # epsg codes
    else:
        projection = f"EPSG:{dem_dict['out_projection']}"

    logger.debug('Creating the Layover/Shadow mask')

    # get path to workflow xml
    graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/3_LSmap.xml')

    command = (
        f'{GPT_FILE} {graph} -x -q {2 * cpus} '
        f'-Pinput=\'{str(infile)}\' '
        f'-Presol={ard["resolution"]} '
        f'-Pdem=\'{dem_dict["dem_name"]}\' '
        f'-Pdem_file=\'{dem_dict["dem_file"]}\' '
        f'-Pdem_nodata=\'{dem_dict["dem_nodata"]}\' '
        f'-Pdem_resampling=\'{dem_dict["dem_resampling"]}\' '
        f'-Pimage_resampling=\'{dem_dict["image_resampling"]}\' '
        f'-Pegm_correction=\'{str(dem_dict["egm_correction"]).lower()}\' '
        f'-Pprojection=\'{projection}\' '
        f'-Poutput=\'{str(outfile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully created a Layover/Shadow mask')
    else:
        raise GPTRuntimeError(
            f'Layover/Shadow mask creation exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile, test_stats=False)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
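# Illustrative only: a minimal sketch of the config_dict structure that
# ls_mask (and the other wrappers below) reads. Key names mirror the lookups
# above; the values are hypothetical placeholders, not OST defaults.
example_config_dict = {
    'snap_cpu_parallelism': 2,
    'processing': {
        'single_ARD': {
            'resolution': 20,
            'dem': {
                'dem_name': 'SRTM 1Sec HGT',
                'dem_file': '',
                'dem_nodata': 0,
                'dem_resampling': 'BILINEAR_INTERPOLATION',
                'image_resampling': 'BICUBIC_INTERPOLATION',
                'egm_correction': False,
                # 42001-97002 becomes 'AUTO:<code>', anything else 'EPSG:<code>'
                'out_projection': 4326,
            },
        },
    },
}
# hypothetical call; paths are placeholders
# ls_dim = ls_mask(Path('scene_imported'), Path('scene_ls'), Path('ls.log'), example_config_dict)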
Example #2
def create_stack(
    file_list,
    out_stack,
    logfile,
    config_dict,
    polarisation=None,
    pattern=None,
):
    """

    :param file_list:
    :param out_stack:
    :param logfile:
    :param config_dict:
    :param polarisation:
    :param pattern:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Creating multi-temporal stack.')

    if pattern:
        graph = OST_ROOT.joinpath('graphs/S1_TS/1_BS_Stacking_HAalpha.xml')

        command = (f'{GPT_FILE} {graph} -x -q {2*cpus} '
                   f'-Pfilelist={file_list} '
                   f'-PbandPattern=\'{pattern}.*\' '
                   f'-Poutput={out_stack}')

    else:
        graph = OST_ROOT.joinpath('graphs/S1_TS/1_BS_Stacking.xml')

        command = (f'{GPT_FILE} {graph} -x -q {2*cpus} '
                   f'-Pfilelist={file_list} '
                   f'-Ppol={polarisation} '
                   f'-Poutput={out_stack}')

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created multi-temporal stack')
    else:
        raise GPTRuntimeError(
            f'Multi-temporal stack creation exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_msg = h.check_out_dimap(out_stack)
    if return_msg == 0:
        logger.debug('Product passed validity check.')
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_msg}')
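# Illustrative only: create_stack is driven either by a polarisation (plain
# backscatter stacks) or by a band pattern (e.g. for H-A-alpha layers); when
# both are given, pattern takes precedence. Paths and the 'Alpha' pattern
# below are hypothetical placeholders.
# create_stack(file_list, out_stack, logfile, config_dict, polarisation='VV')
# create_stack(file_list, out_stack, logfile, config_dict, pattern='Alpha')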
Example #3
def slice_assembly(filelist, outfile, logfile, config_dict):
    """Wrapper function around SNAP's slice assembly routine

    :param filelist: a string of a space separated list of OST imported
                     Sentinel-1 GRD product frames to be assembled
    :type filelist: str
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """
    '''A wrapper of SNAP's slice assembly routine

    This function assembles consecutive frames acquired at the same date.
    Can be either GRD or SLC products

    Args:
        filelist (str): a string of a space separated list of OST imported
                        Sentinel-1 product slices to be assembled
        outfile: string or os.path object for the output
                 file written in BEAM-Dimap format
        logfile: string or os.path object for the file
                 where SNAP'S STDOUT/STDERR is written to
    '''

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['polarisation'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Assembling consecutive frames:')

    # construct command
    command = (f'{GPT_FILE} SliceAssembly -x -q {2*cpus} '
               f'-PselectedPolarisations={polars} '
               f'-t \'{str(outfile)}\' {filelist}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully assembled products')
    else:
        raise GPTRuntimeError(
            f'Slice Assembly exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
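# Illustrative only: as the docstring notes, filelist is a single
# space-separated string of product paths, e.g. (hypothetical file names):
# frames = [Path('frame_1.dim'), Path('frame_2.dim')]
# filelist = ' '.join(str(frame) for frame in frames)
# assembled = slice_assembly(filelist, Path('assembled'), Path('sa.log'), config_dict)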
Example #4
def coreg(master, slave, outfile, logfile, config_dict):
    """A wrapper around SNAP's back-geocoding co-registration routine

    This function takes 2 OST imported Sentinel-1 SLC products
    (master and slave) and co-registers them properly.
    This routine is sufficient for coherence estimation,
    but not for InSAR, since the ESD refinement is not applied.

    :param master:
    :param slave:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    logger.debug(f'Co-registering {master} and {slave}')

    # construct command
    command = (
        f'{GPT_FILE} Back-Geocoding -x -q {2*cpus} '
        f'-PdemName=\'{dem_dict["dem_name"]}\' '
        f'-PdemResamplingMethod=\'{dem_dict["dem_resampling"]}\' '
        f'-PexternalDEMFile=\'{dem_dict["dem_file"]}\' '
        f'-PexternalDEMNoDataValue=\'{dem_dict["dem_nodata"]}\' '
        f'-PmaskOutAreaWithoutElevation=false '
        f'-PresamplingType=BILINEAR_INTERPOLATION '
        f'-t \'{str(outfile)}\''
        f' "{master}" "{slave}"'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully co-registered product.')
    else:
        raise GPTRuntimeError(
            f'Co-registration exited with an error {return_code}. '
            f'See {logfile} for Snap error output.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
Example #5
def coreg2(master, slave, outfile, logfile, config_dict):
    """A wrapper around SNAP's back-geocoding co-registration routine

    This function takes 2 OST imported Sentinel-1 SLC products
    (master and slave) and co-registers them properly.
    This routine is sufficient for coherence estimation,
    but not for InSAR, since the ESD refinement is not applied.

    :param master:
    :param slave:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_Coreg.xml')

    logger.debug(f'Co-registering {master} and {slave}')
    command = (
        f"{GPT_FILE} {graph} -x -q {2*cpus} "
        f" -Pmaster={master} "
        f" -Pslave={slave} "
        f" -Pdem_name=\'{dem_dict['dem_name']}\' "
        f" -Pdem_file=\'{dem_dict['dem_file']}\' "
        f" -Pdem_nodata=\'{dem_dict['dem_nodata']}\' "
        f" -Pdem_resampling=\'{dem_dict['dem_resampling']}\' "
        f" -Poutput={str(outfile)}"
    )

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully co-registered product.')
    else:
        raise GPTRuntimeError(
            f'Co-registration exited with an error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
Example #6
def speckle_filter(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's Speckle Filter function

    This function takes an OST imported Sentinel-1 product and applies
    the Speckle Filter as defined in the config dictionary.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    speckle_dict = config_dict['processing']['single_ARD']['speckle_filter']

    logger.debug('Applying speckle filtering.')

    # construct command string
    command = (
        f"{GPT_FILE} Speckle-Filter -x -q {2*cpus} "
        f"-PestimateENL=\'{speckle_dict['estimate_ENL']}\' "
        f"-PanSize=\'{speckle_dict['pan_size']}\' "
        f"-PdampingFactor=\'{speckle_dict['damping']}\' "
        f"-Penl=\'{speckle_dict['ENL']}\' "
        f"-Pfilter=\'{speckle_dict['filter']}\' "
        f"-PfilterSizeX=\'{speckle_dict['filter_x_size']}\' "
        f"-PfilterSizeY=\'{speckle_dict['filter_y_size']}\' "
        f"-PnumLooksStr=\'{speckle_dict['num_of_looks']}\' "
        f"-PsigmaStr=\'{speckle_dict['sigma']}\' "
        f"-PtargetWindowSizeStr=\"{speckle_dict['target_window_size']}\" "
        f"-PwindowSize=\"{speckle_dict['window_size']}\" "
        f"-t \'{str(outfile)}\' \'{str(infile)}\' ")

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully applied speckle filtering.')
    else:
        raise GPTRuntimeError(
            f'Speckle filtering exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
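# Illustrative only: a sketch of the speckle_filter block read above. Key
# names are taken from the lookups in the command string; the values are
# hypothetical placeholders, not OST defaults.
example_speckle_dict = {
    'filter': 'Refined Lee',
    'ENL': 1,
    'estimate_ENL': True,
    'damping': 2,
    'pan_size': 50,
    'sigma': 0.9,
    'filter_x_size': 3,
    'filter_y_size': 3,
    'num_of_looks': 1,
    'window_size': '7x7',
    'target_window_size': '3x3',
}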
Example #7
def calibration(infile, outfile, logfile, config_dict):
    """

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    product_type = config_dict['processing']['single_ARD']['product_type']
    cpus = config_dict['snap_cpu_parallelism']

    # transform calibration parameter to snap readable
    sigma0, beta0, gamma0 = 'false', 'false', 'false'

    if product_type == 'GTC-sigma0':
        sigma0 = 'true'
    elif product_type == 'GTC-gamma0':
        gamma0 = 'true'
    elif product_type == 'RTC-gamma0':
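        # RTC products are calibrated to beta0 here; the conversion to
        # terrain-flattened gamma0 is done later by the terrain flattening step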
        beta0 = 'true'
    else:
        raise TypeError('Wrong product type selected.')

    logger.debug(f'Calibrating the product to {product_type}.')

    # construct command string
    command = (f'{GPT_FILE} Calibration -x -q {2*cpus} '
               f' -PoutputBetaBand=\'{beta0}\' '
               f' -PoutputGammaBand=\'{gamma0}\' '
               f' -PoutputSigmaBand=\'{sigma0}\' '
               f' -t \'{str(outfile)}\' \'{str(infile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug(f'Calibration to {product_type} successful.')
    else:
        raise GPTRuntimeError(f'Calibration exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #8
def coherence(infile, outfile, logfile, config_dict):
    """A wrapper around SNAP's coherence routine

    This function takes a co-registered stack of 2 Sentinel-1 SLC products
    and calculates the coherence.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['coherence_bands'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_Coh_Deb.xml')

    logger.debug('Coherence estimation')

    command = (
        f"{GPT_FILE} {graph} -x -q {2 * cpus} "
        f"-Pazimuth_window={ard['coherence_azimuth']} "
        f"-Prange_window={ard['coherence_range']} "
        f'-Ppolar=\'{polars}\' '
        f'-Pinput="{str(infile)}" '
        f'-Poutput="{str(outfile)}"'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created coherence product.')
    else:
        raise GPTRuntimeError(
            f'Coherence exited with an error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
Example #9
def mt_speckle_filter(in_stack, out_stack, logfile, config_dict):
    """

    :param in_stack:
    :param out_stack:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    speckle_dict = (
        config_dict['processing']['time-series_ARD']['mt_speckle_filter'])

    # debug message
    logger.debug('Applying multi-temporal speckle filtering.')

    # construct command string
    command = (
        f"{GPT_FILE} Multi-Temporal-Speckle-Filter -x -q {2*cpus} "
        f"-PestimateENL=\'{speckle_dict['estimate_ENL']}\' "
        f"-PanSize=\'{speckle_dict['pan_size']}\' "
        f"-PdampingFactor=\'{speckle_dict['damping']}\' "
        f"-Penl=\'{speckle_dict['ENL']}\' "
        f"-Pfilter=\'{speckle_dict['filter']}\' "
        f"-PfilterSizeX=\'{speckle_dict['filter_x_size']}\' "
        f"-PfilterSizeY=\'{speckle_dict['filter_y_size']}\' "
        f"-PnumLooksStr=\'{speckle_dict['num_of_looks']}\' "
        f"-PsigmaStr=\'{speckle_dict['sigma']}\' "
        f"-PtargetWindowSizeStr=\"{speckle_dict['target_window_size']}\" "
        f"-PwindowSize=\"{speckle_dict['window_size']}\" "
        f"-t \'{out_stack}\' \'{in_stack}\' ")

    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully applied multi-temporal speckle filtering')
    else:
        raise GPTRuntimeError(
            f'Multi-temporal Speckle Filter exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(out_stack)
    if return_code == 0:
        return str(out_stack.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #10
def grd_subset_georegion(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's subset routine

    This function takes an OST imported/slice-assembled frame and
    subsets it to the AOI given in the configuration.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    try:
        aoi = config_dict['aoi']
    except KeyError:
        aoi = ''

    logger.debug('Subsetting imported imagery.')

    # extract window from scene
    command = (f'{GPT_FILE} Subset -x -q {2*cpus} '
               f'-PcopyMetadata=true '
               f'-PgeoRegion=\'{aoi}\' '
               f'-Ssource=\'{str(infile)}\' '
               f'-t \'{str(outfile)}\'')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully subsetted product.')
    else:
        raise GPTRuntimeError(f'Subsetting exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #11
def terrain_flattening(infile, outfile, logfile, config_dict):
    """Wrapper function to Snap's Terrain Flattening routine

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = config_dict['processing']['single_ARD']['dem']

    logger.debug('Applying terrain flattening to calibrated product.')

    command = (f"{GPT_FILE} Terrain-Flattening -x -q {2*cpus} "
               f"-PdemName=\'{dem_dict['dem_name']}\' "
               f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
               f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
               f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
               f"-t \'{str(outfile)}\' \'{str(infile)}\'")

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully terrain-flattened product')
    else:
        raise GPTRuntimeError(
            f'Terrain Flattening exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #12
def multi_look(infile, outfile, logfile, config_dict):
    """

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    ard = config_dict['processing']['single_ARD']
    cpus = config_dict['snap_cpu_parallelism']
    ml_factor = int(int(ard['resolution']) / 10)

    logger.debug(f'Multi-looking the image with {ml_factor} looks in '
                 f'azimuth and {ml_factor} looks in range.')

    # construct command string
    command = (f'{GPT_FILE} Multilook -x -q {2*cpus} '
               f'-PnAzLooks={ml_factor} '
               f'-PnRgLooks={ml_factor} '
               f'-t \'{str(outfile)}\' {str(infile)}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully multi-looked product.')
    else:
        raise GPTRuntimeError(
            f'Multi-look exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
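# Worked example (illustrative): ard['resolution'] = 20 gives
# ml_factor = int(int('20') / 10) = 2, i.e. 2 looks in azimuth and 2 in range;
# a resolution of 10 gives ml_factor = 1, i.e. effectively no multi-looking.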
Example #13
def linear_to_db(infile, outfile, logfile, config_dict):
    """Wrapper function around SNAP's linear to db routine

    This function takes an OST calibrated Sentinel-1 product
    and converts it to dB.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    cpus = config_dict['snap_cpu_parallelism']

    logger.debug('Converting calibrated power image to dB scale.')

    # construct command string
    command = (f'{GPT_FILE} LinearToFromdB -x -q {2*cpus} '
               f'-t \'{str(outfile)}\' {str(infile)}')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully converted product to dB scale.')
    else:
        raise GPTRuntimeError(f'dB Scaling exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #14
def burst_import(
        infile,
        outfile,
        logfile,
        swath,
        burst,
        config_dict
):
    """A wrapper of SNAP import of a single Sentinel-1 SLC burst

    This function takes an original Sentinel-1 scene (either zip or
    SAFE format), updates the orbit information (does not fail if not
    available), and extracts a single burst based on the
    given input parameters.

    :param infile:
    :param outfile:
    :param logfile:
    :param swath:
    :param burst:
    :param config_dict:
    :return:
    """

    # get polarisations to import
    ard = config_dict['processing']['single_ARD']
    bs_polar = ard['polarisation'].replace(' ', ',')
    coh_polar = ard['coherence_bands'].replace(' ', ',')
    subset = config_dict['subset']

    region = config_dict['aoi'] if subset else ''

    if ard['coherence']:
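        # use whichever polarisation string lists more bands (e.g. 'VV,VH'
        # over 'VV') so the import covers both backscatter and coherence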
        polars = bs_polar if len(bs_polar) >= len(coh_polar) else coh_polar
    else:
        polars = bs_polar

    # get cpus
    cpus = config_dict['snap_cpu_parallelism']

    # get path to graph
    graph = OST_ROOT.joinpath('graphs/S1_SLC2ARD/S1_SLC_BurstSplit_AO.xml')

    logger.debug(
        f'Importing Burst {burst} from Swath {swath} from scene {infile.name}'
    )

    command = (
        f'{GPT_FILE} {graph} -x -q {2 * cpus} '
        f'-Pinput={str(infile)} '
        f'-Ppolar={polars} '
        f'-Pswath={swath} '
        f'-Pburst={burst} '
        f'-Pregion=\'{region}\' '
        f'-Poutput={str(outfile)}'
    )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully imported burst.')
    else:
        raise GPTRuntimeError(
            f'Frame import exited with error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
Example #15
def ha_alpha(infile, outfile, logfile, config_dict):
    """A wrapper of SNAP H-A-alpha polarimetric decomposition

    This function takes an OST imported Sentinel-1 scene/burst
    and calculates the polarimetric decomposition parameters for
    the H-A-alpha decomposition.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    remove_pol_speckle = ard['remove_pol_speckle']
    pol_speckle_dict = ard['pol_speckle_filter']
    cpus = config_dict['snap_cpu_parallelism']

    if remove_pol_speckle:
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_Deb_Spk_Halpha.xml'
        )
        logger.debug(
            'Applying the polarimetric speckle filter and'
            ' calculating the H-alpha dual-pol decomposition'
        )

        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Pinput={str(infile)} '
            f'-Poutput={str(outfile)} '
            f"-Pfilter=\'{pol_speckle_dict['polarimetric_filter']}\' "
            f'-Pfilter_size=\'{pol_speckle_dict["filter_size"]}\' '
            f'-Pnr_looks={pol_speckle_dict["num_of_looks"]} '
            f'-Pwindow_size={pol_speckle_dict["window_size"]} '
            f'-Ptarget_window_size={pol_speckle_dict["target_window_size"]} '
            f'-Ppan_size={pol_speckle_dict["pan_size"]} '
            f'-Psigma={pol_speckle_dict["sigma"]}'
        )
    else:
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_Deb_Halpha.xml'
        )

        logger.debug('Calculating the H-alpha dual-pol decomposition')
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )

    logger.debug(f'Executing command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully created H/A/Alpha product')
    else:
        raise GPTRuntimeError(
            f'H/Alpha exited with an error {return_code}. '
            f'See {logfile} for Snap\'s error message.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
Example #16
def calibration(
        infile,
        outfile,
        logfile,
        config_dict
):
    """A wrapper around SNAP's radiometric calibration

    This function takes OST imported Sentinel-1 product and generates
    it to calibrated backscatter.
    3 different calibration modes are supported.
        - Radiometrically terrain corrected Gamma nought (RTC)
          NOTE: that the routine actually calibrates to bet0 and needs to
          be used together with _terrain_flattening routine
        - ellipsoid based Gamma nought (GTCgamma)
        - Sigma nought (GTCsigma).

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :param region:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    cpus = config_dict['snap_cpu_parallelism']
    dem_dict = ard['dem']
    region = ''
    # calculate Multi-Look factors
    azimuth_looks = 1  # int(np.floor(ard['resolution'] / 10 ))
    range_looks = 5  # int(azimuth_looks * 5)

    # construct command dependent on selected product type
    if ard['product_type'] == 'RTC-gamma0':
        logger.debug('Calibrating the product to an RTC product.')

        # get graph for RTC generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalBeta_Deb_ML_TF_Sub.xml'
        )

        # construct command
        command = (
            f"{GPT_FILE} {graph} -x -q {2 * cpus} "
            f"-Prange_looks={range_looks} "
            f"-Pazimuth_looks={azimuth_looks} "
            f"-Pdem=\'{dem_dict['dem_name']}\' "
            f"-Pdem_file=\'{dem_dict['dem_file']}\' "
            f"-Pdem_nodata={dem_dict['dem_nodata']} "
            f"-Pdem_resampling={dem_dict['dem_resampling']} "
            f"-Pregion=\'{region}\' "
            f"-Pinput={str(infile)} "
            f"-Poutput={str(outfile)}"
        )

    elif ard['product_type'] == 'GTC-gamma0':
        logger.debug('Calibrating the product to a GTC product (Gamma0).')

        # get graph for GTC-gamma0 generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalGamma_Deb_ML_Sub.xml'
        )

        # construct command
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Prange_looks={range_looks} '
            f'-Pazimuth_looks={azimuth_looks} '
            f'-Pregion="{region}" '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )

    elif ard['product_type'] == 'GTC-sigma0':
        logger.debug('Calibrating the product to a GTC product (Sigma0).')

        # get graph for GTC-sigma0 generation
        graph = OST_ROOT.joinpath(
            'graphs/S1_SLC2ARD/S1_SLC_TNR_CalSigma_Deb_ML_Sub.xml'
        )

        # construct command
        command = (
            f'{GPT_FILE} {graph} -x -q {2 * cpus} '
            f'-Prange_looks={range_looks} '
            f'-Pazimuth_looks={azimuth_looks} '
            f'-Pregion="{region}" '
            f'-Pinput="{str(infile)}" '
            f'-Poutput="{str(outfile)}"'
        )
    else:
        raise TypeError('Wrong product type selected.')

    logger.debug(f'Command: {command}')
    return_code = h.run_command(command, logfile)

    if return_code == 0:
        logger.debug('Successfully calibrated product')
    else:
        raise GPTRuntimeError(
            f'Calibration exited with an error {return_code}. '
            f'See {logfile} for Snap\'s error output.'
        )

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}'
        )
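# Summary (illustrative only): the three product types map onto these graphs,
# all sharing the -Prange_looks/-Pazimuth_looks/-Pregion/-Pinput/-Poutput
# parameters; only the RTC graph additionally takes the DEM settings.
# calibration_graphs = {
#     'RTC-gamma0': 'graphs/S1_SLC2ARD/S1_SLC_TNR_CalBeta_Deb_ML_TF_Sub.xml',
#     'GTC-gamma0': 'graphs/S1_SLC2ARD/S1_SLC_TNR_CalGamma_Deb_ML_Sub.xml',
#     'GTC-sigma0': 'graphs/S1_SLC2ARD/S1_SLC_TNR_CalSigma_Deb_ML_Sub.xml',
# }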
Example #17
def grd_frame_import(infile, outfile, logfile, config_dict):
    """A wrapper of SNAP import of a single Sentinel-1 GRD product

    This function takes an original Sentinel-1 scene (either zip or
    SAFE format), updates the orbit information (does not fail if not
    available), removes the thermal noise and stores it as a SNAP
    compatible BEAM-Dimap format.

    :param infile: Sentinel-1 GRD product in zip or SAFE format
    :type infile: str/Path
    :param outfile:
    :type outfile: str/Path
    :param logfile:
    :param config_dict: an OST configuration dictionary
    :type config_dict: dict
    :return:
    """

    if isinstance(infile, str):
        infile = Path(infile)

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    polars = ard['polarisation'].replace(' ', '')
    cpus = config_dict['snap_cpu_parallelism']
    subset = config_dict['subset']

    try:
        aoi = config_dict['aoi']
    except KeyError:
        aoi = ''

    logger.debug(f'Importing {infile.name} by applying the precise orbit file '
                 f'and removing thermal noise')

    # get path to graph
    if subset:
        graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/1_AO_TNR_SUB.xml')
        # construct command
        command = (f'{GPT_FILE} {graph} -x -q {2 * cpus} '
                   f'-Pinput=\'{str(infile)}\' '
                   f'-Pregion=\'{aoi}\' '
                   f'-Ppolarisation={polars} '
                   f'-Poutput=\'{str(outfile)}\'')
    else:
        # construct path to graph
        graph = OST_ROOT.joinpath('graphs/S1_GRD2ARD/1_AO_TNR.xml')
        # construct command
        command = (f'{GPT_FILE} {graph} -x -q {2 * cpus} '
                   f'-Pinput=\'{str(infile)}\' '
                   f'-Ppolarisation={polars} '
                   f'-Poutput=\'{str(outfile)}\'')

    # run command
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully imported GRD product')
    else:
        # read logfile
        raise GPTRuntimeError(
            f'GRD frame import exited with error {return_code}. '
            f'See {logfile} for Snap\'s error output.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
Example #18
def terrain_correction(infile, outfile, logfile, config_dict):
    """Wrapper function around Snap's terrain or ellipsoid correction

    Based on the configuration parameters either the
    Range-Doppler terrain correction or an Ellisoid correction
    is applied for geocoding a calibrated Sentinel-1 product.

    :param infile:
    :param outfile:
    :param logfile:
    :param config_dict:
    :return:
    """

    # get relevant config parameters
    ard = config_dict['processing']['single_ARD']
    dem_dict = ard['dem']
    cpus = config_dict['snap_cpu_parallelism']

    # auto projections of snap
    if 42001 <= dem_dict['out_projection'] <= 97002:
        projection = f"AUTO:{dem_dict['out_projection']}"
    # epsg codes
    else:
        projection = f"EPSG:{dem_dict['out_projection']}"

    logger.debug('Geocoding product.')

    if ard['geocoding'] == 'terrain':
        command = (f"{GPT_FILE} Terrain-Correction -x -q {2*cpus} "
                   f"-PdemName=\'{dem_dict['dem_name']}\' "
                   f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
                   f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
                   f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
                   f"-PexternalDEMApplyEGM="
                   f"\'{str(dem_dict['egm_correction']).lower()}\' "
                   f"-PimgResamplingMethod=\'{dem_dict['image_resampling']}\' "
                   f"-PpixelSpacingInMeter={ard['resolution']} "
                   f"-PalignToStandardGrid=true "
                   f"-PmapProjection=\'{projection}\' "
                   f"-t \'{str(outfile)}\' \'{str(infile)}\' ")
    elif ard['geocoding'] == 'ellipsoid':
        command = (f"{GPT_FILE} Ellipsoid-Correction-RD -x -q {2*cpus} "
                   f"-PdemName=\'{dem_dict['dem_name']}\' "
                   f"-PdemResamplingMethod=\'{dem_dict['dem_resampling']}\' "
                   f"-PexternalDEMFile=\'{dem_dict['dem_file']}\' "
                   f"-PexternalDEMNoDataValue={dem_dict['dem_nodata']} "
                   f"-PexternalDEMApplyEGM="
                   f"\'{str(dem_dict['egm_correction']).lower()}\' "
                   f"-PimgResamplingMethod=\'{dem_dict['image_resampling']}\' "
                   f"-PpixelSpacingInMeter={ard['resolution']} "
                   f"-PalignToStandardGrid=true "
                   f"-PmapProjection=\'{projection}\' "
                   f"-t \'{str(outfile)}\' \'{str(infile)}\' ")
    else:
        raise ValueError(
            'Geocoding method should be either \'terrain\' or \'ellipsoid\'.')

    # run command and get return code
    return_code = h.run_command(command, logfile)

    # handle errors and logs
    if return_code == 0:
        logger.debug('Successfully geocoded product')
    else:
        raise GPTRuntimeError(f'Geocoding exited with error {return_code}. '
                              f'See {logfile} for Snap\'s error message.')

    # do check routine
    return_code = h.check_out_dimap(outfile)
    if return_code == 0:
        return str(outfile.with_suffix('.dim'))
    else:
        raise NotValidFileError(
            f'Product did not pass file check: {return_code}')
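# Worked example (illustrative) of the projection switch used above and in
# ls_mask: out_projection = 4326 yields 'EPSG:4326' (geographic lat/lon),
# while values in the 42001-97002 range are passed through as SNAP AUTO codes,
# e.g. 42001 -> 'AUTO:42001' (commonly automatic UTM zone selection).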