Example #1
    def __init__(self,
                 project_dir,
                 aoi,
                 start='1978-06-28',
                 end=datetime.today().strftime("%Y-%m-%d"),
                 data_mount=None,
                 download_dir=None,
                 inventory_dir=None,
                 processing_dir=None,
                 temp_dir=None):

        self.project_dir = os.path.abspath(project_dir)
        self.start = start
        self.end = end
        self.data_mount = data_mount
        self.download_dir = download_dir
        self.inventory_dir = inventory_dir
        self.processing_dir = processing_dir
        self.temp_dir = temp_dir

        # handle the import of different aoi formats and transform
        # to a WKT string
        if aoi.split('.')[-1] != 'shp' and len(aoi) == 3:

            # get lowres data
            world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
            country = world.name[world.iso_a3 == aoi].values[0]
            print(' INFO: Getting the country boundaries from Geopandas low'
                  ' resolution data for {}'.format(country))

            self.aoi = (
                world['geometry'][world['iso_a3'] == aoi].values[0].to_wkt())
        elif aoi.split('.')[-1] == 'shp':
            self.aoi = str(vec.shp_to_wkt(aoi))
            print(' INFO: Using {} shapefile as Area of Interest'
                  ' definition.'.format(aoi))
        else:
            try:
                loads(str(aoi))
            except Exception:
                print(' ERROR: No valid OST AOI definition.')
                sys.exit()
            else:
                self.aoi = aoi

        if not self.download_dir:
            self.download_dir = opj(project_dir, 'download')
        if not self.inventory_dir:
            self.inventory_dir = opj(project_dir, 'inventory')
        if not self.processing_dir:
            self.processing_dir = opj(project_dir, 'processing')
        if not self.temp_dir:
            self.temp_dir = opj(project_dir, 'temp')

        self._create_project_dir()
        self._create_download_dir(self.download_dir)
        self._create_inventory_dir(self.inventory_dir)
        self._create_processing_dir(self.processing_dir)
        self._create_temp_dir(self.temp_dir)
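
A minimal usage sketch for this constructor. The class and module names are
hypothetical (the snippet does not show where this __init__ lives); the three
AOI variants mirror the branches above.

# hypothetical import; replace with the actual OST project class
from ost import Project

# AOI as an ISO-3 country code: boundaries are taken from the
# Geopandas low-resolution Natural Earth dataset
project = Project(project_dir='/tmp/ost_project', aoi='IRL')

# AOI as a shapefile path: converted to WKT via vec.shp_to_wkt()
# project = Project(project_dir='/tmp/ost_project',
#                   aoi='/path/to/aoi.shp')

# AOI as a WKT string: validated with shapely.wkt.loads()
# project = Project(
#     project_dir='/tmp/ost_project',
#     aoi='POLYGON ((7 46, 8 46, 8 47, 7 47, 7 46))')
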
Example #2
    def to_ard(self, subset=None, overwrite=False):
        logger.debug('INFO: Starting %s ARD processing for %s',
                     self.ard_parameters['type'], self.product_type)

        if overwrite:
            logger.debug(
                'INFO: Deleting processing folder to start from scratch')
            h.remove_folder_content(self.processing_dir)

        if not self.ard_parameters:
            self.set_ard_parameters()
        if self.inventory.empty:
            raise EmptyInventoryException(
                'Run search before downloading and processing!')
        # use the AOI centre latitude to check SRTM coverage
        self.center_lat = loads(self.aoi).centroid.y
        if float(self.center_lat) > 59 or float(self.center_lat) < -59:
            logger.debug('INFO: Scene is outside SRTM coverage. '
                         'Will use 30m ASTER DEM instead.')
            self.ard_parameters['dem'] = 'ASTER 1sec GDEM'

        if subset:
            if isinstance(subset, str) and subset.split('.')[-1] == 'shp':
                subset = str(vec.shp_to_wkt(subset, buffer=0.1, envelope=True))
            elif isinstance(subset, str) and subset.startswith('POLYGON (('):
                subset = loads(subset).buffer(0.1).to_wkt()
            elif getattr(subset, 'geom_type', None) in ('Polygon',
                                                        'MultiPolygon'):
                subset = subset.wkt
            else:
                logger.debug('ERROR: No valid subset given. '
                             'Should be either a path to a shapefile '
                             'or a WKT Polygon.')
                sys.exit()
        if self.product_type == 'GRD':
            _to_ard_batch(self.inventory, self.download_dir,
                          self.processing_dir, self.ard_parameters, subset)
        elif self.product_type == 'SLC' and not self.burst_inventory.empty:
            burst_to_ard_batch(burst_inventory=self.burst_inventory,
                               download_dir=self.download_dir,
                               processing_dir=self.processing_dir,
                               ard_parameters=self.ard_parameters,
                               data_mount=self.data_mount,
                               max_workers=self.max_workers)
        logger.debug('INFO: %s ARD processing for %s DONE!',
                     self.ard_parameters['type'], self.product_type)
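
A minimal usage sketch, assuming `s1_project` is an instance of the class
that defines to_ard() and that a prior search has filled its inventory (the
instance name and setup are hypothetical):

# optional: to_ard() calls set_ard_parameters() itself if none are set
s1_project.set_ard_parameters()

# process the full AOI, keeping any previously processed results
s1_project.to_ard()

# or: process only a buffered WKT subset, starting from a clean folder
s1_project.to_ard(
    subset='POLYGON ((7 46, 8 46, 8 47, 7 47, 7 46))',
    overwrite=True)
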
Example #3
def grd_to_ard(filelist,
               output_dir,
               file_id,
               temp_dir,
               processing_dict,
               subset=None,
               polar='VV,VH,HH,HV'):
    '''The main function for the GRD to ARD generation

    This function represents the full workflow for the generation of an
    Analysis-Ready-Data product. The standard parameters reflect the CEOS
    ARD definition for Sentinel-1 backscatter products.

    All processing options are taken from the processing_dict, so the
    workflow can be adapted by changing its entries. The function can
    handle multiple inputs of the same acquisition, given that they are
    consecutive data takes.

    Args:
        filelist (list): list with one or more absolute paths
                         to GRD scene(s)
        output_dir: os.path object or string for the folder
                    where the output file should be written
        file_id (str): prefix of the final output file
        temp_dir: os.path object or string for the folder
                  where intermediate files are written
        processing_dict (dict): processing parameters, i.e. resolution,
                                product_type, ls_mask, speckle_filter,
                                border_noise, dem and to_db
        subset: path to a shapefile used to subset the scene(s)
        polar (str): polarisation channels to process

    Returns:
        nothing

    Notes:
        no explicit return value, since the output file is our
        actual return
    '''

    # get processing parameters from dict
    resolution = processing_dict['resolution']
    product_type = processing_dict['product_type']
    ls_mask = processing_dict['ls_mask']
    speckle_filter = processing_dict['speckle_filter']
    border_noise = processing_dict['border_noise']
    dem = processing_dict['dem']
    to_db = processing_dict['to_db']

    # slice assembly if more than one scene
    if len(filelist) > 1:

        for file in filelist:

            grd_import = opj(temp_dir,
                             '{}_imported'.format(os.path.basename(file)[:-5]))
            logfile = opj(
                output_dir,
                '{}.Import.errLog'.format(os.path.basename(file)[:-5]))
            _grd_frame_import(file, grd_import, logfile, polar)

        # create list of scenes for full acquisition in
        # preparation of slice assembly
        scenelist = ' '.join(glob.glob(opj(temp_dir, '*imported.dim')))

        # create file strings
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.SliceAssembly.errLog'.format(file_id))
        _slice_assembly(scenelist, grd_import, logfile, polar)

        for file in filelist:
            h.delete_dimap(
                opj(temp_dir,
                    '{}_imported'.format(os.path.basename(str(file))[:-5])))

        if subset is not None:
            grd_subset = opj(temp_dir, '{}_imported_subset'.format(file_id))
            georegion = vec.shp_to_wkt(subset, buffer=0.1, envelope=True)
            _grd_subset_georegion('{}.dim'.format(grd_import), grd_subset,
                                  logfile, georegion)

            # delete slice assembly
            h.delete_dimap(grd_import)

    else:
        grd_import = opj(temp_dir, '{}_imported'.format(file_id))
        logfile = opj(output_dir, '{}.Import.errLog'.format(file_id))

        if subset is None:
            _grd_frame_import(filelist[0], grd_import, logfile, polar)
        else:
            georegion = vec.shp_to_wkt(subset, buffer=0.1, envelope=True)
            _grd_frame_import_subset(filelist[0], grd_import, georegion,
                                     logfile, polar)
    # ---------------------------------------------------------------------
    # Remove the GRD border noise from existing channels (OST routine)

    if border_noise:
        for polarisation in ['VV', 'VH', 'HH', 'HV']:

            infile = glob.glob(
                opj(temp_dir, '{}_imported*data'.format(file_id),
                    'Intensity_{}.img'.format(polarisation)))

            if len(infile) == 1:
                # run grd Border Remove
                print(' INFO: Remove border noise for {} band.'.format(
                    polarisation))
                _grd_remove_border(infile[0])

    # -------------------------------------------
    # in case we want to apply Speckle filtering
    if speckle_filter:
        infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]
        logfile = opj(temp_dir, '{}.Speckle.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_imported_spk'.format(file_id))

        # run processing
        _grd_speckle_filter(infile, outfile, logfile)

        # define infile for next processing step
        infile = opj(temp_dir, '{}_imported_spk.dim'.format(file_id))
        data_dir = glob.glob(opj(temp_dir,
                                 '{}*imported*.data'.format(file_id)))
        h.delete_dimap(str(data_dir[0])[:-5])

    else:
        # let's calibrate the data
        infile = glob.glob(opj(temp_dir, '{}_imported*dim'.format(file_id)))[0]

    # ----------------------
    # do the calibration
    outfile = opj(temp_dir, '{}.{}'.format(file_id, product_type))
    logfile = opj(output_dir, '{}.Backscatter.errLog'.format(file_id))
    _grd_backscatter(infile, outfile, logfile, product_type, dem)

    data_dir = glob.glob(opj(temp_dir, '{}*imported*.data'.format(file_id)))
    h.delete_dimap(str(data_dir[0])[:-5])

    # input file for the following step
    infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))

    # to db
    if to_db:
        logfile = opj(output_dir, '{}.linToDb.errLog'.format(file_id))
        outfile = opj(temp_dir, '{}_{}_db'.format(file_id, product_type))
        _grd_to_db(infile, outfile, logfile)
        # delete
        h.delete_dimap(infile[:-4])
        # re-define infile
        infile = opj(temp_dir, '{}_{}_db.dim'.format(file_id, product_type))

    # -----------------------
    # let's geocode the data
    # infile = opj(temp_dir, '{}.{}.dim'.format(file_id, product_type))
    outfile = opj(temp_dir, '{}.{}.TC'.format(file_id, product_type))
    logfile = opj(output_dir, '{}.TC.errLog'.format(file_id))
    _grd_terrain_correction_deg(infile, outfile, logfile, resolution, dem)

    # move to final destination
    out_final = opj(output_dir, '{}.{}.TC'.format(file_id, product_type))

    # remove file if exists
    if os.path.exists(out_final + '.dim'):
        h.delete_dimap(out_final)

    shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_final))
    shutil.move('{}.data'.format(outfile), '{}.data'.format(out_final))

    # ----------------------------------------------
    # let's create a Layover shadow mask if needed
    if ls_mask:
        outfile = opj(temp_dir, '{}.ls_mask'.format(file_id))
        logfile = opj(output_dir, '{}.ls_mask.errLog'.format(file_id))
        _grd_ls_mask(infile, outfile, logfile, resolution, dem)

        # move to final destination
        out_ls_mask = opj(output_dir, '{}.LS'.format(file_id))

        # delete original files in case they exist
        if os.path.exists(str(out_ls_mask) + '.dim'):
            h.delete_dimap(out_ls_mask)

        # move out of temp
        shutil.move('{}.dim'.format(outfile), '{}.dim'.format(out_ls_mask))
        shutil.move('{}.data'.format(outfile), '{}.data'.format(out_ls_mask))

    # remove calibrated files
    h.delete_dimap(infile[:-4])
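
A minimal sketch of a call to grd_to_ard(). The dictionary keys are exactly
the ones the function reads above; the values and paths are illustrative
assumptions, not prescribed defaults:

# illustrative parameter values; only the keys are taken from the function
processing_dict = {
    'resolution': 20,              # output resolution in metres
    'product_type': 'GTCgamma',    # assumed product type label
    'ls_mask': True,               # generate a layover/shadow mask
    'speckle_filter': False,
    'border_noise': True,
    'dem': 'SRTM 1Sec HGT',        # assumed DEM name
    'to_db': True                  # convert backscatter to dB
}

grd_to_ard(
    filelist=['/data/downloads/S1A_IW_GRDH_scene.zip'],  # hypothetical path
    output_dir='/data/processing',
    file_id='S1A_IW_20200101',
    temp_dir='/data/temp',
    processing_dict=processing_dict,
    subset=None,                   # or a path to a subset shapefile
    polar='VV,VH')
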
Example #4
    def grds_to_ard(self,
                    inventory_df=None,
                    subset=None,
                    timeseries=False,
                    timescan=False,
                    mosaic=False,
                    overwrite=False,
                    exec_file=None,
                    cut_to_aoi=False):

        self.update_ard_parameters()

        if overwrite:
            print(' INFO: Deleting processing folder to start from scratch')
            h.remove_folder_content(self.processing_dir)

        # set resolution in degree
#        self.center_lat = loads(self.aoi).centroid.y
#        if float(self.center_lat) > 59 or float(self.center_lat) < -59:
#            print(' INFO: Scene is outside SRTM coverage. Will use 30m ASTER'
#                  ' DEM instead.')
#            self.ard_parameters['dem'] = 'ASTER 1sec GDEM'

        if subset:
            if subset.split('.')[-1] == 'shp':
                subset = str(vec.shp_to_wkt(subset, buffer=0.1, envelope=True))
            elif subset.startswith('POLYGON (('):
                subset = loads(subset).buffer(0.1).to_wkt()
            else:
                print(
                    ' ERROR: No valid subset given.'
                    ' Should be either path to a shapefile or a WKT Polygon.')
                sys.exit()

        # check the number of already processed acquisitions
        nr_of_processed = len(
            glob.glob(opj(self.processing_dir, '*', '20*', '.processed')))

        # number of acquisitions to process
        nr_of_acq = len(
            inventory_df.groupby(['relativeorbit', 'acquisitiondate']))

        # check-and-retry loop
        i = 0
        while nr_of_acq > nr_of_processed:

            # the grd to ard batch routine
            grd_batch.grd_to_ard_batch(inventory_df, self.download_dir,
                                       self.processing_dir, self.temp_dir,
                                       self.proc_file, subset, self.data_mount,
                                       exec_file)

            # update the count of processed acquisitions
            nr_of_processed = len(
                glob.glob(opj(self.processing_dir, '*', '20*', '.processed')))
            i += 1

            # no more than 5 tries
            if i == 5:
                break

        # time-series part
        if timeseries or timescan:

            nr_of_processed = len(
                glob.glob(
                    opj(self.processing_dir, '*', 'Timeseries',
                        '.*processed')))

            nr_of_polar = len(
                inventory_df.polarisationmode.unique()[0].split(' '))
            nr_of_tracks = len(inventory_df.relativeorbit.unique())
            nr_of_ts = nr_of_polar * nr_of_tracks

            # check-and-retry loop
            i = 0
            while nr_of_ts > nr_of_processed:

                grd_batch.ards_to_timeseries(inventory_df, self.processing_dir,
                                             self.temp_dir, self.proc_file,
                                             exec_file)

                nr_of_processed = len(
                    glob.glob(
                        opj(self.processing_dir, '*', 'Timeseries',
                            '.*processed')))
                i += 1

                # no more than 5 tries
                if i == 5:
                    break

        if timescan:

            # number of already processed timescans
            nr_of_processed = len(
                glob.glob(
                    opj(self.processing_dir, '*', 'Timescan', '.*processed')))

            # number of expected timescans
            nr_of_polar = len(
                inventory_df.polarisationmode.unique()[0].split(' '))
            nr_of_tracks = len(inventory_df.relativeorbit.unique())
            nr_of_ts = nr_of_polar * nr_of_tracks

            i = 0
            while nr_of_ts > nr_of_processed:

                grd_batch.timeseries_to_timescan(inventory_df,
                                                 self.processing_dir,
                                                 self.proc_file)

                nr_of_processed = len(
                    glob.glob(
                        opj(self.processing_dir, '*', 'Timescan',
                            '.*processed')))

                i += 1

                # no more than 5 tries
                if i == 5:
                    break

            if i < 5 and exec_file:
                print(' create vrt command')

        # replace the boolean flag by the actual AOI geometry for mosaicking
        if cut_to_aoi:
            cut_to_aoi = self.aoi

        if mosaic and timeseries and not subset:
            grd_batch.mosaic_timeseries(inventory_df, self.processing_dir,
                                        self.temp_dir, cut_to_aoi)

        if mosaic and timescan and not subset:
            grd_batch.mosaic_timescan(inventory_df, self.processing_dir,
                                      self.temp_dir, self.proc_file,
                                      cut_to_aoi)
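
A minimal usage sketch, assuming `s1_batch` is an instance of the class that
defines grds_to_ard() and `inventory` is the search result DataFrame (both
names are hypothetical):

s1_batch.grds_to_ard(
    inventory_df=inventory,
    timeseries=True,    # build per-track time-series stacks
    timescan=True,      # derive multi-temporal statistics per track
    mosaic=True,        # mosaic tracks (skipped when a subset is given)
    overwrite=False)
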