Example #1
def make_bbox(inp_bbox):
    """Make a shapely Polygon from an SNWE string or a shapefile path."""
    if inp_bbox is None:
        return None
    import os.path as op
    from shapely.geometry import Polygon
    if op.exists(op.abspath(inp_bbox)):
        from ARIAtools.shapefile_util import open_shapefile
        ring = open_shapefile(inp_bbox, 0, 0).exterior
        poly = Polygon(ring)
    else:
        try:
            S, N, W, E = [float(i) for i in inp_bbox.split()]
            # Adjust for longitudes given in 0-360 degrees easting and latitudes in 0-180 northing
            if W > 180:
                W -= 360
                print('AdjustedW')
            if E > 180:
                E -= 360
                print('AdjustedE')
            if N > 90:
                N -= 90
                S -= 90
                print('Adjusted N/S')
        except ValueError:
            raise Exception(
                'Cannot understand the --bbox argument. '
                'Input string was entered incorrectly or path does not '
                'exist.')

        poly = Polygon([(W, N), (W, S), (E, S), (E, N)])
    return poly
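A minimal usage sketch for make_bbox (the coordinates are illustrative, not from the source; the shapefile branch additionally assumes ARIAtools is installed):

poly = make_bbox('34 36 241 243')   # SNWE string; W/E in 0-360 wrap to -119/-117
print(poly.bounds)                  # -> (-119.0, 34.0, -117.0, 36.0)
# Any existing path is treated as a shapefile instead:
# poly = make_bbox('user_bbox.shp')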
Example #2
    def __init__(self,
                 product_dict,
                 workdir='./',
                 bbox_file=None,
                 prods_TOTbbox=None,
                 mask=None,
                 outputFormat='ENVI',
                 croptounion=False,
                 num_threads='2'):
        # Pass inputs, and initialize list of pairs
        self.product_dict = product_dict
        self.bbox_file = bbox_file
        self.workdir = os.path.join(workdir, 'figures')
        self.prods_TOTbbox = prods_TOTbbox
        self.mask = mask
        self.outputFormat = outputFormat
        self.croptounion = croptounion
        self.num_threads = num_threads
        self.pairs = None
        self.mask_ext = '_mask' if self.mask is not None else ''

        if self.bbox_file:
            self.bbox_file = open_shapefile(bbox_file, 0, 0).bounds

        # Data must be physically extracted; cannot proceed with VRT format,
        # so default to ENVI
        if self.outputFormat.upper() == 'VRT':
            self.outputFormat = 'ENVI'

        os.makedirs(self.workdir, exist_ok=True)
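A hedged instantiation sketch; PlotClass stands in for the class name the snippet omits, and the inputs mirror what the extraction steps later in this collection (e.g. merged_productbbox) would produce:

plotter = PlotClass(product_dict,                        # per-layer/pair lists
                    workdir='./work',                    # figures land in ./work/figures
                    bbox_file='user_bbox.json',
                    prods_TOTbbox='productBoundingBox.json',
                    outputFormat='VRT')                  # silently promoted to 'ENVI'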
Example #3
    def __init__(self,
                 product_dict,
                 workdir='./',
                 bbox_file=None,
                 prods_TOTbbox=None,
                 mask=None,
                 outputFormat='ENVI',
                 croptounion=False):
        # Pass inputs, and initialize list of pairs
        self.product_dict = product_dict
        self.bbox_file = bbox_file
        if self.bbox_file:
            self.bbox_file = open_shapefile(bbox_file, 0, 0).bounds
        self.workdir = os.path.join(workdir, 'figures')
        self.prods_TOTbbox = prods_TOTbbox
        self.mask = mask
        self.outputFormat = outputFormat
        self.croptounion = croptounion
        # File must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
        if self.outputFormat == 'VRT':
            self.outputFormat = 'ENVI'

        # create workdir if it doesn't exist
        if not os.path.exists(self.workdir):
            os.mkdir(self.workdir)

        self.pairs = None
        self.mask_ext = '_mask' if self.mask is not None else ''
Example #4
    def __readproduct__(self, file):
        '''
            Read product, determine expected layer names based off of version number, and populate corresponding product dictionary accordingly.
        '''

        ### Get standard product version from file
        # If netcdf with groups
        try:
            version = str(gdal.Open(file).GetMetadataItem('NC_GLOBAL#version'))
        except (AttributeError, RuntimeError):  # gdal.Open returns None for unsupported files
            print('{} is not a supported file type... skipping'.format(file))
            return []

        # If netcdf with nogroups
        if version == str(None):
            version = str(gdal.Open(file).GetMetadataItem('version'))

        ### Get lists of radarmetadata/layer keys for this file version
        rmdkeys, sdskeys = self.__mappingVersion__(file, version)
        if self.bbox is not None:
            # Open standard product bbox
            file_bbox = open_shapefile(
                'NETCDF:"' + file + '":' + sdskeys[0], 'productBoundingBox', 1
            )  ##SS => We should track the projection of the shapefile. i.e. in case this changes in the product
            # Only generate dictionaries if there is spatial overlap with user bbox
            if file_bbox.intersects(self.bbox):
                product_dicts = [self.__mappingData__(file, rmdkeys, sdskeys)]
            else:
                product_dicts = []
        # If no bbox specified, just pass dictionaries
        else:
            product_dicts = [self.__mappingData__(file, rmdkeys, sdskeys)]

        return product_dicts
Example #5
    def __init__(self, filearg, bbox=None, workdir='./', verbose=False):
        # If user wants verbose mode
        self.verbose=verbose
        # Parse through file(s)/bbox input
        self.files = []
        self.products = []
        # Track bbox file
        self.bbox_file = None
        # Pair name for layer extraction
        self.pairname = None

        ### Determine if file input is single file, a list, or wildcard.
        # If list of files
        if len([str(val) for val in filearg.split(',')])>1:
            self.files=[str(val) for val in filearg.split(',')]
        # If single file or wildcard
        else:
            # If single file
            if os.path.isfile(filearg):
                self.files=[filearg]
            # If wildcard
            else:
                # NOTE: relies on a class-level `import glob` in the full source
                self.files = self.glob.glob(
                    os.path.expanduser(os.path.expandvars(filearg)))
            # Convert relative paths to absolute paths
            self.files=[os.path.normpath(os.path.join(os.getcwd(),i)) if not os.path.isabs(i) else i for i in self.files]
        if len(self.files)==0:
            raise Exception('No file match found')
        # If specified workdir doesn't exist, create it
        if not os.path.exists(workdir):
            os.mkdir(workdir)

        ### Check if bbox input is valid list or shapefile.
        if bbox is not None:
            # If SNWE list (a split string is always a list, so only the
            # file check is needed)
            if not os.path.isfile(bbox):
                from shapely.geometry import Polygon
                try:
                    bbox = [float(val) for val in bbox.split()]
                except ValueError:
                    raise Exception('Cannot understand the --bbox argument. String input is incorrect or path does not exist.')
                # Use shapely to make list
                self.bbox = Polygon(np.column_stack((np.array([bbox[2],bbox[3],bbox[3],bbox[2],bbox[2]]),
                            np.array([bbox[0],bbox[0],bbox[1],bbox[1],bbox[0]])))) #Pass lons/lats to create polygon
                # Save polygon in shapefile
                save_shapefile(os.path.join(workdir,'user_bbox.shp'), self.bbox, 'GeoJSON')
                self.bbox_file=os.path.join(workdir,'user_bbox.shp')
                print("Shapefile %s created for input user bounds."%os.path.join(workdir,'user_bbox.shp'))
            # If shapefile
            elif os.path.isfile(bbox):
                self.bbox = open_shapefile(bbox, 0, 0)                       ##SS => We should track the projection of the shapefile. i.e. if user provides this in e.g. UTM etc.
                self.bbox_file = bbox
            else:
                raise Exception('bbox input neither valid list nor file')
        else:
            self.bbox=None

        ### Report dictionaries for all valid products
        self.__run__()
Example #6
    def _get_bbox(self):
        if op.exists(op.abspath(self.inps.bbox)):
            from ARIAtools.shapefile_util import open_shapefile
            bounds = open_shapefile(self.inps.bbox, 0, 0).bounds
            W, S, E, N = [str(i) for i in bounds]
        else:
            try:
                S, N, W, E = self.inps.bbox.split()
            except ValueError:
                raise Exception(
                    'Cannot understand the --bbox argument. Input string was '
                    'entered incorrectly or path does not exist.')
        return ','.join([W, S, E, N])
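A sketch of the two accepted inputs (names hypothetical, mirroring the parsed CLI arguments):

class _Inps:
    bbox = '34 36 -119 -117'        # S N W E, as on the command line

# With self.inps = _Inps(), _get_bbox() reorders the corners and returns the
# comma-joined 'W,S,E,N' string: '-119,34,-117,36'. If bbox were an existing
# shapefile path, the same string would be built from the shapefile's bounds.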
Example #7
    def __readproduct__(self, fname):
        """

        Read products.

        Read product, determine expected layer names based off of version
        number, and populate corresponding product dictionary accordingly.

        """
        ### Get standard product version from file
        try:
            #version accessed differently between URL vs downloaded product
            version = str(
                gdal.Open(fname).GetMetadataItem('NC_GLOBAL#version'))
        except (AttributeError, RuntimeError):  # gdal.Open returns None for unsupported files
            log.warning('%s is not a supported file type... skipping', fname)
            return []

        ### Get lists of radarmetadata/layer keys for this file version
        fname = 'NETCDF:"' + fname
        rmdkeys, sdskeys = self.__mappingVersion__(fname, version)

        # Open standard product bbox
        if self.bbox is not None:
            file_bbox = open_shapefile(fname + '":' + sdskeys[0],
                                       'productBoundingBox', 1)
            # Only generate dictionaries if there is spatial overlap with user bbox
            if file_bbox.intersects(self.bbox):
                product_dicts = [
                    self.__mappingData__(fname, rmdkeys, sdskeys, version)]
            else:
                product_dicts = []
        # If no bbox specified, just pass dictionaries
        else:
            product_dicts = [
                self.__mappingData__(fname, rmdkeys, sdskeys, version)]

        return product_dicts
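A hypothetical call on a loader instance (the filename is illustrative); fname may be a local .nc product or a '/vsicurl/https://...' URL, since the method prepends the NETCDF driver prefix itself:

dicts = loader.__readproduct__('S1-GUNW-D-R-071-tops-20190101_20181220.nc')
# -> [{radar-metadata and layer paths}] on success; [] if the file is
#    unsupported or falls outside the user bbox.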
Example #8
    def plot_extents(self, figwidth=6.4):
        """ Make plot of track extents vs bbox/common track extent. """

        # Figure size based on number of products
        if figwidth in ['standard', 'narrow']:
            # Use standard figure width
            figwidth = 6.4
        elif figwidth in ['wide', 'auto']:
            # Automatically adjust figure width
            figwidth = 0.17 * len(self.product_dict[0]) + 6.4
        else:
            # Else, use user-specified width of figure
            figwidth = float(figwidth)

        ax = plt.figure(figsize=(figwidth, 4.8)).add_subplot(111)

        # Iterate through all IFGs
        S_extent = []
        N_extent = []
        for i, prod in enumerate(self.product_dict[0]):
            prods_bbox = open_shapefile(prod[0], 0, 0).bounds
            S_extent.append(prods_bbox[1])
            N_extent.append(prods_bbox[3])
            # Plot IFG extent bounds in latitude
            ax.plot([self.product_dict[1][i][0]] * 2,
                    list(prods_bbox[1::2]),
                    'ko',
                    markersize=10)
            # Plot IFG extent line connecting bounds in latitude
            ax.plot([self.product_dict[1][i][0]] * 2,
                    list(prods_bbox[1::2]),
                    color='0.5',
                    linestyle='--')

        # Plot bounds of common track extent
        if self.croptounion:
            S_extent = min(S_extent)
            N_extent = max(N_extent)
            if [self.bbox_file[1], self.bbox_file[3]
                ] != [S_extent, N_extent] and S_extent != N_extent:
                ax.axhline(y=S_extent,
                           color='r',
                           linestyle=':',
                           label="extent of union")
                ax.axhline(y=N_extent, color='r', linestyle=':')
        else:
            S_extent = max(S_extent)
            N_extent = min(N_extent)
            if [self.bbox_file[1], self.bbox_file[3]
                ] != [S_extent, N_extent] and S_extent != N_extent:
                ax.axhline(y=S_extent,
                           color='r',
                           linestyle=':',
                           label="extent of intersection")
                ax.axhline(y=N_extent, color='r', linestyle=':')

        # Plot bounds of final track extent all IFGs will be cropped to
        ax.axhline(y=self.bbox_file[1],
                   color='b',
                   linestyle='--',
                   label="bounding box")
        ax.axhline(y=self.bbox_file[3], color='b', linestyle='--')

        # add legend
        plt.legend(bbox_to_anchor=(1.05, 1),
                   loc='upper left',
                   borderaxespad=0.)

        # defining the axis labels
        ax.set_ylabel('Latitude', weight='bold')
        ax.set_xlabel('Interferograms', weight='bold')
        ax.set_title('Interferogram lat extents', weight='bold')
        plt.xticks(rotation=90)
        plt.tight_layout()

        # saving the figure
        plt.savefig(os.path.join(self.workdir, 'lat_extents.eps'))
        plt.savefig(os.path.join(self.workdir, 'lat_extents.png'))
        plt.close()

        return
Example #9
def export_products(full_product_dict,
                    bbox_file,
                    prods_TOTbbox,
                    layers,
                    dem=None,
                    lat=None,
                    lon=None,
                    mask=None,
                    outDir='./',
                    outputFormat='VRT',
                    stitchMethodType='overlap',
                    verbose=None):
    """
        Export layer and 2D meta-data layers (at the product resolution).
        The function finalize_metadata is called to derive the 2D metadata layer. Dem/lat/lon arrays must be passed for this process.
        The keys specify which layer to extract from the dictionary.
        All products are cropped by the bounds from the input bbox_file, and clipped to the track extent denoted by the input prods_TOTbbox.
        Optionally, a user may pass a mask-file.
    """

    if not layers: return  # nothing to extract; only the bbox was requested

    bounds = open_shapefile(bbox_file, 0, 0).bounds
    # Loop through user expected layers
    for key in layers:
        product_dict = [[j[key] for j in full_product_dict],
                        [j["pair_name"] for j in full_product_dict]]
        workdir = os.path.join(outDir, key)

        # If specified workdir doesn't exist, create it
        if not os.path.exists(workdir):
            os.mkdir(workdir)

        # Iterate through all IFGs
        print("Generating: " + key)
        for i, j in enumerate(product_dict[0]):
            outname = os.path.abspath(
                os.path.join(workdir, product_dict[1][i][0]))

            # Extract/crop metadata layers
            if any(":/science/grids/imagingGeometry" in s for s in j):
                gdal.BuildVRT(outname + '.vrt', j)

                if dem is None:
                    raise Exception(
                        'No DEM input specified. Cannot extract 3D imaging geometry layers without DEM to intersect with.'
                    )

                # Check that height values are consistent across files,
                # and if not exit with error
                hgt_field = 'NETCDF_DIM_heightsMeta_VALUES'
                hgt_vals = [gdal.Open(f).GetMetadataItem(hgt_field)
                            for f in j]
                if len(set(hgt_vals)) == 1:
                    gdal.Open(outname + '.vrt').SetMetadataItem(
                        hgt_field, hgt_vals[0])
                else:
                    raise Exception(
                        'Inconsistent heights for metadata layer(s) {}; '
                        'corresponding heights: {}'.format(j, hgt_vals))

                # Pass metadata layer VRT, with DEM filename and output name to interpolate/intersect with DEM before cropping
                finalize_metadata(outname,
                                  open_shapefile(bbox_file, 0, 0).bounds,
                                  prods_TOTbbox,
                                  dem,
                                  lat,
                                  lon,
                                  mask,
                                  outputFormat,
                                  verbose=verbose)

            # Extract/crop full-res layers, except for "unw" and "conn_comp", which require advanced stitching
            elif key != 'unwrappedPhase' and key != 'connectedComponents':
                if outputFormat == 'VRT' and mask is None:
                    # building the virtual vrt
                    gdal.BuildVRT(outname + "_uncropped" + '.vrt', j)
                    # building the cropped vrt
                    gdal.Warp(outname + '.vrt',
                              outname + "_uncropped" + '.vrt',
                              options=gdal.WarpOptions(
                                  format=outputFormat,
                                  cutlineDSName=prods_TOTbbox,
                                  outputBounds=bounds))
                else:
                    # building the VRT
                    gdal.BuildVRT(outname + '.vrt', j)

                    # Mask specified, so file must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
                    if outputFormat == 'VRT' and mask is not None:
                        outputFormat = 'ENVI'
                    gdal.Warp(outname,
                              outname + '.vrt',
                              options=gdal.WarpOptions(
                                  format=outputFormat,
                                  cutlineDSName=prods_TOTbbox,
                                  outputBounds=bounds))

                    # Update VRT
                    gdal.Translate(outname + '.vrt',
                                   outname,
                                   options=gdal.TranslateOptions(format="VRT"))

                    # Apply mask (if specified).
                    if mask is not None:
                        update_file = gdal.Open(outname, gdal.GA_Update)
                        update_file.GetRasterBand(1).WriteArray(
                            mask * gdal.Open(outname + '.vrt').ReadAsArray())
                        update_file = None

            # Extract/crop "unw" and "conn_comp" layers leveraging the two stage unwrapper
            else:
                # Check if unw phase and connected components are already generated
                if not os.path.exists(
                        os.path.join(
                            outDir, 'unwrappedPhase',
                            product_dict[1][i][0])) or not os.path.exists(
                                os.path.join(outDir, 'connectedComponents',
                                             product_dict[1][i][0])):
                    # extract the inputs required for stitching of unwrapped and connected component files
                    unw_files = full_product_dict[i]['unwrappedPhase']
                    conn_files = full_product_dict[i]['connectedComponents']
                    prod_bbox_files = full_product_dict[i][
                        'productBoundingBoxFrames']
                    # based on the key define the output directories
                    outFileUnw = os.path.join(outDir, 'unwrappedPhase',
                                              product_dict[1][i][0])
                    outFileConnComp = os.path.join(outDir,
                                                   'connectedComponents',
                                                   product_dict[1][i][0])

                    # calling the stitching methods
                    if stitchMethodType == 'overlap':
                        product_stitch_overlap(unw_files,
                                               conn_files,
                                               prod_bbox_files,
                                               bounds,
                                               prods_TOTbbox,
                                               outFileUnw=outFileUnw,
                                               outFileConnComp=outFileConnComp,
                                               mask=mask,
                                               outputFormat=outputFormat,
                                               verbose=verbose)
                    elif stitchMethodType == '2stage':
                        product_stitch_2stage(unw_files,
                                              conn_files,
                                              bounds,
                                              prods_TOTbbox,
                                              outFileUnw=outFileUnw,
                                              outFileConnComp=outFileConnComp,
                                              mask=mask,
                                              outputFormat=outputFormat,
                                              verbose=verbose)

    return
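A hedged call sketch wiring export_products to the outputs of the prep steps elsewhere in this collection (all variable names are assumptions, not from the snippet):

export_products(products,                 # list of per-pair product dicts
                bbox_file,                # from merged_productbbox
                prods_TOTbbox,            # common track extent shapefile
                layers=['coherence', 'amplitude'],
                dem=demfile, lat=Latitude, lon=Longitude,   # from prep_dem
                mask=mask,                # from prep_mask
                outDir='./products',
                outputFormat='VRT')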
Example #10
def merged_productbbox(product_dict,
                       workdir='./',
                       bbox_file=None,
                       croptounion=False):
    '''
        Extract/merge productBoundingBox layers for each pair and update the
        dict, report the common track bbox (default is to take the common
        intersection, but the user may specify union), and the expected
        shape for the DEM.
    '''

    # Import functions
    from ARIAtools.shapefile_util import save_shapefile

    # If specified workdir doesn't exist, create it
    if not os.path.exists(workdir):
        os.mkdir(workdir)

    # Extract/merge productBoundingBox layers
    for scene in product_dict:
        # Get pair name, expected in dictionary
        pair_name = scene["pair_name"][0]
        outname = os.path.join(workdir, pair_name + '.shp')

        # Create union of productBoundingBox layers
        for frame in scene["productBoundingBox"]:
            prods_bbox = open_shapefile(frame, 'productBoundingBox', 1)
            if os.path.exists(outname):
                union_bbox = open_shapefile(outname, 0, 0)
                prods_bbox = prods_bbox.union(union_bbox)
            save_shapefile(
                outname, prods_bbox, 'GeoJSON'
            )  ##SS can we track and provide the proj information of the geojson?
        scene["productBoundingBox"] = [outname]

    prods_TOTbbox = os.path.join(workdir, 'productBoundingBox.shp')
    # Initiate intersection file with first product
    # this is for different scenes
    save_shapefile(
        prods_TOTbbox,
        open_shapefile(product_dict[0]['productBoundingBox'][0], 0, 0),
        'GeoJSON')
    for scene in product_dict[1:]:
        prods_bbox = open_shapefile(scene['productBoundingBox'][0], 0, 0)
        total_bbox = open_shapefile(prods_TOTbbox, 0, 0)
        # Generate footprint for the union of all products
        if croptounion:
            prods_bbox = prods_bbox.union(total_bbox)
        # Generate footprint for the common intersection of all products
        else:
            prods_bbox = prods_bbox.intersection(total_bbox)
        # Check if there is any common overlap
        if prods_bbox.bounds == ():
            raise Exception(
                'No common overlap, footprint cannot be generated. Last scene checked: %s'
                % (scene['productBoundingBox'][0]))
        save_shapefile(prods_TOTbbox, prods_bbox, 'GeoJSON')

    # If bbox specified, intersect with common track intersection/union
    if bbox_file is not None:
        user_bbox = open_shapefile(bbox_file, 0, 0)
        total_bbox = open_shapefile(prods_TOTbbox, 0, 0)
        user_bbox = user_bbox.intersection(total_bbox)
        save_shapefile(prods_TOTbbox, user_bbox, 'GeoJSON')

        # Estimate percentage of overlap with bbox
        prods_bbox_area = open_shapefile(prods_TOTbbox, 0, 0).bounds
        prods_bbox_area = (max(prods_bbox_area[0], prods_bbox_area[2]) -
                           min(prods_bbox_area[0], prods_bbox_area[2])) * (
                               max(prods_bbox_area[1], prods_bbox_area[3]) -
                               min(prods_bbox_area[1], prods_bbox_area[3]))
        bbox_area = open_shapefile(bbox_file, 0, 0).bounds
        bbox_area = (max(bbox_area[0], bbox_area[2]) - min(
            bbox_area[0], bbox_area[2])) * (max(bbox_area[1], bbox_area[3]) -
                                            min(bbox_area[1], bbox_area[3]))
        per_overlap = (prods_bbox_area / bbox_area) * 100
        if per_overlap < 50.:
            print(
                "WARNING: Common track extent only has %d%% overlap with bbox"
                % per_overlap + '\n')
    else:
        bbox_file = prods_TOTbbox

    # Warp the first scene with the output-bounds defined above
    ds = gdal.Warp('',
                   gdal.BuildVRT('', product_dict[0]['unwrappedPhase'][0]),
                   options=gdal.WarpOptions(format="MEM",
                                            outputBounds=open_shapefile(
                                                bbox_file, 0, 0).bounds))
    # Get shape of full res layers
    arrshape = [ds.RasterYSize, ds.RasterXSize]
    # Get projection of full res layers
    proj = ds.GetProjection()
    ds = None

    return product_dict, bbox_file, prods_TOTbbox, arrshape, proj
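A usage sketch under assumed inputs: products is a list of per-pair product dictionaries from the loader, and the bbox file is the one written at parse time:

(product_dict, bbox_file, prods_TOTbbox,
 arrshape, proj) = merged_productbbox(products,
                                      workdir='./productBoundingBox',
                                      bbox_file='user_bbox.json',
                                      croptounion=False)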
Example #11
def prep_mask(product_dict,
              maskfilename,
              bbox_file,
              prods_TOTbbox,
              proj,
              amp_thresh=None,
              arrshape=None,
              workdir='./',
              outputFormat='ENVI'):
    '''
        Function to load and export the mask file.
        If the "Download" flag is specified, the GSHHS water mask will be
        downloaded on the fly.
    '''

    # Import functions
    from ARIAtools.vrtmanager import renderVRT
    import glob

    _world_watermask = [
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L1.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L2.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L3.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L4.shp',
        ' /vsizip/vsicurl/https://osmdata.openstreetmap.de/download/land-polygons-complete-4326.zip/land-polygons-complete-4326/land_polygons.shp'
    ]

    # Get bounds of user bbox_file
    bounds = open_shapefile(bbox_file, 0, 0).bounds

    # File must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Download mask
    if maskfilename.lower() == 'download':
        maskfilename = os.path.join(workdir, 'watermask' + '.msk')

        ###Make coastlines/islands union VRT
        os.system('ogrmerge.py -o ' +
                  os.path.join(workdir, 'watermsk_shorelines.vrt') +
                  ''.join(_world_watermask[::2]) +
                  ' -field_strategy Union -f VRT -single')

        ###Make lakes/ponds union VRT
        os.system('ogrmerge.py -o ' +
                  os.path.join(workdir, 'watermsk_lakes.vrt') +
                  ''.join(_world_watermask[1::2]) +
                  ' -field_strategy Union -f VRT -single')

        ###Initiate water-mask with coastlines/islands union VRT
        gdal.Rasterize(maskfilename,
                       os.path.join(workdir, 'watermsk_shorelines.vrt'),
                       options=gdal.RasterizeOptions(format=outputFormat,
                                                     outputBounds=bounds,
                                                     outputType=gdal.GDT_Byte,
                                                     width=arrshape[1],
                                                     height=arrshape[0],
                                                     burnValues=[1],
                                                     layers='merged'))
        gdal.Open(maskfilename, gdal.GA_Update).SetProjection(proj)
        gdal.Translate(maskfilename + '.vrt',
                       maskfilename,
                       options=gdal.TranslateOptions(format="VRT"))

        ###Must take inverse of lakes/ponds union because of opposite designation (1 for water, 0 for land) as desired (0 for water, 1 for land)
        lake_masks = gdal.Rasterize(
            '',
            os.path.join(workdir, 'watermsk_lakes.vrt'),
            options=gdal.RasterizeOptions(format='MEM',
                                          outputBounds=bounds,
                                          outputType=gdal.GDT_Byte,
                                          width=arrshape[1],
                                          height=arrshape[0],
                                          burnValues=[1],
                                          layers='merged',
                                          inverse=True))
        lake_masks.SetProjection(proj)
        lake_masks = lake_masks.ReadAsArray()

        if amp_thresh:
            ###Make average amplitude mask
            # Iterate through all IFGs
            for i, j in enumerate(product_dict[0]):
                amp_file = gdal.Warp('',
                                     j,
                                     options=gdal.WarpOptions(
                                         format="MEM",
                                         cutlineDSName=prods_TOTbbox,
                                         outputBounds=bounds))
                amp_file_arr = np.ma.masked_where(
                    amp_file.ReadAsArray() == amp_file.GetRasterBand(
                        1).GetNoDataValue(), amp_file.ReadAsArray())

                # Iteratively update the running amplitude sum; on the first
                # pass there is nothing to sum, so just write the array to file
                if os.path.exists(os.path.join(workdir, 'avgamplitude.msk')):
                    amp_file = gdal.Open(
                        os.path.join(workdir, 'avgamplitude.msk'),
                        gdal.GA_Update)
                    amp_file = amp_file.GetRasterBand(1).WriteArray(
                        amp_file_arr + amp_file.ReadAsArray())
                else:
                    renderVRT(
                        os.path.join(workdir, 'avgamplitude.msk'),
                        amp_file_arr,
                        geotrans=amp_file.GetGeoTransform(),
                        drivername=outputFormat,
                        gdal_fmt=amp_file_arr.dtype.name,
                        proj=amp_file.GetProjection(),
                        nodata=amp_file.GetRasterBand(1).GetNoDataValue())
                amp_file = amp_file_arr = None

            # Take average of amplitude sum
            amp_file = gdal.Open(os.path.join(workdir, 'avgamplitude.msk'),
                                 gdal.GA_Update)
            arr_mean = amp_file.ReadAsArray() / len(product_dict[0])
            arr_mean = np.where(arr_mean < float(amp_thresh), 0, 1)
            amp_file = amp_file.GetRasterBand(1).WriteArray(arr_mean)
            amp_file = None
            arr_mean = None
            amp_file = gdal.Open(os.path.join(
                workdir, 'avgamplitude.msk')).ReadAsArray()
        else:
            amp_file = np.ones((lake_masks.shape[0], lake_masks.shape[1]))

        ###Update water-mask with lakes/ponds union and average amplitude
        update_file = gdal.Open(maskfilename, gdal.GA_Update)
        update_file = update_file.GetRasterBand(1).WriteArray(
            update_file.ReadAsArray() * lake_masks * amp_file)
        print('Downloaded water-mask here: ' + maskfilename)
        update_file = None
        lake_masks = None
        amp_file = None
        #Delete temp files
        os.remove(os.path.join(workdir, 'watermsk_shorelines.vrt'))
        os.remove(os.path.join(workdir, 'watermsk_lakes.vrt'))
        for i in glob.glob(os.path.join(workdir, 'avgamplitude.msk*')):
            os.remove(i)

    # Load mask
    try:
        mask = gdal.Warp('',
                         maskfilename,
                         options=gdal.WarpOptions(format="MEM",
                                                  cutlineDSName=prods_TOTbbox,
                                                  outputBounds=bounds,
                                                  dstNodata=0))
        mask.SetProjection(proj)
        # If no data value
        if mask.GetRasterBand(1).GetNoDataValue():
            mask = np.ma.masked_where(
                mask.ReadAsArray() == mask.GetRasterBand(1).GetNoDataValue(),
                mask.ReadAsArray())
        else:
            mask = mask.ReadAsArray()
    except:
        raise Exception('Failed to open user mask')

    return mask
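A hedged sketch of the 'Download' path; amp_product_dict is a hypothetical pairing of amplitude file lists with pair names, matching the iteration over product_dict[0] in the body above:

mask = prep_mask(amp_product_dict, 'Download', bbox_file, prods_TOTbbox,
                 proj, amp_thresh='400', arrshape=arrshape, workdir='./')
# The rasterized GSHHS water mask is written to ./watermask.msk and the
# (masked) array is returned.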
Example #12
def prep_dem(demfilename,
             bbox_file,
             prods_TOTbbox,
             proj,
             arrshape=None,
             workdir='./',
             outputFormat='ENVI'):
    '''
        Function to load and export DEM, lat, and lon arrays.
        If the "Download" flag is specified, the DEM will be downloaded
        on the fly.
    '''

    _world_dem = '/vsicurl/https://cloud.sdsc.edu/v1/AUTH_opentopography/Raster/SRTM_GL1_Ellip/SRTM_GL1_Ellip_srtm.vrt'

    # Get bounds of user bbox_file
    bounds = open_shapefile(bbox_file, 0, 0).bounds

    # File must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Download DEM
    if demfilename.lower() == 'download':
        demfilename = os.path.join(workdir, 'SRTM_3arcsec' + '.dem')
        gdal.Warp(demfilename,
                  _world_dem,
                  options=gdal.WarpOptions(format=outputFormat,
                                           outputBounds=bounds,
                                           outputType=gdal.GDT_Int16,
                                           width=arrshape[1],
                                           height=arrshape[0],
                                           dstNodata=-32768.0,
                                           srcNodata=-32768.0))
        gdal.Open(demfilename, gdal.GA_Update).SetProjection(proj)
        gdal.Translate(demfilename + '.vrt',
                       demfilename,
                       options=gdal.TranslateOptions(format="VRT"))  #Make VRT
        print('Downloaded 3 arc-sec SRTM DEM here: ' + demfilename)

    # Load DEM and setup lat and lon arrays
    try:
        demfile = gdal.Warp('',
                            demfilename,
                            options=gdal.WarpOptions(
                                format="MEM",
                                cutlineDSName=prods_TOTbbox,
                                outputBounds=bounds,
                                dstNodata=0))
        demfile.SetProjection(proj)

        ##SS Do we need lon lat if we would be doing gdal reproject using projection and transformation? See our earlier discussions.
        # Define lat/lon arrays for fullres layers
        Latitude = np.linspace(
            demfile.GetGeoTransform()[3],
            demfile.GetGeoTransform()[3] +
            (demfile.GetGeoTransform()[5] * demfile.RasterYSize),
            demfile.RasterYSize)
        Latitude = np.repeat(Latitude[:, np.newaxis],
                             demfile.RasterXSize,
                             axis=1)
        Longitude = np.linspace(
            demfile.GetGeoTransform()[0],
            demfile.GetGeoTransform()[0] +
            (demfile.GetGeoTransform()[1] * demfile.RasterXSize),
            demfile.RasterXSize)
        Longitude = np.repeat(Longitude[:, np.newaxis],
                              demfile.RasterYSize,
                              axis=1)
        Longitude = Longitude.transpose()
    except:
        raise Exception('Failed to open user DEM')

    return demfilename, demfile, Latitude, Longitude
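A usage sketch for the on-the-fly DEM path (variable names follow the other prep steps and are otherwise assumptions):

demfilename, demfile, Latitude, Longitude = prep_dem(
    'Download', bbox_file, prods_TOTbbox, proj,
    arrshape=arrshape, workdir='./DEM')
# Writes ./DEM/SRTM_3arcsec.dem (+ .vrt) and returns the in-memory DEM
# plus full-resolution lat/lon arrays.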
Example #13
    def __init__(self, filearg, bbox=None, workdir='./', verbose=False):
        # If user wants verbose mode
        # Parse through file(s)/bbox input
        if verbose: logger.setLevel(logging.DEBUG)
        self.files = []
        self.products = []
        # Track bbox file
        self.bbox_file = None
        # Pair name for layer extraction
        self.pairname = None

        ### Determine if file input is single file, a list, or wildcard.
        # If list of files
        if len([str(val) for val in filearg.split(',')]) > 1:
            self.files = [str(i) for i in filearg.split(',')]
            # If wildcard
            self.files = [
                os.path.abspath(item) for sublist in [
                    self.glob.glob(os.path.expanduser(os.path.expandvars(i))
                                   ) if '*' in i else [i] for i in self.files
                ] for item in sublist
            ]
        # If list of URLs provided
        elif os.path.basename(filearg).endswith('.txt'):
            with open(filearg, 'r') as fh:
                self.files = [f.rstrip('\n') for f in fh.readlines()]
        # If single file or wildcard
        else:
            # If single file
            if os.path.isfile(filearg):
                self.files = [filearg]
            # If wildcard
            else:
                self.files = self.glob.glob(
                    os.path.expanduser(os.path.expandvars(filearg)))
            # Convert relative paths to absolute paths
            self.files = [os.path.abspath(i) for i in self.files]

        # If URLs, append with '/vsicurl/'
        self.files = [
            '/vsicurl/{}'.format(i) if 'https://' in i else i
            for i in self.files
        ]
        #check if virtual file reader is being captured as netcdf
        if any("https://" in i for i in self.files):
            # must configure gdal to load URLs
            gdal.SetConfigOption('GDAL_HTTP_COOKIEFILE', 'cookies.txt')
            gdal.SetConfigOption('GDAL_HTTP_COOKIEJAR', 'cookies.txt')
            #gdal.SetConfigOption('CPL_VSIL_CURL_CHUNK_SIZE','10485760')
            gdal.SetConfigOption('VSI_CACHE', 'YES')

            fmt = gdal.Open([s for s in self.files if 'https://' in s
                             ][0]).GetDriver().GetDescription()
            if fmt != 'netCDF':
                raise Exception(
                    'System update required to read requested virtual products: Linux kernel >=4.3 and libnetcdf >=4.5'
                )
        #check if local file reader is being captured as netcdf
        if any("https://" not in i for i in self.files):
            fmt = gdal.Open([s for s in self.files if 'https://' not in s
                             ][0]).GetDriver().GetDescription()
            if fmt != 'netCDF':
                raise Exception(
                    'System update required to read requested local products: Linux kernel >=4.3 and libnetcdf >=4.5'
                )

        if len(self.files) == 0:
            raise Exception('No file match found')
        # If specified workdir doesn't exist, create it
        if not os.path.exists(workdir):
            os.mkdir(workdir)

        ### Check if bbox input is valid list or shapefile.
        if bbox is not None:
            # If SNWE list (a split string is always a list, so only the
            # file check is needed)
            if not os.path.isfile(bbox):
                from shapely.geometry import Polygon
                try:
                    bbox = [float(val) for val in bbox.split()]
                except ValueError:
                    raise Exception(
                        'Cannot understand the --bbox argument. String input is incorrect or path does not exist.'
                    )
                # Use shapely to make list
                self.bbox = Polygon(
                    np.column_stack((np.array([
                        bbox[2], bbox[3], bbox[3], bbox[2], bbox[2]
                    ]), np.array([bbox[0], bbox[0], bbox[1], bbox[1], bbox[0]
                                  ]))))  #Pass lons/lats to create polygon
                # Save polygon in shapefile
                save_shapefile(os.path.join(workdir, 'user_bbox.json'),
                               self.bbox, 'GeoJSON')
                self.bbox_file = os.path.join(workdir, 'user_bbox.json')
                log.info("Shapefile %s created for input user bounds.",
                         os.path.join(workdir, 'user_bbox.json'))
            # If shapefile
            elif os.path.isfile(bbox):
                self.bbox = open_shapefile(
                    bbox, 0, 0
                )  ##SS => We should track the projection of the shapefile. i.e. if user provides this in e.g. UTM etc.
                self.bbox_file = bbox
            else:
                raise Exception('bbox input neither valid list nor file')
        else:
            self.bbox = None

        ### Report dictionaries for all valid products
        self.__run__()
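A hedged construction sketch; ProductLoader stands in for the class name the snippet omits. filearg accepts a single file, a comma-separated list, a wildcard, or a .txt file of product URLs/paths:

loader = ProductLoader('products/*.nc',
                       bbox='34 36 -119 -117',   # S N W E, or a shapefile path
                       workdir='./work',
                       verbose=True)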
Example #14
    def plot_extents(self):
        '''
            Make plot of track extents vs bounding box/common track extent.
        '''

        ax = self.plt.figure().add_subplot(111)
        #Iterate through all IFGs
        S_extent = []
        N_extent = []
        for i, j in enumerate(self.product_dict[0]):
            prods_bbox = open_shapefile(j[0], 0, 0).bounds
            S_extent.append(prods_bbox[1])
            N_extent.append(prods_bbox[3])
            # Plot IFG extent bounds in latitude
            ax.plot([self.product_dict[1][i][0]] * 2,
                    list(prods_bbox[1::2]),
                    'ko',
                    markersize=10)
            # Plot IFG extent line connecting bounds in latitude
            ax.plot([self.product_dict[1][i][0]] * 2,
                    list(prods_bbox[1::2]),
                    color='0.5',
                    linestyle='--')

        # Plot bounds of common track extent
        if self.croptounion:
            S_extent = min(S_extent)
            N_extent = max(N_extent)
            if [self.bbox_file[1], self.bbox_file[3]
                ] != [S_extent, N_extent] and S_extent != N_extent:
                ax.axhline(y=S_extent,
                           color='r',
                           linestyle=':',
                           label="extent of union")
                ax.axhline(y=N_extent, color='r', linestyle=':')
        else:
            S_extent = max(S_extent)
            N_extent = min(N_extent)
            if [self.bbox_file[1], self.bbox_file[3]
                ] != [S_extent, N_extent] and S_extent != N_extent:
                ax.axhline(y=S_extent,
                           color='r',
                           linestyle=':',
                           label="extent of intersection")
                ax.axhline(y=N_extent, color='r', linestyle=':')

        # Plot bounds of final track extent all IFGs will be cropped to
        ax.axhline(y=self.bbox_file[1],
                   color='b',
                   linestyle='--',
                   label="bounding box")
        ax.axhline(y=self.bbox_file[3], color='b', linestyle='--')

        # add legend
        self.plt.legend(bbox_to_anchor=(1.05, 1),
                        loc='upper left',
                        borderaxespad=0.)

        # defining the axis labels
        ax.set_ylabel('Latitude', weight='bold')
        ax.set_xlabel('Interferograms', weight='bold')
        ax.set_title('Interferogram lat extents', weight='bold')
        self.plt.xticks(rotation=90)
        self.plt.tight_layout()

        # saving the figure
        self.plt.savefig(os.path.join(self.workdir, 'lat_extents.eps'))
        self.plt.close()

        return
Example #15
def prep_mask(product_dict,
              maskfilename,
              bbox_file,
              prods_TOTbbox,
              proj,
              amp_thresh=None,
              arrshape=None,
              workdir='./',
              outputFormat='ENVI',
              num_threads='2'):
    '''
        Function to load and export the mask file.
        If the "Download" flag is specified, the GSHHS water mask will be
        downloaded on the fly.
        If the full-resolution NLCD landcover data is given (NLCD...img),
        it will be cropped to match the product.
    '''

    # Import functions
    import glob
    from ARIAtools.vrtmanager import renderOGRVRT

    _world_watermask = [
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L1.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L2.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L3.shp',
        ' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L4.shp',
        ' /vsizip/vsicurl/https://osmdata.openstreetmap.de/download/land-polygons-complete-4326.zip/land-polygons-complete-4326/land_polygons.shp'
    ]

    # If the specified mask subdirectory exists, delete its contents (unless
    # maskfilename already points at the cropped/uncropped mask there)
    workdir = os.path.join(workdir, 'mask')
    base = os.path.basename(maskfilename).split('.')[0]
    cropped_mask = os.path.abspath(
        os.path.join(workdir, base.split('uncropped')[0] + '.msk'))
    uncropped_mask = os.path.abspath(os.path.join(workdir, base + '.msk'))
    if (os.path.exists(workdir)
            and os.path.abspath(maskfilename) != cropped_mask
            and os.path.abspath(maskfilename) != uncropped_mask) \
            or maskfilename.lower() == 'download':
        for i in glob.glob(os.path.join(workdir, '*.*')):
            os.remove(i)
    if not os.path.exists(workdir):
        os.mkdir(workdir)

    # Get bounds of user bbox_file
    bounds = open_shapefile(bbox_file, 0, 0).bounds

    # File must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Download mask
    if maskfilename.lower() == 'download':
        log.info("***Downloading water mask... ***")
        maskfilename = os.path.join(workdir, 'watermask' + '.msk')
        os.environ['CPL_ZIP_ENCODING'] = 'UTF-8'
        ###Make coastlines/islands union VRT
        renderOGRVRT(os.path.join(workdir, 'watermsk_shorelines.vrt'),
                     _world_watermask[::2])
        ###Make lakes/ponds union VRT
        renderOGRVRT(os.path.join(workdir, 'watermsk_lakes.vrt'),
                     _world_watermask[1::2])

        ###Initiate water-mask with coastlines/islands union VRT
        # save uncropped mask
        gdal.Rasterize(os.path.join(workdir, 'watermask_uncropped.msk'),
                       os.path.join(workdir, 'watermsk_shorelines.vrt'),
                       options=gdal.RasterizeOptions(format=outputFormat,
                                                     outputBounds=bounds,
                                                     outputType=gdal.GDT_Byte,
                                                     width=arrshape[1],
                                                     height=arrshape[0],
                                                     burnValues=[1],
                                                     layers='merged'))
        gdal.Translate(os.path.join(workdir, 'watermask_uncropped.msk.vrt'),
                       os.path.join(workdir, 'watermask_uncropped.msk'),
                       options=gdal.TranslateOptions(format="VRT"))
        # save cropped mask
        gdal.Warp(maskfilename,
                  os.path.join(workdir, 'watermask_uncropped.msk.vrt'),
                  options=gdal.WarpOptions(
                      format=outputFormat,
                      outputBounds=bounds,
                      outputType=gdal.GDT_Byte,
                      width=arrshape[1],
                      height=arrshape[0],
                      multithread=True,
                      options=['NUM_THREADS=%s' % (num_threads)]))
        update_file = gdal.Open(maskfilename, gdal.GA_Update)
        update_file.SetProjection(proj)
        update_file.GetRasterBand(1).SetNoDataValue(0.)
        del update_file
        gdal.Translate(maskfilename + '.vrt',
                       maskfilename,
                       options=gdal.TranslateOptions(format="VRT"))

        ###Must take inverse of lakes/ponds union because of opposite designation (1 for water, 0 for land) as desired (0 for water, 1 for land)
        lake_masks = gdal.Rasterize(
            '',
            os.path.join(workdir, 'watermsk_lakes.vrt'),
            options=gdal.RasterizeOptions(format='MEM',
                                          outputBounds=bounds,
                                          outputType=gdal.GDT_Byte,
                                          width=arrshape[1],
                                          height=arrshape[0],
                                          burnValues=[1],
                                          layers='merged',
                                          inverse=True))
        lake_masks.SetProjection(proj)
        lake_masks = lake_masks.ReadAsArray()

        ###Update water-mask with lakes/ponds union and average amplitude
        mask_file = gdal.Open(maskfilename, gdal.GA_Update)
        mask_file.GetRasterBand(1).WriteArray(
            lake_masks * gdal.Open(maskfilename).ReadAsArray())
        #Delete temp files
        del lake_masks, mask_file

    if os.path.basename(maskfilename).lower().startswith('nlcd'):
        log.info("***Accessing and cropping the NLCD mask...***")
        maskfilename = NLCDMasker(os.path.dirname(workdir))(proj, bounds,
                                                            arrshape,
                                                            outputFormat)

    # Make sure to apply amplitude mask to downloaded products
    if os.path.exists(os.path.join(workdir, 'watermsk_shorelines.vrt')
                      ) or os.path.basename(maskfilename) == 'NLCD_crop.msk':
        ###Make average amplitude mask
        if amp_thresh:
            amp_file = rasterAverage(os.path.join(workdir, 'avgamplitude'),
                                     product_dict[0],
                                     bounds,
                                     prods_TOTbbox,
                                     outputFormat=outputFormat,
                                     thresh=amp_thresh)
            ###Update mask with average amplitude
            mask_file = gdal.Open(maskfilename, gdal.GA_Update)
            mask_file.GetRasterBand(1).WriteArray(
                gdal.Open(maskfilename).ReadAsArray() * amp_file)
            #Delete temp files
            del mask_file, amp_file
        if os.path.exists(os.path.join(workdir, 'watermsk_shorelines.vrt')):
            os.remove(os.path.join(workdir, 'watermsk_shorelines.vrt'))
        if os.path.exists(os.path.join(workdir, 'watermsk_lakes.vrt')):
            os.remove(os.path.join(workdir, 'watermsk_lakes.vrt'))

    # Load mask
    try:
        # Check if uncropped/cropped maskfiles exist in 'mask' subdirectory
        if not os.path.exists(
                os.path.join(
                    workdir,
                    os.path.basename(maskfilename).split('.')[0] + '.msk')):
            # save uncropped maskfile
            gdal.BuildVRT(os.path.join(
                workdir,
                os.path.basename(maskfilename).split('.')[0] +
                '_uncropped.msk.vrt'),
                          maskfilename,
                          options=gdal.BuildVRTOptions(outputBounds=bounds))
            # update maskfilename
            maskfilename = os.path.join(
                workdir,
                os.path.basename(maskfilename).split('.')[0].split('uncropped')
                [0] + '.msk')
            # save cropped maskfile
            gdal.Warp(maskfilename,
                      os.path.join(
                          workdir,
                          os.path.basename(maskfilename).split('.')[0] +
                          '_uncropped.msk.vrt'),
                      options=gdal.WarpOptions(
                          format=outputFormat,
                          cutlineDSName=prods_TOTbbox,
                          outputBounds=bounds,
                          width=arrshape[1],
                          height=arrshape[0],
                          multithread=True,
                          options=['NUM_THREADS=%s' % (num_threads)]))
            mask_file = gdal.Open(maskfilename, gdal.GA_Update)
            mask_file.SetProjection(proj)
            del mask_file
            gdal.Translate(maskfilename + '.vrt',
                           maskfilename,
                           options=gdal.TranslateOptions(format="VRT"))
            ###Make average amplitude mask
            if amp_thresh:
                amp_file = rasterAverage(os.path.join(workdir, 'avgamplitude'),
                                         product_dict[0],
                                         bounds,
                                         prods_TOTbbox,
                                         outputFormat=outputFormat,
                                         thresh=amp_thresh)
                ###Update mask with average amplitude
                mask_file = gdal.Open(maskfilename, gdal.GA_Update)
                mask_file.GetRasterBand(1).WriteArray(
                    gdal.Open(maskfilename).ReadAsArray() * amp_file)
                #Delete temp files
                del mask_file, amp_file

        #pass cropped DEM
        mask = gdal.Warp('',
                         maskfilename,
                         options=gdal.WarpOptions(
                             format="MEM",
                             cutlineDSName=prods_TOTbbox,
                             outputBounds=bounds,
                             width=arrshape[1],
                             height=arrshape[0],
                             multithread=True,
                             options=['NUM_THREADS=%s' % (num_threads)]))
        mask.SetProjection(proj)
        mask.SetDescription(maskfilename)
    except:
        raise Exception('Failed to open user mask')

    return mask
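A sketch of the NLCD branch (the landcover filename is illustrative; any basename starting with 'NLCD' routes through NLCDMasker instead of the GSHHS download path):

mask = prep_mask(amp_product_dict, 'NLCD_2016_Land_Cover_L48.img',
                 bbox_file, prods_TOTbbox, proj, amp_thresh='400',
                 arrshape=arrshape, workdir='./', num_threads='4')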
Example #16
    def __continuous_time__(self):
        '''
            Split the products into spatiotemporally continuous groups (i.e.
            by individual, continuous interferograms). Input must already be
            sorted by pair and start-time to fit the logic scheme below.
            Using their time-tags, this function determines whether
            successive products are in the same orbit. If they are, it
            determines whether they overlap in time and are therefore
            spatially contiguous, and rejects/reports cases for which there
            is no temporal overlap and therefore a spatial gap.
        '''

        # import dependencies
        from datetime import datetime, timedelta
        import itertools

        sorted_products = []
        track_rejected_pairs = []

        # Check for (and remove) duplicate products
        num_prods = len(self.products)
        num_dups = []
        for i, scene in enumerate(self.products[:-1]):
            next_scene = self.products[i + 1]
            # If scenes share the same dates AND >90% spatial overlap, they
            # MUST be duplicates. Reject the latter.
            same_dates = (
                next_scene[0]['pair_name'][9:] == scene[0]['pair_name'][9:]
                and next_scene[0]['pair_name'][:8]
                == scene[0]['pair_name'][:8])
            if same_dates:
                scene_bbox = open_shapefile(
                    scene[1]['productBoundingBox'], 'productBoundingBox', 1)
                next_bbox = open_shapefile(
                    next_scene[1]['productBoundingBox'],
                    'productBoundingBox', 1)
                if (next_bbox.intersection(scene_bbox).area
                        / scene_bbox.area) > 0.9:
                    log.debug(
                        "Duplicate product captured. Rejecting scene %s",
                        os.path.basename(
                            next_scene[1]['unwrappedPhase'].split('"')[1]))
                    # Overwrite latter scene with former
                    self.products[i + 1] = scene
                    num_dups.append(i)
        # Delete duplicate products
        self.products = list(
            self.products
            for self.products, _ in itertools.groupby(self.products))
        if num_dups:
            log.warning("%d products rejected since they are duplicates",
                        len(num_dups))

        # If only one pair in list, add it to list.
        if len(self.products) == 1:
            sorted_products.extend([[
                dict(
                    zip(self.products[0][0].keys(),
                        [list(a) for a in zip(self.products[0][0].values())])),
                dict(
                    zip(self.products[0][1].keys(),
                        [list(a) for a in zip(self.products[0][1].values())]))
            ]])

        # If multiple pairs in list, cycle through and evaluate temporal connectivity.
        for i in enumerate(self.products[:-1]):
            # Get this reference product's times
            scene_start = datetime.strptime(
                i[1][0]['azimuthZeroDopplerMidTime'], "%Y-%m-%dT%H:%M:%S.%f")
            scene_end = scene_start + timedelta(seconds=i[1][0]['sceneLength'])
            master = datetime.strptime(i[1][0]['pair_name'][9:], "%Y%m%d")
            new_scene_start = datetime.strptime(
                self.products[i[0] + 1][0]['azimuthZeroDopplerMidTime'],
                "%Y-%m-%dT%H:%M:%S.%f")
            new_scene_end = new_scene_start + timedelta(
                seconds=self.products[i[0] + 1][0]['sceneLength'])
            slave = datetime.strptime(
                self.products[i[0] + 1][0]['pair_name'][9:], "%Y%m%d")

            # Determine if next product in time is in same orbit AND overlaps AND corresponds to same pair
            # If it is within same orbit cycle, try to append scene. This accounts for day change.
            if abs(new_scene_end - scene_end) <= timedelta(
                    minutes=100) and abs(slave - master) <= timedelta(days=1):
                # If either pair is already tracked as rejected, flag both
                # as rejected so neither is exported
                if (i[1][0]['pair_name'] in track_rejected_pairs
                        or self.products[i[0] + 1][0]['pair_name']
                        in track_rejected_pairs):
                    track_rejected_pairs.extend(
                        (i[1][0]['pair_name'],
                         self.products[i[0] + 1][0]['pair_name']))

                # Only pass scene if it temporally overlaps with reference scene
                elif ((scene_end <= new_scene_start) and
                      (new_scene_end <= scene_start)) or (
                          (scene_end >= new_scene_start) and
                          (new_scene_end >= scene_start)):
                    # Check if dictionary for IFG corresponding to reference product already exists, and if it does then append values
                    try:
                        dict_ind = sorted_products.index(
                            next(item for item in sorted_products
                                 if i[1][1]['productBoundingBox'] in item[1]
                                 ['productBoundingBox']))
                        sorted_products[dict_ind] = [
                            dict(
                                zip(i[1][0].keys(), [[
                                    subitem for item in a
                                    for subitem in (item if isinstance(
                                        item, list) else [item])
                                ] for a in zip(
                                    sorted_products[dict_ind][0].values(),
                                    self.products[i[0] + 1][0].values())])),
                            dict(
                                zip(i[1][1].keys(), [[
                                    subitem for item in a
                                    for subitem in (item if isinstance(
                                        item, list) else [item])
                                ] for a in zip(
                                    sorted_products[dict_ind][1].values(),
                                    self.products[i[0] + 1][1].values())]))
                        ]
                    # Match IFG corresponding to reference product NOT found, so initialize dictionary for new IFG
                    except StopIteration:
                        sorted_products.extend([[
                            dict(
                                zip(i[1][0].keys(), [
                                    list(a) for a in zip(
                                        i[1][0].values(), self.products[
                                            i[0] + 1][0].values())
                                ])),
                            dict(
                                zip(i[1][1].keys(), [
                                    list(a) for a in zip(
                                        i[1][1].values(), self.products[
                                            i[0] + 1][1].values())
                                ]))
                        ]])

                # Else, the scenes don't overlap: there is a gap. Reject the
                # date from the product list, and keep track of all failed dates
                else:
                    track_rejected_pairs.extend(
                        (i[1][0]['pair_name'],
                         self.products[i[0] + 1][0]['pair_name']))
                    log.debug("Gap for interferogram %s", i[1][0]['pair_name'])

            # Products correspond to different dates, so pass both as separate IFGs.
            else:
                # Check if dictionary for IFG corresponding to reference product already exists. If not, then pass as new IFG.
                if ([item for item in sorted_products
                     if i[1][1]['productBoundingBox']
                     in item[1]['productBoundingBox']] == []
                        and i[1][0]['pair_name'] not in track_rejected_pairs):
                    sorted_products.extend([[
                        dict(
                            zip(i[1][0].keys(),
                                [list(a) for a in zip(i[1][0].values())])),
                        dict(
                            zip(i[1][1].keys(),
                                [list(a) for a in zip(i[1][1].values())]))
                    ]])
                # Check if dictionary for IFG corresponding to next product already exists. If not, then pass as new IFG.
                if ([item for item in sorted_products
                     if self.products[i[0] + 1][1]['productBoundingBox']
                     in item[1]['productBoundingBox']] == []
                        and self.products[i[0] + 1][0]['pair_name']
                        not in track_rejected_pairs):
                    sorted_products.extend([[
                        dict(
                            zip(self.products[i[0] + 1][0].keys(), [
                                list(a)
                                for a in zip(self.products[i[0] +
                                                           1][0].values())
                            ])),
                        dict(
                            zip(self.products[i[0] + 1][1].keys(), [
                                list(a)
                                for a in zip(self.products[i[0] +
                                                           1][1].values())
                            ]))
                    ]])

        # Remove duplicate pair names from the rejection list
        track_rejected_pairs = list(set(track_rejected_pairs))
        if len(track_rejected_pairs) > 0:
            log.warning(
                "%d out of %d interferograms rejected since stitched "
                "interferogram would have gaps",
                len(track_rejected_pairs), len(sorted_products))
            # Provide report of which files were kept vs. which were not.
            log.debug(
                "Specifically, the following interferograms were rejected:")
            for item in sorted_products:
                if item[1]['pair_name'][0] in track_rejected_pairs:
                    log.debug(
                        str([
                            rejects.split('"')[1]
                            for rejects in item[1]['productBoundingBox']
                        ]).strip('[]'))
        else:
            log.info("All (%d) interferograms are spatially continuous.",
                     len(sorted_products))

        sorted_products = [
            [item[0] for item in sorted_products
             if item[0]['pair_name'][0] not in track_rejected_pairs],
            [item[1] for item in sorted_products
             if item[1]['pair_name'][0] not in track_rejected_pairs]]

        ### Report dictionaries for all valid products
        # Check whether any pairs were successfully selected
        if sorted_products == [[], []]:
            raise Exception(
                'No valid interferograms meet the spatial criteria due to '
                'gaps and/or invalid input; nothing to export.')

        return sorted_products
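To make the grouping rule above concrete, here is a minimal, self-contained sketch (toy timestamps and scene lengths, not ARIA metadata) of the temporal-overlap test that decides whether two successive scenes along the same orbit can be stitched into one interferogram:

from datetime import datetime, timedelta

def scenes_overlap(start_a, length_a, start_b, length_b):
    """Return True if two scenes overlap (or touch) in time."""
    end_a = start_a + timedelta(seconds=length_a)
    end_b = start_b + timedelta(seconds=length_b)
    # Standard interval-overlap test
    return end_a >= start_b and end_b >= start_a

# Two successive ~25 s scenes starting 20 s apart along-track: contiguous
t0 = datetime(2020, 1, 1, 12, 0, 0)
print(scenes_overlap(t0, 25, t0 + timedelta(seconds=20), 25))  # True
# A scene starting 85 s later leaves a temporal (and hence spatial) gap
print(scenes_overlap(t0, 25, t0 + timedelta(seconds=85), 25))  # False

This mirrors the interval test in the elif branch above; the method additionally requires that both scenes share the same pair name and orbit cycle before applying it.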
Example #17
def main(inps=None):
    inps = cmdLineParse()

    print("***Time-series Preparation Function:***")
    # If a user bbox was specified, file(s) not meeting the imposed spatial
    # criteria are rejected.
    # Outputs = arrays ['standardproduct_info.products'] containing grouped
    # "radar metadata info" and "data layer keys+paths" dictionaries for each
    # standard product.
    # In addition, path to bbox file ['standardproduct_info.bbox_file']
    # (if bbox specified).
    standardproduct_info = ARIA_standardproduct(inps.imgfile, bbox=inps.bbox,
                                                workdir=inps.workdir,
                                                verbose=inps.verbose)

    # pass number of threads for gdal multiprocessing computation
    if inps.num_threads.lower() == 'all':
        import multiprocessing
        print('User specified use of all %s threads for gdal multiprocessing'
              % (multiprocessing.cpu_count()))
        inps.num_threads = 'ALL_CPUS'
    print('Thread count specified for gdal multiprocessing = %s'
          % (inps.num_threads))

    # extract/merge productBoundingBox layers for each pair and update dict,
    # report common track bbox (default is to take common intersection, but user may specify union), and expected shape for DEM.
    (standardproduct_info.products[0], standardproduct_info.products[1],
     standardproduct_info.bbox_file, prods_TOTbbox,
     prods_TOTbbox_metadatalyr, arrshape, proj) = merged_productbbox(
         standardproduct_info.products[0], standardproduct_info.products[1],
         os.path.join(inps.workdir, 'productBoundingBox'),
         standardproduct_info.bbox_file, inps.croptounion,
         num_threads=inps.num_threads, minimumOverlap=inps.minimumOverlap,
         verbose=inps.verbose)


    # Download/load DEM & lat/lon arrays, providing bbox, expected DEM shape,
    # and output dir as input. Initialize to None so later references are
    # safe when no DEM is requested.
    demfile, Latitude, Longitude = None, None, None
    if inps.demfile is not None:
        print('Downloading/cropping DEM')
        # Pass DEM filename, loaded DEM array, and lat/lon arrays
        inps.demfile, demfile, Latitude, Longitude = prep_dem(
            inps.demfile, standardproduct_info.bbox_file, prods_TOTbbox,
            prods_TOTbbox_metadatalyr, proj, arrshape=arrshape,
            workdir=inps.workdir, outputFormat=inps.outputFormat,
            num_threads=inps.num_threads)

    # Load or download mask (if specified).
    if inps.mask is not None:
        # Gather unique amplitude paths and pair names across all products
        amp_list = [item for sublist in
                    [list(set(d['amplitude'])) for d in
                     standardproduct_info.products[1] if 'amplitude' in d]
                    for item in sublist]
        pair_list = [item for sublist in
                     [list(set(d['pair_name'])) for d in
                      standardproduct_info.products[1] if 'pair_name' in d]
                     for item in sublist]
        inps.mask = prep_mask([amp_list, pair_list], inps.mask,
                              standardproduct_info.bbox_file, prods_TOTbbox,
                              proj, amp_thresh=inps.amp_thresh,
                              arrshape=arrshape, workdir=inps.workdir,
                              outputFormat=inps.outputFormat,
                              num_threads=inps.num_threads)

    # Extract unwrapped phase and coherence layers
    layers = ['unwrappedPhase', 'coherence']
    print('\nExtracting unwrapped phase, coherence, and connected components '
          'for each interferogram pair')
    export_products(standardproduct_info.products[1],
                    standardproduct_info.bbox_file, prods_TOTbbox, layers,
                    dem=demfile, lat=Latitude, lon=Longitude, mask=inps.mask,
                    outDir=inps.workdir, outputFormat=inps.outputFormat,
                    stitchMethodType='overlap', verbose=inps.verbose,
                    num_threads=inps.num_threads,
                    multilooking=inps.multilooking)

    layers = ['incidenceAngle', 'lookAngle', 'azimuthAngle']
    print('\nExtracting single incidence angle, look angle and azimuth angle '
          'files valid over common interferometric grid')
    # Merge all product dictionaries into one, collecting the unique values
    # of every key, so these static layers are extracted only once
    all_keys = set(k for d in standardproduct_info.products[1] for k in d)
    merged_dict = dict(zip(all_keys, [
        [item for sublist in
         [list(set(d[k])) for d in standardproduct_info.products[1] if k in d]
         for item in sublist]
        for k in all_keys]))
    export_products([merged_dict], standardproduct_info.bbox_file,
                    prods_TOTbbox, layers, dem=demfile, lat=Latitude,
                    lon=Longitude, mask=inps.mask, outDir=inps.workdir,
                    outputFormat=inps.outputFormat,
                    stitchMethodType='overlap', verbose=inps.verbose,
                    num_threads=inps.num_threads,
                    multilooking=inps.multilooking)

    if inps.bperp:
        layers = ['bPerpendicular']
        print('\nExtracting perpendicular baseline grids for each '
              'interferogram pair')
        export_products(standardproduct_info.products[1],
                        standardproduct_info.bbox_file, prods_TOTbbox, layers,
                        dem=demfile, lat=Latitude, lon=Longitude,
                        mask=inps.mask, outDir=inps.workdir,
                        outputFormat=inps.outputFormat,
                        stitchMethodType='overlap', verbose=inps.verbose,
                        num_threads=inps.num_threads,
                        multilooking=inps.multilooking)

    # Extract other layers, if specified
    if inps.layers:
        if inps.layers.lower() == 'all':
            inps.layers = list(standardproduct_info.products[1][0].keys())
            # Must also remove productBoundingBoxes & pair names because they
            # are not raster layers
            layers = [i for i in inps.layers if i not in
                      ['unwrappedPhase', 'coherence', 'incidenceAngle',
                       'lookAngle', 'azimuthAngle', 'bPerpendicular',
                       'productBoundingBox', 'productBoundingBoxFrames',
                       'pair_name']]
        else:
            inps.layers = list(inps.layers.split(','))
            layers = [i for i in inps.layers if i not in
                      ['unwrappedPhase', 'coherence', 'incidenceAngle',
                       'lookAngle', 'azimuthAngle', 'bPerpendicular']]
            layers = [i.replace(' ', '') for i in layers]

        if layers != []:
            print('\nExtracting optional, user-specified layers %s for each '
                  'interferogram pair' % (layers))
            export_products(standardproduct_info.products[1],
                            standardproduct_info.bbox_file, prods_TOTbbox,
                            layers, dem=demfile, lat=Latitude, lon=Longitude,
                            mask=inps.mask, outDir=inps.workdir,
                            outputFormat=inps.outputFormat,
                            stitchMethodType='overlap', verbose=inps.verbose,
                            num_threads=inps.num_threads,
                            multilooking=inps.multilooking)

    # If necessary, resample DEM/mask AFTER they have been used to extract
    # metadata layers and mask output layers, respectively
    if inps.multilooking is not None:
        # Import functions
        from ARIAtools.shapefile_util import open_shapefile
        from ARIAtools.vrtmanager import resampleRaster
        bounds = open_shapefile(standardproduct_info.bbox_file, 0, 0).bounds
        # Resample mask
        if inps.mask is not None:
            resampleRaster(inps.mask.GetDescription(), inps.multilooking,
                           bounds, prods_TOTbbox, inps.rankedResampling,
                           outputFormat=inps.outputFormat,
                           num_threads=inps.num_threads)
        # Resample DEM
        if demfile is not None:
            resampleRaster(demfile.GetDescription(), inps.multilooking,
                           bounds, prods_TOTbbox, inps.rankedResampling,
                           outputFormat=inps.outputFormat,
                           num_threads=inps.num_threads)

    # Perform GACOS-based tropospheric corrections (if specified).
    if inps.tropo_products:
        tropo_correction(standardproduct_info.products, inps.tropo_products,
                         standardproduct_info.bbox_file, prods_TOTbbox,
                         outDir=inps.workdir, outputFormat=inps.outputFormat,
                         verbose=inps.verbose, num_threads=inps.num_threads)

    # Generate stacks
    generateStack(standardproduct_info, 'coherence', 'cohStack',
                  workdir=inps.workdir)
    generateStack(standardproduct_info, 'connectedComponents',
                  'connCompStack', workdir=inps.workdir)
    if inps.tropo_products:
        generateStack(standardproduct_info, 'tropocorrected_products',
                      'unwrapStack', workdir=inps.workdir)
    else:
        generateStack(standardproduct_info, 'unwrappedPhase', 'unwrapStack',
                      workdir=inps.workdir)
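The 'all' -> 'ALL_CPUS' translation near the top of main() is the only place the thread count is normalized before being embedded in GDAL option strings. A minimal sketch of that convention factored into a hypothetical helper (the name is illustrative, not part of ARIAtools):

import multiprocessing

def resolve_gdal_threads(num_threads):
    """Normalize a user thread-count string for GDAL option lists.

    'all' maps to GDAL's ALL_CPUS token; anything else passes through,
    ready for use as 'NUM_THREADS=%s' % resolve_gdal_threads(...).
    """
    if str(num_threads).lower() == 'all':
        print('Using all %d threads for gdal multiprocessing'
              % multiprocessing.cpu_count())
        return 'ALL_CPUS'
    return str(num_threads)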
Example #18
def prep_mask(product_dict,
              maskfilename,
              bbox_file,
              prods_TOTbbox,
              proj,
              amp_thresh=None,
              arrshape=None,
              workdir='./',
              outputFormat='ENVI',
              num_threads='2'):
    """Function to load and export mask file.
    If "Download" flag, GSHHS water mask will be donwloaded on the fly.
    If full resolution NLCD landcover data is given (NLCD...img) it gets cropped
    """

    # Import functions
    from ARIAtools.vrtmanager import renderOGRVRT
    _world_watermask = [f' /vsizip/vsicurl/http://www.soest.hawaii.edu/pwessel/'\
                        f'gshhg/gshhg-shp-2.3.7.zip/GSHHS_shp/f/GSHHS_f_L{i}.shp'\
                        for i in range(1, 5)]
    _world_watermask.append( ' /vsizip/vsicurl/https://osmdata.openstreetmap.de'\
                             '/download/land-polygons-complete-4326.zip/'\
                             'land-polygons-complete-4326/land_polygons.shp')
    # Create mask subdirectory if it doesn't already exist
    workdir = os.path.join(workdir, 'mask')
    os.makedirs(workdir, exist_ok=True)

    # Get bounds of user bbox_file
    bounds = open_shapefile(bbox_file, 0, 0).bounds

    # File must be physically extracted, cannot proceed with VRT format. Defaulting to ENVI format.
    if outputFormat == 'VRT':
        outputFormat = 'ENVI'

    # Download mask
    if maskfilename.lower() == 'download':
        log.info("***Downloading water mask... ***")
        maskfilename = os.path.join(workdir, 'watermask' + '.msk')
        os.environ['CPL_ZIP_ENCODING'] = 'UTF-8'
        ###Make coastlines/islands union VRT
        renderOGRVRT(os.path.join(workdir, 'watermsk_shorelines.vrt'),
                     _world_watermask[::2])
        ###Make lakes/ponds union VRT
        renderOGRVRT(os.path.join(workdir, 'watermsk_lakes.vrt'),
                     _world_watermask[1::2])

        ###Initiate water-mask with coastlines/islands union VRT
        # save uncropped mask
        gdal.Rasterize(os.path.join(workdir, 'watermask_uncropped.msk'),
                       os.path.join(workdir, 'watermsk_shorelines.vrt'),
                       format=outputFormat,
                       outputBounds=bounds,
                       outputType=gdal.GDT_Byte,
                       width=arrshape[1],
                       height=arrshape[0],
                       burnValues=[1],
                       layers='merged')
        gdal.Translate(os.path.join(workdir, 'watermask_uncropped.msk.vrt'),
                       os.path.join(workdir, 'watermask_uncropped.msk'),
                       format="VRT")
        # save cropped mask
        gdal.Warp(maskfilename,
                  os.path.join(workdir, 'watermask_uncropped.msk.vrt'),
                  format=outputFormat,
                  outputBounds=bounds,
                  outputType=gdal.GDT_Byte,
                  width=arrshape[1],
                  height=arrshape[0],
                  multithread=True,
                  options=['NUM_THREADS=%s' % (num_threads)])

        update_file = gdal.Open(maskfilename, gdal.GA_Update)
        update_file.SetProjection(proj)
        update_file.GetRasterBand(1).SetNoDataValue(0.)
        del update_file
        gdal.Translate(f'{maskfilename}.vrt', maskfilename, format='VRT')

        ### Must take the inverse of the lakes/ponds union because its
        ### designation (1 for water, 0 for land) is the opposite of what is
        ### desired (0 for water, 1 for land)
        lake_masks = gdal.Rasterize('',
                                    os.path.join(workdir,
                                                 'watermsk_lakes.vrt'),
                                    format='MEM',
                                    outputBounds=bounds,
                                    outputType=gdal.GDT_Byte,
                                    width=arrshape[1],
                                    height=arrshape[0],
                                    burnValues=[1],
                                    layers='merged',
                                    inverse=True)

        lake_masks.SetProjection(proj)
        lake_masks = lake_masks.ReadAsArray()

        ## Update water-mask with lakes/ponds union
        mask_file = gdal.Open(maskfilename, gdal.GA_Update)
        mask_file.GetRasterBand(1).WriteArray(
            lake_masks * gdal.Open(maskfilename).ReadAsArray())
        #Delete temp files
        del lake_masks, mask_file

    ## Use NLCD Mask
    elif os.path.basename(maskfilename).lower().startswith('nlcd'):
        log.info("***Accessing and cropping the NLCD mask...***")
        maskfilename = NLCDMasker(os.path.dirname(workdir))(proj, bounds,
                                                            arrshape,
                                                            outputFormat)

    ## User specified mask
    else:
        # Path to local version of user specified mask
        user_mask = maskfilename  # for clarity
        user_mask_n = os.path.basename(os.path.splitext(user_mask)[0])
        local_mask = os.path.join(workdir, f'{user_mask_n}.msk')
        local_mask_unc = os.path.join(workdir, f'{user_mask_n}_uncropped.msk')
        if user_mask == local_mask:
            log.warning('The mask you specified already exists in %s, '\
                    'using the existing one...', os.path.dirname(local_mask))

        else:
            # move the mask to the local directory and build a VRT for it
            gdal.UseExceptions()
            # shutil.copy(user_mask, local_mask_unc)
            ds = gdal.BuildVRT(f'{local_mask_unc}.vrt',
                               user_mask,
                               outputBounds=bounds)

            assert ds is not None, f'GDAL could not open user mask: {user_mask}'

            # crop the user mask and write
            gdal.Warp(f'{local_mask}',
                      ds,
                      format=outputFormat,
                      cutlineDSName=prods_TOTbbox,
                      outputBounds=bounds,
                      width=arrshape[1],
                      height=arrshape[0],
                      multithread=True,
                      options=[f'NUM_THREADS={num_threads}'])

            # set projection of the local mask
            mask_file = gdal.Open(local_mask, gdal.GA_Update)
            mask_file.SetProjection(proj)
            del mask_file

            # create vrt for local cropped mask
            gdal.Translate(f'{local_mask}.vrt', local_mask, format='VRT')
        # assign new local mask for amp thresh
        maskfilename = local_mask

    ## Make average amplitude mask
    if amp_thresh is not None:
        amp_file = rasterAverage(os.path.join(workdir, 'avgamplitude'),
                                 product_dict,
                                 bounds,
                                 prods_TOTbbox,
                                 outputFormat=outputFormat,
                                 thresh=amp_thresh)

        # Update mask with average amplitude
        mask_file = gdal.Open(maskfilename, gdal.GA_Update)
        mask_arr = mask_file.ReadAsArray()
        mask_file.GetRasterBand(1).WriteArray(mask_arr * amp_file)

        # Delete temp files
        del mask_file, amp_file

    # Crop/expand the mask to the expected array (DEM) size
    mask = gdal.Warp('',
                     maskfilename,
                     format='MEM',
                     cutlineDSName=prods_TOTbbox,
                     outputBounds=bounds,
                     width=arrshape[1],
                     height=arrshape[0],
                     multithread=True,
                     options=[f'NUM_THREADS={num_threads}'])

    mask.SetProjection(proj)
    mask.SetDescription(maskfilename)

    try:
        # Remove intermediate water-mask VRTs, if they were created above
        path_shorelines = os.path.join(workdir, 'watermsk_shorelines.vrt')
        path_lakes = os.path.join(workdir, 'watermsk_lakes.vrt')
        for path in (path_shorelines, path_lakes):
            if os.path.exists(path):
                os.remove(path)
    except OSError:
        pass

    return mask
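Both the lakes/ponds update and the average-amplitude update above reduce to multiplying 0/1 masks elementwise, so a pixel survives only if every mask keeps it. A minimal numpy sketch of that composition (toy arrays, not real rasters):

import numpy as np

# Toy 0/1 masks: 1 = keep (land / bright enough), 0 = discard
shoreline_mask = np.array([[1, 1, 0],
                           [1, 1, 1]], dtype=np.uint8)
lake_mask = np.array([[1, 0, 1],
                      [1, 1, 1]], dtype=np.uint8)  # inverse of lake water
amp_mask = np.array([[1, 1, 1],
                     [0, 1, 1]], dtype=np.uint8)   # amplitude threshold

combined = shoreline_mask * lake_mask * amp_mask
print(combined)
# [[1 0 0]
#  [0 1 1]]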