Example no. 1
0
    def __init__(self, filearg, bbox=None, workdir='./', verbose=False):
        """
        Parse the input file specification and optional bounding box, then
        build the product dictionaries via self.__run__().

        Parameters
        ----------
        filearg : str
            A single file path, a comma-separated list of paths, or a
            wildcard pattern.
        bbox : str, optional
            Either a space-separated 'S N W E' string of floats, or the
            path to an existing shapefile.
        workdir : str, optional
            Output directory; created if it does not exist.
        verbose : bool, optional
            Enable verbose mode.

        Raises
        ------
        Exception
            If no files match the input, or the bbox string cannot be parsed.
        """
        # If user wants verbose mode
        self.verbose = verbose
        # Parse through file(s)/bbox input
        self.files = []
        self.products = []
        # Track bbox file
        self.bbox_file = None
        # Pair name for layer extraction
        self.pairname = None

        ### Determine if file input is single file, a list, or wildcard.
        if ',' in filearg:
            # Comma-separated list of files
            self.files = [str(val) for val in filearg.split(',')]
        elif os.path.isfile(filearg):
            # Single existing file
            self.files = [filearg]
        else:
            # Wildcard; expand user/env variables before globbing
            self.files = self.glob.glob(
                os.path.expanduser(os.path.expandvars(filearg)))
        # Convert relative paths to absolute paths.
        # (Applied to every branch; the original skipped this for
        # comma-separated input, inconsistently with the other cases.)
        self.files = [os.path.normpath(os.path.join(os.getcwd(), i))
                      if not os.path.isabs(i) else i for i in self.files]
        if len(self.files) == 0:
            raise Exception('No file match found')
        # If specified workdir doesn't exist, create it
        if not os.path.exists(workdir):
            os.mkdir(workdir)

        ### Check if bbox input is valid list or shapefile.
        if bbox is not None:
            if os.path.isfile(bbox):
                # Shapefile input
                ##SS => We should track the projection of the shapefile,
                # i.e. if user provides this in e.g. UTM etc.
                self.bbox = open_shapefile(bbox, 0, 0)
                self.bbox_file = bbox
            else:
                # Assume a space-separated 'S N W E' float string
                from shapely.geometry import Polygon
                try:
                    bbox = [float(val) for val in bbox.split()]
                except ValueError:
                    # Narrowed from a bare except: only float() parsing
                    # can fail here.
                    raise Exception('Cannot understand the --bbox argument. String input is incorrect or path does not exist.')
                # Build a closed polygon: column 0 is lons (W/E),
                # column 1 is lats (S/N).
                self.bbox = Polygon(np.column_stack((np.array([bbox[2],bbox[3],bbox[3],bbox[2],bbox[2]]),
                            np.array([bbox[0],bbox[0],bbox[1],bbox[1],bbox[0]])))) #Pass lons/lats to create polygon
                # Save polygon in shapefile
                self.bbox_file = os.path.join(workdir, 'user_bbox.shp')
                save_shapefile(self.bbox_file, self.bbox, 'GeoJSON')
                print("Shapefile %s created for input user bounds."%os.path.join(workdir,'user_bbox.shp'))
        else:
            self.bbox = None

        ### Report dictionaries for all valid products
        self.__run__()
Example no. 2
0
def merged_productbbox(product_dict,
                       workdir='./',
                       bbox_file=None,
                       croptounion=False):
    '''
        Extract/merge productBoundingBox layers for each pair and update
        dict, report common track bbox (default is to take common
        intersection, but user may specify union), and expected shape
        for DEM.

        Returns (product_dict, bbox_file, prods_TOTbbox, arrshape, proj).
    '''

    # Import functions
    from ARIAtools.shapefile_util import save_shapefile

    # If specified workdir doesn't exist, create it
    if not os.path.exists(workdir):
        os.mkdir(workdir)

    # Extract/merge productBoundingBox layers, one shapefile per pair
    for scene in product_dict:
        # Get pair name, expected in dictionary
        pair_name = scene["pair_name"][0]
        outname = os.path.join(workdir, pair_name + '.shp')

        # Create union of productBoundingBox layers across frames
        for frame in scene["productBoundingBox"]:
            prods_bbox = open_shapefile(frame, 'productBoundingBox', 1)
            if os.path.exists(outname):
                union_bbox = open_shapefile(outname, 0, 0)
                prods_bbox = prods_bbox.union(union_bbox)
            save_shapefile(
                outname, prods_bbox, 'GeoJSON'
            )  ##SS can we track and provide the proj information of the geojson?
        scene["productBoundingBox"] = [outname]

    prods_TOTbbox = os.path.join(workdir, 'productBoundingBox.shp')
    # Initiate intersection file with first product
    # this is for different scenes
    save_shapefile(
        prods_TOTbbox,
        open_shapefile(product_dict[0]['productBoundingBox'][0], 0, 0),
        'GeoJSON')
    for scene in product_dict[1:]:
        prods_bbox = open_shapefile(scene['productBoundingBox'][0], 0, 0)
        total_bbox = open_shapefile(prods_TOTbbox, 0, 0)
        # Generate footprint for the union of all products
        if croptounion:
            prods_bbox = prods_bbox.union(total_bbox)
        # Generate footprint for the common intersection of all products
        else:
            prods_bbox = prods_bbox.intersection(total_bbox)
        # Check if there is any common overlap. `is_empty` is the
        # documented shapely check; the `bounds == ()` comparison is kept
        # as a fallback since older code relied on it.
        if prods_bbox.is_empty or prods_bbox.bounds == ():
            raise Exception(
                'No common overlap, footprint cannot be generated. Last scene checked: %s'
                % (scene['productBoundingBox'][0]))
        save_shapefile(prods_TOTbbox, prods_bbox, 'GeoJSON')

    # If bbox specified, intersect with common track intersection/union
    if bbox_file is not None:
        user_bbox = open_shapefile(bbox_file, 0, 0)
        total_bbox = open_shapefile(prods_TOTbbox, 0, 0)
        user_bbox = user_bbox.intersection(total_bbox)
        save_shapefile(prods_TOTbbox, user_bbox, 'GeoJSON')

        # Estimate percentage of overlap with bbox.
        # NOTE(review): this compares rectangular *bounds* areas, not
        # true polygon areas — an approximation inherited from the
        # original implementation.
        prods_bbox_area = _bounds_rect_area(
            open_shapefile(prods_TOTbbox, 0, 0).bounds)
        bbox_area = _bounds_rect_area(
            open_shapefile(bbox_file, 0, 0).bounds)
        per_overlap = (prods_bbox_area / bbox_area) * 100
        if per_overlap < 50.:
            print(
                "WARNING: Common track extent only has %d%% overlap with bbox"
                % per_overlap + '\n')
    else:
        bbox_file = prods_TOTbbox

    # Warp the first scene with the output-bounds defined above
    ds = gdal.Warp('',
                   gdal.BuildVRT('', product_dict[0]['unwrappedPhase'][0]),
                   options=gdal.WarpOptions(format="MEM",
                                            outputBounds=open_shapefile(
                                                bbox_file, 0, 0).bounds))
    # Get shape of full res layers
    arrshape = [ds.RasterYSize, ds.RasterXSize]
    # Get projection of full res layers
    proj = ds.GetProjection()
    # Close the in-memory dataset
    ds = None

    return product_dict, bbox_file, prods_TOTbbox, arrshape, proj


def _bounds_rect_area(bounds):
    """Area of the axis-aligned rectangle spanned by a (minx, miny, maxx,
    maxy) bounds tuple. Factors out the duplicated max/min arithmetic."""
    return ((max(bounds[0], bounds[2]) - min(bounds[0], bounds[2])) *
            (max(bounds[1], bounds[3]) - min(bounds[1], bounds[3])))
Example no. 3
0
    def __init__(self, filearg, bbox=None, workdir='./', verbose=False):
        """
        Parse the input file specification (comma list, URL list file,
        single file, or wildcard) and optional bounding box, then build
        the product dictionaries via self.__run__().

        Parameters
        ----------
        filearg : str
            Comma-separated paths (wildcards allowed per entry), a .txt
            file listing one URL/path per line, a single file, or a
            wildcard pattern.
        bbox : str, optional
            Either a space-separated 'S N W E' string of floats, or the
            path to an existing shapefile.
        workdir : str, optional
            Output directory; created if it does not exist.
        verbose : bool, optional
            If True, set logging level to DEBUG.

        Raises
        ------
        Exception
            If no files match, the products are not readable as netCDF,
            or the bbox string cannot be parsed.
        """
        # If user wants verbose mode
        # NOTE(review): `logger` is used here but `log` is used below for
        # the bbox message — both names come from the original; confirm
        # which module-level logger is canonical.
        if verbose: logger.setLevel(logging.DEBUG)
        # Parse through file(s)/bbox input
        self.files = []
        self.products = []
        # Track bbox file
        self.bbox_file = None
        # Pair name for layer extraction
        self.pairname = None

        ### Determine if file input is a list, URL file, single file, or wildcard.
        if ',' in filearg:
            # Comma-separated list; expand any wildcard entries and flatten
            entries = [str(i) for i in filearg.split(',')]
            self.files = [
                os.path.abspath(item)
                for entry in entries
                for item in (self.glob.glob(
                    os.path.expanduser(os.path.expandvars(entry)))
                    if '*' in entry else [entry])
            ]
            # NOTE(review): abspath is applied to every entry here,
            # including URLs, matching the original behavior — verify
            # URLs are not expected in comma-separated input.
        # If list of URLs provided
        elif os.path.basename(filearg).endswith('.txt'):
            with open(filearg, 'r') as fh:
                # Strip both \n and \r so CRLF-terminated files work too
                self.files = [line.rstrip('\r\n') for line in fh]
        # If single file or wildcard
        else:
            if os.path.isfile(filearg):
                # Single existing file
                self.files = [filearg]
            else:
                # Wildcard; expand user/env variables before globbing
                self.files = self.glob.glob(
                    os.path.expanduser(os.path.expandvars(filearg)))
            # Convert relative paths to absolute paths
            self.files = [os.path.abspath(i) for i in self.files]

        # If URLs, prepend GDAL's '/vsicurl/' virtual-filesystem prefix
        self.files = [
            '/vsicurl/{}'.format(i) if 'https://' in i else i
            for i in self.files
        ]
        # Partition once instead of re-filtering for each check
        url_files = [s for s in self.files if 'https://' in s]
        local_files = [s for s in self.files if 'https://' not in s]

        #check if virtual file reader is being captured as netcdf
        if url_files:
            # must configure gdal to load URLs
            gdal.SetConfigOption('GDAL_HTTP_COOKIEFILE', 'cookies.txt')
            gdal.SetConfigOption('GDAL_HTTP_COOKIEJAR', 'cookies.txt')
            #gdal.SetConfigOption('CPL_VSIL_CURL_CHUNK_SIZE','10485760')
            gdal.SetConfigOption('VSI_CACHE', 'YES')

            fmt = gdal.Open(url_files[0]).GetDriver().GetDescription()
            if fmt != 'netCDF':
                raise Exception(
                    'System update required to read requested virtual products: Linux kernel >=4.3 and libnetcdf >=4.5'
                )
        #check if local file reader is being captured as netcdf
        if local_files:
            fmt = gdal.Open(local_files[0]).GetDriver().GetDescription()
            if fmt != 'netCDF':
                raise Exception(
                    'System update required to read requested local products: Linux kernel >=4.3 and libnetcdf >=4.5'
                )

        if len(self.files) == 0:
            raise Exception('No file match found')
        # If specified workdir doesn't exist, create it
        if not os.path.exists(workdir):
            os.mkdir(workdir)

        ### Check if bbox input is valid list or shapefile.
        if bbox is not None:
            if os.path.isfile(bbox):
                # Shapefile input
                ##SS => We should track the projection of the shapefile,
                # i.e. if user provides this in e.g. UTM etc.
                self.bbox = open_shapefile(bbox, 0, 0)
                self.bbox_file = bbox
            else:
                # Assume a space-separated 'S N W E' float string
                from shapely.geometry import Polygon
                try:
                    bbox = [float(val) for val in bbox.split()]
                except ValueError:
                    # Narrowed from a bare except: only float() parsing
                    # can fail here.
                    raise Exception(
                        'Cannot understand the --bbox argument. String input is incorrect or path does not exist.'
                    )
                # Build a closed polygon: column 0 is lons (W/E),
                # column 1 is lats (S/N).
                self.bbox = Polygon(
                    np.column_stack((np.array([
                        bbox[2], bbox[3], bbox[3], bbox[2], bbox[2]
                    ]), np.array([bbox[0], bbox[0], bbox[1], bbox[1], bbox[0]
                                  ]))))  #Pass lons/lats to create polygon
                # Save polygon in shapefile
                self.bbox_file = os.path.join(workdir, 'user_bbox.json')
                save_shapefile(self.bbox_file, self.bbox, 'GeoJSON')
                log.info("Shapefile %s created for input user bounds.",
                         os.path.join(workdir, 'user_bbox.json'))
        else:
            self.bbox = None

        ### Report dictionaries for all valid products
        self.__run__()