class DGImage:
    """Thin wrapper around a gbdxtools ``CatalogImage``.

    Lifecycle: ``fetch()`` resolves the catalog id to an image object,
    ``load()`` writes it to a temporary GeoTIFF under ``GBDX_DIR``, and
    ``transform()`` splits that raster into per-band files plus a
    ``meta.geojson`` sidecar in ``dst_path``.
    """

    def __init__(self, image_id):
        # Catalog id of the Digital Globe scene to wrap.
        self.image_id = image_id
        # Populated by fetch(); None until then.
        self.image = None
        # Random temp filename avoids collisions between concurrent loads.
        self.image_path = os.path.join(GBDX_DIR, '{}.tif'.format(random_word(10)))
        self.bc = None

    def __getattr__(self, item):
        # Delegate any attribute we don't define to the wrapped image.
        # Raises AttributeError (via getattr on None) if fetch() was not called.
        return getattr(self.image, item)

    def fetch(self, *args, **kwargs):
        """Resolve ``image_id`` to a CatalogImage; extra args pass through."""
        self.image = CatalogImage(self.image_id, *args, **kwargs)

    def load(self, *args, **kwargs):
        """Write the fetched image to ``self.image_path`` as a GeoTIFF.

        Raises:
            AttributeError: if ``fetch()`` has not been called yet.
        """
        if self.image is not None:
            self.image.geotiff(self.image_path, *args, **kwargs)
        else:
            raise AttributeError('Fetch image before loading')

    def transform(self, dst_path, channels):
        """Split the loaded raster into bands and save metadata.

        Args:
            dst_path: directory receiving the per-band rasters and
                ``meta.geojson``.
            channels: channel specification forwarded to ``split_raster``.

        Raises:
            FileNotFoundError: if ``load()`` has not written the GeoTIFF yet.
        """
        if os.path.exists(self.image_path):
            # split raster to separate bands
            split_raster(self.image, dst_path, channels)
            # extract meta information and save
            meta_fc = get_meta(self.image)
            meta_fc.save(os.path.join(dst_path, 'meta.geojson'))
        else:
            # Bug fix: original raised FileExistsError, but this branch is
            # taken when the file is MISSING — FileNotFoundError is correct.
            raise FileNotFoundError('Load GBDX Image before transform')
def _load_image(self, image: gbdxtools.CatalogImage, path: str, aoi: aoi.AreaOfInterest = None, **kwargs):
    """Write *image* to *path* as a GeoTIFF, optionally clipped to *aoi*.

    Args:
        image: the gbdxtools image to materialize.
        path: destination GeoTIFF path.
        aoi: optional area of interest; when given, the image is first
            clipped to ``aoi.bbox``. (Note: this parameter shadows the
            module-level ``aoi`` used in its own annotation.)
        **kwargs: forwarded to ``CatalogImage.geotiff``.

    Raises:
        errors.ImageExpiredError: when the underlying failure matches the
            project's expired-image signature.
        Exception: any other error from the download is re-raised as-is.
    """
    try:
        if aoi is not None:
            image = image.aoi(bbox=aoi.bbox)
        image.geotiff(path=path, **kwargs)
    except Exception as e:
        if errors.ImageExpiredError.catch(e):
            # Chain the cause so the original gbdxtools error stays visible.
            raise errors.ImageExpiredError(str(e)) from e
        # Bare raise preserves the original traceback (``raise e`` resets
        # the raise site to this line).
        raise
def fetch(self, key, aoi, pansharpen=False, acomp=False, dra=False, **kwargs):
    """Order, download, clip, and COG-ify one asset variant of this scene.

    Args:
        key: asset variant — one of 'rgb', 'rgb-pan', 'visual', 'analytic'.
        aoi: GeoJSON-serializable area of interest used to clip the output.
        pansharpen, acomp, dra: NOTE(review) — these parameters are
            immediately overwritten by the "defaults" section below, so the
            values callers pass are ignored; the variant is chosen solely by
            ``key``. Confirm whether that is intentional.
        **kwargs: accepted but unused in this body.

    Returns:
        Path to the output .tif (NOTE(review): if ``self.order()`` is falsy
        the function falls through and returns None; on a download error the
        path is still returned even though the file may not exist).
    """
    if self.order():
        # create tempfile for AOI (delete=False: gippy reads it by name below)
        with tempfile.NamedTemporaryFile(suffix='.geojson', mode='w', delete=False) as f:
            aoiname = f.name
            aoistr = json.dumps(aoi)
            f.write(aoistr)
        geovec = gippy.GeoVector(aoiname)
        ext = geovec.extent()
        bbox = [ext.x0(), ext.y0(), ext.x1(), ext.y1()]
        # defaults (these overwrite the same-named parameters — see docstring)
        spec = ''
        pansharpen = False
        acomp = False
        dra = False
        # NOTE(review): nodata is computed here but never used afterwards —
        # presumably it was meant to be passed to geotiff()/cookie_cutter().
        nodata = 0 if self['eo:platform'] in ['GEOEYE01', 'QUICKBIRD02'
                                              ] else -1e10
        opts = COG
        # set options per asset variant
        if key == 'rgb':
            spec = 'rgb'
            nodata = 0
            #opts = JPEG_COG
        elif key == 'rgb-pan':
            pansharpen = True
            spec = 'rgb'
            nodata = 0
        elif key == 'visual':
            pansharpen = True
            dra = True
            nodata = 0
            #opts = JPEG_COG
        elif key == 'analytic':
            acomp = True
        # final output path: <scene path>/<filename>_<key>.tif
        fout = os.path.join(self.get_path(), self.get_filename(suffix='_%s' % key)) + '.tif'
        with TemporaryDirectory() as temp_dir:
            try:
                if not os.path.exists(fout):
                    # skip work if the output already exists (simple cache)
                    logger.info('Fetching %s: %s' % (key, fout))
                    # TODO - allow for other projections
                    img = CatalogImage(
                        self['id'], pansharpen=pansharpen, acomp=acomp,
                        dra=dra, bbox=bbox)  #, proj=utm_epsg(scenes.center()))
                    tmp_fout1 = os.path.join(
                        temp_dir, '%s_%s_1.tif' % (self['id'], key))
                    tmp_fout2 = os.path.join(
                        temp_dir, '%s_%s_2.tif' % (self['id'], key))
                    tif = img.geotiff(path=tmp_fout1, proj='EPSG:4326', spec=spec)
                    # clip and save
                    geoimg = gippy.GeoImage(tif, True)
                    # workaround for gbdxtools scaling: rescale visual bands
                    # into 1..255 and reserve 0 as nodata
                    if key in ['rgb', 'visual']:
                        geoimg = geoimg.autoscale(1, 255).save(tmp_fout2)
                        geoimg.set_nodata(0)
                    # this clips the image to the AOI
                    res = geoimg.resolution()
                    imgout = alg.cookie_cutter([geoimg], fout, geovec[0],
                                               xres=res.x(), yres=res.y(),
                                               proj=geoimg.srs(), options=opts)
                    imgout.add_overviews([2, 4, 8, 16], resampler='average')
                    # drop the reference so the file handle is flushed/closed
                    imgout = None
            except Exception as e:
                # best-effort: log and fall through; fout is still returned
                logger.warning('Error fetching: %s' % str(e))
                #logger.warning('Traceback: %s', traceback.format_exc())
        # clean up the delete=False AOI tempfile
        os.remove(aoiname)
        return fout
def downloadImage(self, cat_id, outFile, output="IMAGE", boundingBox=None, curWKT=None, imgChipSize=1000, band_type="MS", panSharpen=True, acomp=True, getOutSize=False, specificTiles=None):
    ''' Uses the CatalogImage object to download and write data
        http://gbdxtools.readthedocs.io/en/latest/api_reference.html#catalogimage

    Args:
        cat_id (string) - CatalogID for Digital Globe that is present in IDAHO
        outFile (string) - path to output image
        output (string, optional: IMAGE, INDICES, NDSV) - what kind of image to
            return. 'IMAGE' is the raw imagery, 'INDICES' returns a stacked
            NDVI, NDWI and 'NDSV' returns the Normalized Difference Spectral
            Vector
        boundingBox (list of bottom, left, top, right, optional) - bounding box
            to subset catalog image with
        curWKT (shapely geometry, optional) - clip output to the intersection
            of the image bounds with this geometry
        imgChipSize (int, optional) - images larger than this (pixels per side)
            are written as a folder of tiled chips instead of one file
        getOutSize (bool, optional) - when True, return the image shape
            instead of downloading
        specificTiles (list of list of integers, optional) - specific columns
            (first list) and rows (second list) to create - used mostly for
            re-running missing, crashed, or broken results

    Returns:
        img.shape when getOutSize is set, otherwise 1 on completion.

    Raises:
        ValueError: when curWKT does not intersect the image bounds.
    '''
    # Avoid a mutable default argument; None means "all tiles".
    if specificTiles is None:
        specificTiles = [[], []]
    img = CatalogImage(cat_id, pansharpen=panSharpen, band_type=band_type, acomp=acomp)
    sensor = img.metadata['image']['sensorPlatformName']
    if boundingBox:
        img = img.aoi(bbox=boundingBox)
    if curWKT:
        # Intersect the bounds of the image with curWKT
        b = img.bounds
        curImageBounds = [[[b[0], b[1]], [b[0], b[3]], [b[2], b[3]],
                           [b[2], b[1]], [b[0], b[1]]]]
        inPoly = geojson.Polygon(curImageBounds)
        imgBounds = shape(inPoly)
        if imgBounds.intersects(curWKT):
            img = img.aoi(wkt=str(imgBounds.intersection(curWKT)))
        else:
            raise ValueError(
                "Provided KML does not intersect image bounds")
    if getOutSize:
        # If this flag is set, return the size of the total image instead
        return img.shape
    # If the output image is going to be large, write the output as tiled chips
    if img.shape[1] > imgChipSize or img.shape[2] > imgChipSize:
        # Create output directory based on file name
        outFolder = outFile.replace(".tif", "")
        # Bug fix: was `try: os.mkdir(...) except: pass`, which also swallowed
        # unrelated errors (e.g. missing parents, permissions on other OSes).
        os.makedirs(outFolder, exist_ok=True)
        # Bug fix: Python 3 `range` objects have no .append(); materialize
        # the step boundaries as lists before appending the final edge.
        rowSteps = list(range(0, img.shape[1], imgChipSize))
        rowSteps.append(img.shape[1])
        rowIndex = list(range(0, len(rowSteps) - 1, 1))
        if len(specificTiles[1]) > 0:
            rowIndex = specificTiles[1]
        colSteps = list(range(0, img.shape[2], imgChipSize))
        colSteps.append(img.shape[2])
        colIndex = list(range(0, len(colSteps) - 1, 1))
        if len(specificTiles[0]) > 0:
            colIndex = specificTiles[0]
        for rIdx in rowIndex:
            for cIdx in colIndex:
                logging.info(
                    "Downloading row %s of %s and column %s of %s" %
                    (rIdx, len(rowSteps), cIdx, len(colSteps)))
                outputChip = os.path.join(outFolder,
                                          "C%s_R%s.tif" % (cIdx, rIdx))
                # All bands, one [row, col] window of the full image
                curChip = img[0:img.shape[0],
                              rowSteps[rIdx]:rowSteps[rIdx + 1],
                              colSteps[cIdx]:colSteps[cIdx + 1]]
                if not os.path.exists(outputChip):
                    if output == "IMAGE":
                        #curChip.geotiff(path=outputChip)
                        out_meta = {
                            "dtype": curChip.dtype,
                            "compress": 'lzw',
                            "driver": "GTiff",
                            "count": curChip.shape[0],
                            "height": curChip.shape[1],
                            "width": curChip.shape[2],
                            "transform": curChip.affine,
                            #Affine(img.metadata['georef']['scaleX'], 0.0, img.metadata['georef']['translateX'],0.0, img.metadata['georef']['scaleY'], img.metadata['georef']['translateY']),
                            "crs": curChip.proj
                        }
                        with rasterio.open(outputChip, "w", **out_meta) as dest:
                            dest.write(curChip)
                    if output == "INDICES":
                        outImage = self.calculateIndices(
                            curChip, sensor, outputChip)
                    if output == 'NDSV':
                        outImage = self.calculateNDSV(
                            curChip, sensor, outputChip)
    else:
        # Small enough to write as a single file
        #img.geotiff(path=outFile)
        if output == "IMAGE":
            img.geotiff(path=outFile)
        if output == "INDICES":
            outImage = self.calculateIndices(img, sensor, outFile)
        if output == 'NDSV':
            outImage = self.calculateNDSV(img, sensor, outFile)
    return 1