def test_cat_image_aoi(self):
    """Subsetting a catalog image by an explicit bbox yields the expected raster shape."""
    catalog_id = '104001002838EC00'
    image = CatalogImage(catalog_id)
    bbox = [
        -85.81455230712892,
        10.416235163695223,
        -85.77163696289064,
        10.457089934231618,
    ]
    clipped = image.aoi(bbox=bbox)
    assert clipped.shape == (8, 3037, 3189)
def test_image_fetch(self):
    """Fetch a WV2 AOI and verify the raw, RGB, and NDVI array shapes.

    Uses ``assertEqual``: the ``assertEquals`` alias is deprecated and was
    removed in Python 3.12.
    """
    wv2 = CatalogImage('1030010076B8F500')
    aoi = wv2.aoi(bbox=[-104.98815365500539, 39.71459029774345,
                        -104.98317715482573, 39.71956679792311])
    # apply_mock swaps in canned data so no network fetch happens
    aoi = apply_mock(aoi)
    arr = aoi.read()
    self.assertEqual(arr.shape, aoi.shape)
    rgb = aoi.rgb()
    self.assertEqual(rgb.shape, (256, 256, 3))
    ndvi = aoi.ndvi()
    self.assertEqual(ndvi.shape, (256, 256))
def test_image_fetch(self):
    """Fetch a WV2 AOI and verify the raw, RGB, and NDVI array shapes.

    Uses ``assertEqual``: the ``assertEquals`` alias is deprecated and was
    removed in Python 3.12.
    """
    wv2 = CatalogImage('1030010076B8F500')
    aoi = wv2.aoi(bbox=[
        -104.98815365500539,
        39.71459029774345,
        -104.98317715482573,
        39.71956679792311,
    ])
    # apply_mock swaps in canned data so no network fetch happens
    aoi = apply_mock(aoi)
    arr = aoi.read()
    self.assertEqual(arr.shape, aoi.shape)
    rgb = aoi.rgb()
    self.assertEqual(rgb.shape, (256, 256, 3))
    ndvi = aoi.ndvi()
    self.assertEqual(ndvi.shape, (256, 256))
def _load_image(self, image: gbdxtools.CatalogImage, path: str, aoi: aoi.AreaOfInterest = None, **kwargs):
    """Write *image* to *path* as a GeoTIFF, optionally clipped to an AOI.

    Args:
        image: the catalog image to export.
        aoi: optional area of interest; when given, the image is subset to
            ``aoi.bbox`` before export.
        **kwargs: forwarded to ``image.geotiff``.

    Raises:
        errors.ImageExpiredError: when the underlying failure matches the
            expired-image condition (``ImageExpiredError.catch``).
        Exception: any other failure is re-raised unchanged.
    """
    try:
        if aoi is not None:
            image = image.aoi(bbox=aoi.bbox)
        image.geotiff(path=path, **kwargs)
    except Exception as e:
        if errors.ImageExpiredError.catch(e):
            # Translate into the domain error, chaining the original cause.
            raise errors.ImageExpiredError(str(e)) from e
        # Bare raise re-raises the active exception with its original
        # traceback (idiomatic; `raise e` was redundant).
        raise
def test_cat_image_aoi(self):
    """The WV02 fixture's cat_id round-trips and bbox subsetting yields the known shape."""
    image = CatalogImage(WV02_CATID)
    self.assertEqual(image.cat_id, WV02_CATID)
    clipped = image.aoi(bbox=WV02_BBOX)
    self.assertEqual(clipped.shape, (8, 2323, 2322))
def downloadImage(self, cat_id, outFile, output="IMAGE", boundingBox=None, curWKT=None, imgChipSize=1000, band_type="MS", panSharpen=True, acomp=True, getOutSize=False, specificTiles=None):
    '''Uses the CatalogImage object to download and write data.

    http://gbdxtools.readthedocs.io/en/latest/api_reference.html#catalogimage

    Args:
        cat_id (string): CatalogID for Digital Globe that is present in IDAHO.
        outFile (string): path to output image.
        output (string, optional): one of 'IMAGE' (the raw imagery),
            'INDICES' (a stacked NDVI, NDWI) or 'NDSV' (the Normalized
            Difference Spectral Vector).
        boundingBox (list of bottom, left, top, right, optional): bounding
            box to subset catalog image with.
        curWKT (geometry, optional): geometry intersected with the image
            bounds; a ValueError is raised when they do not intersect.
        imgChipSize (int, optional): maximum rows/columns per output tile;
            larger images are written as tiled chips in a sibling folder.
        band_type, panSharpen, acomp: passed through to CatalogImage.
        getOutSize (bool, optional): when True, return the image shape
            instead of downloading anything.
        specificTiles (list of list of integers, optional): specific columns
            (first list) and rows (second list) to create - used mostly for
            re-running missing, crashed, or broken results. Defaults to all
            tiles.

    Returns:
        The image shape tuple when getOutSize is True, otherwise 1.
    '''
    # None default avoids the shared-mutable-default-argument pitfall;
    # [[], []] preserves the original "all tiles" behaviour.
    if specificTiles is None:
        specificTiles = [[], []]
    img = CatalogImage(cat_id, pansharpen=panSharpen, band_type=band_type, acomp=acomp)
    sensor = img.metadata['image']['sensorPlatformName']
    if boundingBox:
        img = img.aoi(bbox=boundingBox)
    if curWKT:
        # Intersect the bounds of the image and curWKT
        b = img.bounds
        curImageBounds = [[[b[0], b[1]], [b[0], b[3]], [b[2], b[3]], [b[2], b[1]], [b[0], b[1]]]]
        inPoly = geojson.Polygon(curImageBounds)
        imgBounds = shape(inPoly)
        if imgBounds.intersects(curWKT):
            img = img.aoi(wkt=str(imgBounds.intersection(curWKT)))
        else:
            raise ValueError(
                "Provided KML does not intersect image bounds")
    if getOutSize:
        # If this flag is set, return the size of the total image instead
        return img.shape
    # If the output image is going to be large, write the output as tiled results
    if img.shape[1] > imgChipSize or img.shape[2] > imgChipSize:
        # Create output directory based on file name
        outFolder = outFile.replace(".tif", "")
        try:
            os.mkdir(outFolder)
        except OSError:
            # Folder already exists (or cannot be created; chip writes
            # below will then fail loudly instead of being masked here).
            pass
        # range() is immutable in Python 3 -- materialize lists so the
        # terminal edge can be appended (the original .append on a range
        # object raises AttributeError under Python 3).
        rowSteps = list(range(0, img.shape[1], imgChipSize))
        rowSteps.append(img.shape[1])
        rowIndex = range(0, len(rowSteps) - 1)
        if len(specificTiles[1]) > 0:
            rowIndex = specificTiles[1]
        colSteps = list(range(0, img.shape[2], imgChipSize))
        colSteps.append(img.shape[2])
        colIndex = range(0, len(colSteps) - 1)
        if len(specificTiles[0]) > 0:
            colIndex = specificTiles[0]
        for rIdx in rowIndex:
            for cIdx in colIndex:
                logging.info(
                    "Downloading row %s of %s and column %s of %s" %
                    (rIdx, len(rowSteps), cIdx, len(colSteps)))
                outputChip = os.path.join(outFolder, "C%s_R%s.tif" % (cIdx, rIdx))
                curChip = img[0:img.shape[0],
                              rowSteps[rIdx]:rowSteps[rIdx + 1],
                              colSteps[cIdx]:colSteps[cIdx + 1]]
                if not os.path.exists(outputChip):
                    if output == "IMAGE":
                        out_meta = {
                            "dtype": curChip.dtype,
                            "compress": 'lzw',
                            "driver": "GTiff",
                            "count": curChip.shape[0],
                            "height": curChip.shape[1],
                            "width": curChip.shape[2],
                            "transform": curChip.affine,
                            "crs": curChip.proj
                        }
                        with rasterio.open(outputChip, "w", **out_meta) as dest:
                            dest.write(curChip)
                    if output == "INDICES":
                        self.calculateIndices(curChip, sensor, outputChip)
                    if output == 'NDSV':
                        self.calculateNDSV(curChip, sensor, outputChip)
    else:
        # Small enough to write as a single file
        if output == "IMAGE":
            img.geotiff(path=outFile)
        if output == "INDICES":
            self.calculateIndices(img, sensor, outFile)
        if output == 'NDSV':
            self.calculateNDSV(img, sensor, outFile)
    return 1
def attachRGB_NDSV_Bands(self, catID):
    '''Append raw image bands and NDSV band ratios to every stacked spfeas tile.

    For each GeoTIFF in ``self.stackedFolder``, the matching window of the
    catalog image *catID* is fetched, resampled to the tile's grid, and
    written after the original spfeas bands into a new GeoTIFF of the same
    name under ``self.stackedFolderRGB``.

    Args:
        catID: Digital Globe CatalogID used to build the CatalogImage.

    Returns:
        list: full paths of input tiles whose processing raised an exception
        (e.g. no imagery overlap) — candidates for deletion by the caller.
    '''
    allTiffs = os.listdir(self.stackedFolder)
    rgbI = CatalogImage(catID)
    cnt = 0
    tilesToDelete = []  # tiles that failed; returned to the caller
    for inputTile in allTiffs:
        cnt += 1
        logging.info("Adding RBG and NDSV for %s of %s" % (cnt, len(allTiffs)))
        outImage = os.path.join(self.stackedFolderRGB, os.path.basename(inputTile))
        fullInputTilePath = os.path.join(self.stackedFolder, inputTile)
        # NOTE(review): spI is never closed; consider a context manager.
        spI = rasterio.open(fullInputTilePath)
        meta = spI.meta.copy()
        tBandCount = spI.count  # Spfeas band count
        tBandCount = tBandCount + rgbI.shape[0]  # Add bands from RGB
        # Add bands from NDSV: one per unordered pair of image bands
        tBandCount = tBandCount + int(rgbI.shape[0] * (rgbI.shape[0] - 1) / 2)
        meta.update(count = tBandCount)
        # Open satellite imagery
        try:
            data = rgbI.aoi(bbox=spI.bounds)
            allRes = []
            resTitles = []
            shrunkenData = []
            bandNames = rgbI.metadata['image']['bandAliases']
            bIdx = 0
            # Resample every image band down to the spfeas tile's grid
            for bIdx in range(0, rgbI.shape[0]):
                cData = data[bIdx,:,:]
                # zoom factor = tile rows / image rows; order=2 interpolation
                newData = scipy.ndimage.zoom(cData, (1/float(cData.shape[0] / spI.shape[0])), order=2)
                # Trim any overshoot so the band matches the tile exactly
                newData = newData[:spI.shape[0],:spI.shape[1]]
                shrunkenData.append(newData)
                allRes.append(newData)
                resTitles.append("RAW_%s" % bandNames[bIdx])
            # NDSV: normalized difference for every unordered band pair
            for b1 in range(0, rgbI.shape[0]):
                for b2 in range(0, rgbI.shape[0]):
                    if b1 < b2:
                        #cMetric = (data[b1,:,:] - data[b2,:,:]) / (data[b1,:,:] + data[b2,:,:])
                        cMetric = (shrunkenData[b1] - shrunkenData[b2]) / (shrunkenData[b1] + shrunkenData[b2])
                        allRes.append(cMetric)
                        resTitles.append("NDSV_%s_%s" % (bandNames[b1], bandNames[b2]))
            # Read in spfeas results and stack results
            spfeas = spI.read()
            # Write with rasterio: spfeas bands first, then RAW + NDSV bands
            with rasterio.open(outImage, 'w', **meta) as dest1:
                totalBandCount = 1  # rasterio band indices are 1-based
                # Write spfeas bands
                for bIdx in range(0, spI.count):
                    dest1.write(spfeas[bIdx,:,:], totalBandCount)
                    totalBandCount += 1
                # Write RGB and NDSV results
                for bIdx in range(0, len(allRes)):
                    #dest1.write(outData[bIdx,:,:], totalBandCount)
                    dest1.write(allRes[bIdx], totalBandCount)
                    totalBandCount += 1
        except Exception as e:
            # Best-effort per tile: log and remember the failing tile
            # rather than aborting the whole batch.
            logging.warning(str(e))
            tilesToDelete.append(fullInputTilePath)
    return(tilesToDelete)
def test_cat_image_aoi(self):
    """cat_id round-trips and bbox subsetting yields the known raster shape."""
    catalog_id = '104001002838EC00'
    image = CatalogImage(catalog_id)
    assert image.cat_id == catalog_id
    bbox = [
        -85.81455230712892,
        10.416235163695223,
        -85.77163696289064,
        10.457089934231618,
    ]
    clipped = image.aoi(bbox=bbox)
    assert clipped.shape == (8, 3037, 3190)