def test_get_s3tmp_cred(self):
    """Verify get_s3tmp_cred() returns every expected temporary-S3 credential field."""
    gi = Interface()
    s3creds = gi.get_s3tmp_cred()
    # assertIn gives a clearer failure message than assertTrue("x" in d.keys()),
    # and membership tests work on the dict directly without .keys().
    for key in ("bucket", "prefix", "S3_secret_key", "S3_access_key", "S3_session_token"):
        self.assertIn(key, s3creds)
# Run atmospheric compensation on Landsat8 data
from gbdxtools import Interface

gbdx = Interface()

# Build the AComp task against the public Landsat8 strip, wire it into a
# one-task workflow, persist its output, and kick the workflow off.
acomp_task = gbdx.Task(
    'AComp_New',
    data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')

workflow = gbdx.Workflow([acomp_task])
workflow.savedata(acomp_task.outputs.data, location='acomp_output_folder')
workflow.execute()
def setUpClass(cls):
    # Use a mocked GBDX session so the tests never hit the real auth service.
    session = get_mock_gbdx_session(token="dummytoken")
    cls.gbdx = Interface(gbdx_connection=session)
# First we'll run atmospheric compensation on Landsat8 data
from gbdxtools import Interface

gbdx = Interface()
acomp = gbdx.Task('AComp', data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')

# Now we'll save the result to our own S3 bucket. First we need to generate
# temporary AWS credentials (this assumes you have an AWS account and your
# IAM credentials are appropriately accessible via boto)
import boto3

sts = boto3.client('sts')
token_response = sts.get_session_token(DurationSeconds=86400)
credentials = token_response['Credentials']
access_key_id = credentials['AccessKeyId']
secret_key = credentials['SecretAccessKey']
session_token = credentials['SessionToken']

# Save the data to your s3 bucket using the SaveToS3 task:
savetask = gbdx.Task('SaveToS3')
savetask.inputs.data = acomp.outputs.data.value
savetask.inputs.destination = "s3://your-bucket/your-path/"
savetask.inputs.access_key_id = access_key_id
savetask.inputs.secret_key = secret_key
savetask.inputs.session_token = session_token

workflow = gbdx.Workflow([acomp, savetask])
workflow.execute()
from gbdxtools import Interface

gbdx = Interface()

# Benchmark input imagery locations (one per sensor platform).
QB = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/QB"
WV1 = "s3://receiving-dgcs-tdgplatform-com/054876516120_01_003"
WV2 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV2"
WV3 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV3"
GE = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/GE/055217125010_01"

# Capture AOP task outputs
#orthoed_output = aoptask.get_output('data')

task = gbdx.Task("ENVI_ImageThresholdToROI")
task.inputs.input_raster = WV1
task.inputs.file_types = "tif"
task.inputs.roi_name = "[\"Water\"]"
# BUG FIX: the bracketed list-literal strings were unbalanced
# ("[[0,255,0]" and "[[138]") — the ENVI task expects well-formed
# JSON-style nested lists, matching the shape of roi_name above.
task.inputs.roi_color = "[[0,255,0]]"
task.inputs.threshold = "[[138]]"
task.inputs.output_roi_uri_filename = "roi.xml"

workflow = gbdx.Workflow([task])
'''
workflow.savedata(
    task.outputs.output_roi_uri,
    location='Benchmark/ImgToROI/QB'
)
'''
# NOTE(review): the original snippet was truncated mid-call here; the closing
# parenthesis has been restored.
workflow.savedata(
    task.outputs.output_roi_uri,
    location='Benchmark/ImgToROI/WV1'
)
def setUpClass(cls):
    # Mocked GBDX session: keeps unit tests offline.
    session = get_mock_gbdx_session(token='dummytoken')
    cls.gbdx = Interface(gbdx_connection=session)
    # Scratch directory for any files the tests write.
    cls._temp_path = tempfile.mkdtemp()
    print("Created: {}".format(cls._temp_path))
from gbdxtools import Interface

gbdx = Interface()  # Instantiate gbdxtools, which logs you in using your .config file

#### SEARCHING THE CATALOG WITH FILTERS
wkt_string = "POLYGON((-122.43434482199342028 47.69012820633496119,-122.24650297391180231 47.6831711008504584,-122.23954586842728531 47.49532925276882622,-122.41347350553991191 47.49532925276882622,-122.43434482199342028 47.69012820633496119))"
# BUG FIX: was "filters = filters = [...]" — a duplicated assignment target.
filters = [
    "(sensorPlatformName = 'WORLDVIEW03' OR sensorPlatformName ='WORLDVIEW02')",
    "cloudCover < 10",
    "offNadirAngle < 10"
]
results = gbdx.catalog.search(searchAreaWkt=wkt_string,
                              startDate="2004-01-01T00:00:00.000Z",
                              endDate="2016-01-01T00:00:00.000Z",
                              filters=filters)
catalog_ids = [r['identifier'] for r in results]  # Save catIDs in a list to use to find s3 locations
# BUG FIX: Python-2-only "print x" statements replaced with print() calls,
# consistent with the print() usage elsewhere in this file.
print(catalog_ids)

#### RUNNING WORKFLOWS
workflowList = []  # We will keep track of our workflows by putting workflow IDs in a list as they are created
for catID in catalog_ids:
    # Loop through all of the catIDs we found earlier to find their s3
    # location. This will be the input for the first task.
    s3path = gbdx.catalog.get_data_location(catalog_id=catID)
    print(s3path)
def setUpClass(cls):
    # create mock session, replace dummytoken with real token to create cassette
    cls.gbdx = Interface(gbdx_connection=get_mock_gbdx_session(token="dummytoken"))
    # Temporary working directory for artifacts produced during the tests.
    cls._temp_path = tempfile.mkdtemp()
    print("Created: {}".format(cls._temp_path))
# coding: utf-8

# # GBDX Tools Training Notebook

# ## First initialize the Interface object that handles authentication

# In[ ]:
from gbdxtools import Interface

# In[ ]:
gbdx = Interface()

# ## Searching for Data

# ### Searching by Geographic Area

# In[ ]:
# AOI covering a region of Utah, expressed as WKT.
wkt_string = "POLYGON((-113.88427734375 40.36642741921034,-110.28076171875 40.36642741921034,-110.28076171875 37.565262680889965,-113.88427734375 37.565262680889965,-113.88427734375 40.36642741921034))"
results = gbdx.catalog.search(searchAreaWkt=wkt_string)

# In[ ]:
# Peek at the first ten catalog records.
results[:10]
from gbdxtools import Interface

gbdx = Interface()  # Instantiate gbdxtools, which logs you in using your .config file

# Pre-event image: orthorectify with atmospheric compensation, then stage to S3.
s3path1 = 's3://receiving-dgcs-tdgplatform-com/055364007010_01_003'
aoptask1 = gbdx.Task("AOP_Strip_Processor", data=s3path1, enable_acomp=True,
                     enable_pansharpen=False, enable_dra=False, bands='MS')
s3task1 = gbdx.Task("StageDataToS3")
s3task1.inputs.data = aoptask1.outputs.data.value
s3task1.inputs.destination = "s3://change_detection/test_job/Steps/acomp_fastortho_step-pre_image_task/Output"

# Post-event image, processed identically.
s3path2 = 's3://receiving-dgcs-tdgplatform-com/055364005010_01_003'
aoptask2 = gbdx.Task("AOP_Strip_Processor", data=s3path2, enable_acomp=True,
                     enable_pansharpen=False, enable_dra=False, bands='MS')
s3task2 = gbdx.Task("StageDataToS3")
# BUG FIX: was aoptask1.outputs.data.value — the post-image staging task must
# stage the second AOP task's output, not the first one's again.
s3task2.inputs.data = aoptask2.outputs.data.value
s3task2.inputs.destination = "s3://change_detection/test_job/Steps/acomp_fastortho_step-post_image_task/Output"

# Change detection compares the pre- and post-event images.
cdtask = gbdx.Task("change_detection")
cdtask.inputs.pre_image = aoptask1.outputs.data.value
cdtask.inputs.post_image = aoptask2.outputs.data.value
import geopandas as gpd
from matplotlib import pyplot as plt
from scipy.misc import bytescale
import shapely
from shapely.wkt import loads
import pandas as pd
from shapely.geometry import box
from fiona.crs import from_epsg
import shutil
import requests
import rasterio as rio

## Provide a sample query for import
sample_query = 'item_type:WV03_VNIR OR item_type:WV02 OR item_type:WV04 OR item_type:ESAProduct'

# Module-level GBDX interface shared by the helpers below.
gbdx = Interface()


def get_preview_image_arr(catid):
    '''
    Download the browse (thumbnail) image for a catalog record to temp.png.

    gbdx: interface object for GBDX access
    catid: catalog ID for image asset

    returns: numpy array for preview image
    '''
    # Resolve the record's browse-image URL and stream the PNG to disk.
    my_url = gbdx.catalog.get(catid)['properties']['browseURL']
    response = requests.get(my_url, stream=True)
    temp = 'temp.png'
    with open(temp, 'wb') as file:
        shutil.copyfileobj(response.raw, file)
    del response
    # NOTE(review): the visible body ends here without returning the numpy
    # array the docstring promises — the rest of this function appears to be
    # outside this chunk; confirm against the full file.
def test_init():
    # Building an Interface from the mocked session should succeed and
    # produce an Interface instance.
    interface = Interface(gbdx_connection=mock_gbdx_session)
    assert isinstance(interface, Interface)
def preview(image, **kwargs):
    '''
    Show a slippy map preview of the image. Requires iPython.

    Args:
        image (image): image object to display
        zoom (int): zoom level to intialize the map, default is 16
        center (list): center coordinates to initialize the map, defaults to center of image
        bands (list): bands of image to display, defaults to the image's default RGB bands
    '''
    # Map rendering only works inside a Jupyter/IPython frontend; bail out
    # gracefully (best-effort) if those imports are unavailable.
    try:
        from IPython.display import Javascript, HTML, display
        from gbdxtools.rda.interface import RDA
        from gbdxtools import Interface
        gbdx = Interface()
    except:
        print("IPython is required to produce maps.")
        return

    zoom = kwargs.get("zoom", 16)
    bands = kwargs.get("bands")
    if bands is None:
        bands = image._rgb_bands
    # Image footprint in WGS84, unless the caller supplies explicit bounds.
    wgs84_bounds = kwargs.get(
        "bounds", list(loads(image.metadata["image"]["imageBoundsWGS84"]).bounds))
    center = kwargs.get("center", list(shape(image).centroid.bounds[0:2]))
    if image.proj != 'EPSG:4326':
        # Non-WGS84 imagery: fetch projection metadata (proj4 definition and
        # area of use) from the coordinate service, and reproject the bounds.
        code = image.proj.split(':')[1]
        conn = gbdx.gbdx_connection
        proj_info = conn.get(
            'https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'.format(code)).json()
        tfm = partial(pyproj.transform, pyproj.Proj(init='EPSG:4326'),
                      pyproj.Proj(init=image.proj))
        bounds = list(ops.transform(tfm, box(*wgs84_bounds)).bounds)
    else:
        proj_info = {}
        bounds = wgs84_bounds

    # Apply a histogram DRA stretch so the tiles render with sensible contrast,
    # then pull the RDA graph/node ids the tile URL template needs.
    rda = RDA()
    dra = rda.HistogramDRA(image)
    image = dra.aoi(bbox=image.bounds)
    graph_id = image.rda_id
    node_id = image.rda.graph()['nodes'][0]['id']

    # Unique DOM id per invocation so multiple previews can coexist.
    map_id = "map_{}".format(str(int(time.time())))
    scales = ','.join(['1'] * len(bands))
    offsets = ','.join(['0'] * len(bands))

    # Inject the map container plus OpenLayers CSS/polyfills into the notebook.
    display(HTML(Template('''
       <div id="$map_id"/>
       <link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' />
       <script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script>
       <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style>
       <style></style>
    ''').substitute({"map_id": map_id})))

    # Client-side OpenLayers setup: registers the projection (via proj4 when
    # not EPSG:4326), builds a TileImage source addressed by RDA graph/node,
    # and instantiates the map. All $placeholders are filled by Template below.
    js = Template("""
        require.config({
            paths: {
                oljs: 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol',
                proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4'
            }
        });
        require(['oljs', 'proj4'], function(oljs, proj4) {
            oljs.proj.setProj4(proj4)
            var md = $md;
            var georef = $georef;
            var graphId = '$graphId';
            var nodeId = '$nodeId';
            var extents = $bounds;
            var x1 = md.minTileX * md.tileXSize;
            var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize);
            var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize);
            var y2 = md.minTileY * md.tileYSize;
            var tileLayerResolutions = [georef.scaleX];
            var url = '$url' + '/tile/';
            url += graphId + '/' + nodeId;
            url += "/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets";
            var proj = '$proj';
            var projInfo = $projInfo;
            if ( proj !== 'EPSG:4326' ) {
                var proj4def = projInfo["proj4"];
                proj4.defs(proj, proj4def);
                var area = projInfo["area_of_use"];
                var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"],
                            area["area_east_bound_lon"], area["area_north_bound_lat"]]
                var projection = oljs.proj.get(proj);
                var fromLonLat = oljs.proj.getTransform('EPSG:4326', projection);
                var extent = oljs.extent.applyTransform(
                    [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat);
                projection.setExtent(extent);
            } else {
                var projection = oljs.proj.get(proj);
            }
            var rda = new oljs.layer.Tile({
              title: 'RDA',
              opacity: 1,
              extent: extents,
              source: new oljs.source.TileImage({
                      crossOrigin: null,
                      projection: projection,
                      extent: extents,
                      tileGrid: new oljs.tilegrid.TileGrid({
                          extent: extents,
                          origin: [extents[0], extents[3]],
                          resolutions: tileLayerResolutions,
                          tileSize: [md.tileXSize, md.tileYSize],
                      }),
                      tileUrlFunction: function (coordinate) {
                          if (coordinate === null) return undefined;
                          const x = coordinate[1] + md.minTileX;
                          const y = -(coordinate[2] + 1 - md.minTileY);
                          if (x < md.minTileX || x > md.maxTileX) return undefined;
                          if (y < md.minTileY || y > md.maxTileY) return undefined;
                          return url.replace('{x}', x).replace('{y}', y);
                      }
                  })
            });
            var map = new oljs.Map({
              layers: [ rda ],
              target: '$map_id',
              view: new oljs.View({
                  projection: projection,
                  center: $center,
                  zoom: $zoom
              })
            });
        });
    """).substitute({
        "map_id": map_id,
        "proj": image.proj,
        "projInfo": json.dumps(proj_info),
        "graphId": graph_id,
        "bounds": bounds,
        "bands": ",".join(map(str, bands)),
        "nodeId": node_id,
        "md": json.dumps(image.metadata["image"]),
        "georef": json.dumps(image.metadata["georef"]),
        "center": center,
        "zoom": zoom,
        "token": gbdx.gbdx_connection.access_token,
        "scales": scales,
        "offsets": offsets,
        "url": VIRTUAL_RDA_URL
    })
    display(Javascript(js))
from gbdxtools import Interface

gbdx = Interface()

# Benchmark strips, one per sensor platform.
QB = "s3://receiving-dgcs-tdgplatform-com/054876960040_01_003"
WV1 = "s3://receiving-dgcs-tdgplatform-com/054876516120_01_003"
WV2 = "s3://receiving-dgcs-tdgplatform-com/054876618060_01_003"
WV3 = "s3://receiving-dgcs-tdgplatform-com/055605759010_01_003"
GE = "s3://receiving-dgcs-tdgplatform-com/055217125010_01_003"

#aoptask = gbdx.Task("AOP_Strip_Processor", data=GE, enable_acomp=True, enable_pansharpen=False, enable_dra=False, bands='MS')
aoptask2 = gbdx.Task('AOP_Strip_Processor',
                     data=WV1,
                     bands='PAN',
                     enable_acomp=False,
                     enable_pansharpen=False,
                     enable_dra=False)  # creates acomp'd multispectral image

# Capture AOP task outputs
#orthoed_output = aoptask.get_output('data')

aop2envi = gbdx.Task("Build_ENVI_HDR")
aop2envi.inputs.image = aoptask2.outputs.data.value

#hdr file used to compute spectral index
#envi_ndvi = gbdx.Task("ENVI_SpectralIndex")
#envi_ndvi.inputs.input_raster = aop2envi.outputs.output_data.value
#envi_ndvi.inputs.file_types = "hdr"
#envi_ndvi.inputs.index = "Normalized Difference Vegetation Index"

#spectral index file used in color slice classification task
envi_color = gbdx.Task('ENVI_ColorSliceClassification')
envi_color.inputs.input_raster = aoptask2.outputs.data.value
def setUpClass(cls):
    # create mock session, replace dummytoken with real token to create cassette
    session = get_mock_gbdx_session(token="dummytoken")
    cls.gbdx = Interface(gbdx_connection=session)
def preview(image, **kwargs):
    """Render a slippy-map preview of *image* in a Jupyter notebook (IPE/legacy variant)."""
    # Map rendering only works inside IPython; fall back to a message otherwise.
    try:
        from IPython.display import Javascript, HTML, display
        from gbdxtools import Interface
        gbdx = Interface()
    except:
        print("IPython is required to produce maps.")
        return

    zoom = kwargs.get("zoom", 16)
    bands = kwargs.get("bands", image._rgb_bands)
    wgs84_bounds = kwargs.get(
        "bounds", list(loads(image.ipe_metadata["image"]["imageBoundsWGS84"]).bounds))
    center = kwargs.get("center", list(shape(image).centroid.bounds[0:2]))
    graph_id = image.ipe_id
    node_id = image.ipe.graph()['nodes'][0]['id']

    # fetch a tile in order to calc stats and do a simple stretch
    # NOTE(review): this code relies on Python 2 semantics — "/" here must
    # yield ints for slicing, and the map() results below are indexed/joined
    # as lists. Under Python 3 these would need int division and list(map()).
    y = image.shape[1] / 2
    x = image.shape[2] / 2
    aoi = image[:, y:y + 256, x:x + 256].read(quiet=True)
    # Per-band mean/std drive a 2-sigma linear stretch to 0..255.
    means, stds = aoi.mean(axis=(1, 2)), aoi.std(axis=(1, 2))
    scales = (255.0 / (4.0 * stds))
    offsets = map(
        list(((means - (2.0 * stds)) * scales * -1.0)).__getitem__, bands)
    scales = map(list(scales).__getitem__, bands)

    if image.proj != 'EPSG:4326':
        # Fetch proj4/area-of-use info for the non-WGS84 projection and
        # reproject the display bounds into it.
        code = image.proj.split(':')[1]
        conn = gbdx.gbdx_connection
        proj_info = conn.get(
            'https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'.format(code)).json()
        tfm = partial(pyproj.transform, pyproj.Proj(init='EPSG:4326'),
                      pyproj.Proj(init=image.proj))
        bounds = list(ops.transform(tfm, box(*wgs84_bounds)).bounds)
    else:
        proj_info = {}
        bounds = wgs84_bounds

    # Unique DOM id per call so multiple maps can live in one notebook.
    map_id = "map_{}".format(str(int(time.time())))

    # Map container plus OpenLayers CSS/polyfills.
    display(HTML(Template('''
       <div id="$map_id"/>
       <link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' />
       <script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script>
       <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style>
       <style></style>
    ''').substitute({"map_id": map_id})))

    # OpenLayers client code; $placeholders filled by Template below. Tiles are
    # addressed by IPE graph/node id with the computed band scales/offsets.
    js = Template("""
        require.config({
            paths: {
                ol: 'https://openlayers.org/en/v4.6.4/build/ol',
                proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4'
            }
        });
        require(['ol', 'proj4'], function(ol, proj4) {
            ol.proj.setProj4(proj4);
            var md = $md;
            var georef = $georef;
            var graphId = '$graphId';
            var nodeId = '$nodeId';
            var extents = $bounds;
            var x1 = md.minTileX * md.tileXSize;
            var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize);
            var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize);
            var y2 = md.minTileY * md.tileYSize;
            var tileLayerResolutions = [georef.scaleX];
            var url = '$url' + '/tile/';
            url += graphId + '/' + nodeId;
            url += "/{x}/{y}.png?token=$token&bands=$bands&scales=$scales&offsets=$offsets";
            var proj = '$proj';
            var projInfo = $projInfo;
            if ( proj !== 'EPSG:4326' ) {
                var proj4def = projInfo["proj4"];
                proj4.defs(proj, proj4def);
                var area = projInfo["area_of_use"];
                var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"],
                            area["area_east_bound_lon"], area["area_north_bound_lat"]]
                var projection = ol.proj.get(proj);
                var fromLonLat = ol.proj.getTransform('EPSG:4326', projection);
                var extent = ol.extent.applyTransform(
                    [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat);
                projection.setExtent(extent);
            } else {
                var projection = ol.proj.get(proj);
            }
            var rda = new ol.layer.Tile({
              title: 'RDA',
              opacity: 1,
              extent: extents,
              source: new ol.source.TileImage({
                      crossOrigin: null,
                      projection: projection,
                      extent: extents,
                      tileGrid: new ol.tilegrid.TileGrid({
                          extent: extents,
                          origin: [extents[0], extents[3]],
                          resolutions: tileLayerResolutions,
                          tileSize: [md.tileXSize, md.tileYSize],
                      }),
                      tileUrlFunction: function (coordinate) {
                          if (coordinate === null) return undefined;
                          const x = coordinate[1] + md.minTileX;
                          const y = -(coordinate[2] + 1 - md.minTileY);
                          if (x < md.minTileX || x > md.maxTileX) return undefined;
                          if (y < md.minTileY || y > md.maxTileY) return undefined;
                          return url.replace('{x}', x).replace('{y}', y);
                      }
                  })
            });
            var map = new ol.Map({
              layers: [ rda ],
              target: '$map_id',
              view: new ol.View({
                  projection: projection,
                  center: $center,
                  zoom: $zoom
              })
            });
        });
    """).substitute({
        "map_id": map_id,
        "proj": image.proj,
        "projInfo": json.dumps(proj_info),
        "graphId": graph_id,
        "bounds": bounds,
        "bands": ",".join(map(str, bands)),
        "nodeId": node_id,
        "md": json.dumps(image.ipe_metadata["image"]),
        "georef": json.dumps(image.ipe_metadata["georef"]),
        "center": center,
        "zoom": zoom,
        "token": gbdx.gbdx_connection.access_token,
        "scales": ",".join(map(str, scales)),
        "offsets": ",".join(map(str, offsets)),
        "url": VIRTUAL_IPE_URL
    })
    display(Javascript(js))
'''  # closes the module docstring opened earlier in the file

from gbdxtools import Interface
from gbdxtools.workflow import Workflow
import vcr
from auth_mock import get_mock_gbdx_session

# How to use the mock_gbdx_session and vcr to create unit tests:
# 1. Add a new test that is dependent upon actually hitting GBDX APIs.
# 2. Decorate the test with @vcr appropriately
# 3. replace "dummytoken" with a real gbdx token
# 4. Run the tests (existing test shouldn't be affected by use of a real token). This will record a "cassette".
# 5. replace the real gbdx token with "dummytoken" again
# 6. Edit the cassette to remove any possibly sensitive information (s3 creds for example)

# Module-level mocked GBDX session shared by every test below.
mock_gbdx_session = get_mock_gbdx_session(token="dummytoken")
gbdx = Interface(gbdx_connection=mock_gbdx_session)


def test_init():
    # A Workflow built from the Interface exposes s3 and a live connection.
    wf = Workflow(gbdx)
    assert isinstance(wf, Workflow)
    assert wf.s3 is not None
    assert wf.gbdx_connection is not None


@vcr.use_cassette('tests/unit/cassettes/test_list_tasks.yaml', filter_headers=['authorization'])
def test_list_tasks():
    # Replays the recorded cassette instead of hitting the live task registry.
    wf = Workflow(gbdx)
    taskinfo = wf.list_tasks()
    assert taskinfo is not None
from gbdxtools import Interface

gbdx = Interface()

# Benchmark strips, one per sensor platform.
QB = "s3://receiving-dgcs-tdgplatform-com/054876960040_01_003"
WV1 = "s3://receiving-dgcs-tdgplatform-com/054876516120_01_003"
WV2 = "s3://receiving-dgcs-tdgplatform-com/054876618060_01_003"
WV3 = "s3://receiving-dgcs-tdgplatform-com/055605759010_01_003"
GE = "s3://receiving-dgcs-tdgplatform-com/055217125010_01_003"

aoptask1 = gbdx.Task('AOP_Strip_Processor', data=QB, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False) # creates acomp'd multispectral image
#aoptask2 = gbdx.Task('AOP_Strip_Processor', data=WV1, bands='PAN', enable_acomp=False, enable_pansharpen=False, enable_dra=False) # creates acomp'd multispectral image
#aoptask3 = gbdx.Task('AOP_Strip_Processor', data=WV2, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False) # creates acomp'd multispectral image
#aoptask4 = gbdx.Task('AOP_Strip_Processor', data=WV3, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False) # creates acomp'd multispectral image
#aoptask5 = gbdx.Task('AOP_Strip_Processor', data=GE, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False) # creates acomp'd multispectral image

workflow = gbdx.Workflow([aoptask1])
workflow.savedata( aoptask1.outputs.data, location='Benchmark/QB' )
# NOTE(review): the triple-quoted block below comments out savedata calls for
# the other sensors; it is unterminated in this chunk (the closing ''' and the
# rest of the second call appear to be outside this view — confirm in the full file).
'''
workflow.savedata( aoptask2.outputs.data, location='Benchmark/WV1' )
workflow.savedata( aoptask3.outputs.data,
from gbdxtools import Interface
import json

gi = Interface()

#catid = '101001000DB2FB00'
#catid = '1020010013C4CF00'
catid = '10400100120FEA00'

# Look up the IDAHO image parts for this catalog ID and describe them.
idaho_images = gi.get_idaho_images_by_catid(catid)
description = gi.describe_idaho_images(idaho_images)
# BUG FIX: "print json.dumps(...)" is Python-2-only syntax; use the print()
# function, consistent with the print() calls elsewhere in this file.
print(json.dumps(description, indent=4, sort_keys=True))

# Write an interactive Leaflet viewer for the image footprints.
gi.create_idaho_leaflet_viewer(idaho_images, 'outputmap.html')
def setUpClass(cls):
    cls.ipe = Ipe()
    # Mocked session keeps these tests offline.
    cls.gbdx = Interface(gbdx_connection=get_mock_gbdx_session(token='dummytoken'))