Example #1
def aopImage(catalog_id,
             s3_location,
             local_dir=None,
             panPixelSize=0.5,
             clip=None):
    gbdx = Interface()
    isWV1 = catalog_id.startswith('102')
    if (isWV1):
        print(
            "WARNING: this is a WV1 image; an MS or pansharpened image can't be ordered"
        )
    isSWIR = catalog_id.startswith('104A')
    if (isSWIR):
        print("ERROR SWIR image can't be orthorectified")
        return
    order_id = order(gbdx, catalog_id)
    data = gbdx.ordering.status(order_id)[0]['location']
    gdalwarpOptions = "  -r near --config GDAL_CACHEMAX 4000 -wm 4000 -co TILED=TRUE -co COMPRESS=PACKBITS -co BIGTIFF=YES "
    aoptask = gbdx.Task("AOP_Strip_Processor",
                        data=data,
                        enable_acomp=True,
                        enable_pansharpen=False,
                        enable_dra=False,
                        ortho_epsg='UTM',
                        bands='MS',
                        ortho_pixel_size=str(4 * panPixelSize),
                        ortho_dem_specifier=dem)
    if (clip is not None):
        clipTask = gbdx.Task("gdalcrop",
                             image=aoptask.outputs.data,
                             crop=clip + ' -tr ' + str(4 * panPixelSize) +
                             ' ' + str(4 * panPixelSize) + gdalwarpOptions,
                             ship='false')
        workflow = gbdx.Workflow([aoptask, clipTask])
        workflow.savedata(clipTask.outputs.cropped,
                          location=s3_location + 'MS')
    else:
        workflow = gbdx.Workflow([aoptask])
        workflow.savedata(aoptask.outputs.data, location=s3_location + 'MS')
    workflow.execute()
    print('AOP is processing image ' + catalog_id + ' MS workflow id is ' +
          workflow.id)
    waitForWorkflow(workflow)
    print('MS      image ' + catalog_id + ' ' + str(workflow.status) + ' at ' +
          str(datetime.now()))
    if local_dir == '':
        return
    if (local_dir is not None):
        print('Downloading AOP images')
        if not os.path.exists(local_dir):
            os.makedirs(local_dir)
        gbdx.s3.download(location=s3_location, local_dir=local_dir)
        print('Image downloaded ' + catalog_id + ' at ' + str(datetime.now()))

    return
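The snippets on this page are extracted from larger scripts, so module-level imports and helpers are not shown. A minimal sketch of what `order` and `waitForWorkflow` might look like, assuming `order` simply places a GBDX order and `waitForWorkflow` polls until the workflow reaches a terminal state (the `dem` value passed as `ortho_dem_specifier` would likewise be a module-level constant; 'SRTM90' is only an illustrative choice):

import os
import time
from datetime import datetime
from gbdxtools import Interface

dem = 'SRTM90'  # assumption: module-level DEM specifier for AOP_Strip_Processor

def order(gbdx, catalog_id):
    # Place the order for the catalog id and return the order id.
    return gbdx.ordering.order(catalog_id)

def waitForWorkflow(workflow, poll_seconds=60):
    # Block until the workflow reaches a terminal state (succeeded, failed, ...).
    while not workflow.complete:
        time.sleep(poll_seconds)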
Example #2
def changePrep(catalog_id, s3_location, clip=None):
    size = '2'
    gbdx = Interface()
    isWV1 = catalog_id.startswith('102')
    if (isWV1):
        print("ERROR: image is a WV1 image.")
        return
    isSWIR = catalog_id.startswith('104A')
    if (isSWIR):
        print("ERROR Image is a WV1 image.")
        return
    order_id = order(gbdx, catalog_id)
    data = gbdx.ordering.status(order_id)[0]['location']
    gdalwarpOptions = "  -r near --config GDAL_CACHEMAX 4000 -wm 4000 -co TILED=TRUE -co COMPRESS=PACKBITS -co BIGTIFF=YES "
    gdalwarpOptions = "  -r near -co TILED=TRUE -co COMPRESS=PACKBITS -co BIGTIFF=YES "
    aoptask = gbdx.Task("AOP_Strip_Processor",
                        data=data,
                        enable_acomp=True,
                        enable_pansharpen=False,
                        enable_dra=False,
                        ortho_epsg='UTM',
                        bands='MS',
                        ortho_pixel_size=size)
    #aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=False, enable_pansharpen=False, enable_dra=False, ortho_epsg='UTM', bands='MS', ortho_pixel_size='16')
    topoTask = gbdx.Task("topo-correction", image=aoptask.outputs.data)
    topoTask.impersonation_allowed = True
    cloudTask = gbdx.Task("CloudPuncher",
                          image=topoTask.outputs.data,
                          maskOnly='false')
    #cloudTask = gbdx.Task("CloudPuncher", image = aoptask.outputs.data, maskOnly = 'false')
    if (clip is not None):
        #####################################################################################################################################################################################
        clipTask = gbdx.Task('gdalcrop',
                             image=cloudTask.outputs.mask,
                             crop=clip + ' -tr ' + size + ' ' + size +
                             gdalwarpOptions,
                             ship='false')
        workflow = gbdx.Workflow([aoptask, topoTask, cloudTask, clipTask])
        workflow.savedata(clipTask.outputs.cropped, location=s3_location)
    else:
        workflow = gbdx.Workflow([aoptask, topoTask, cloudTask])
        workflow.savedata(cloudTask.outputs.mask, location=s3_location)
    workflow.execute()
    print('AOP is processing image ' + catalog_id + ' MS workflow id is ' +
          workflow.id)
    waitForWorkflow(workflow)
    print('MS      image ' + catalog_id + ' ' + str(workflow.status) +
          ' wfl id ' + workflow.id + ' at ' + str(datetime.now()))
    if (not workflow.status["event"] == "succeeded"):
        print("workflow.status", workflow.status,
              workflow.status["event"] == "succeeded")
        return 1
    return 0
Example #3
def runMTCDmosaicPre(id, s3_location):
    gbdx = Interface()
    if (id == -2):
        # do the mosaicking of 1 and 2
        images1 = getS3location(gbdx, s3_location + "/1")
        images2 = getS3location(gbdx, s3_location + "/2")
        tsk = gbdx.Task('mtcdvrt', images1=images1, images2=images2, id="pre")
    elif (id == -3):
        # do the mosaicking of 1, 2 and 3
        images1 = getS3location(gbdx, s3_location + "/1")
        images2 = getS3location(gbdx, s3_location + "/2")
        images3 = getS3location(gbdx, s3_location + "/3")
        tsk = gbdx.Task('mtcdvrt',
                        images1=images1,
                        images2=images2,
                        images3=images3,
                        id="pre")
    else:
        print("Wrong id in runMTCDmosaicPre", id)
        return 1
    workflow = gbdx.Workflow([tsk])
    workflow.savedata(tsk.outputs.data, location=s3_location + "/image2image/")
    workflow.execute()
    print('MTCD mosaic start', id, images1, workflow.id)
    waitForWorkflow(workflow)
    print('MTCD mosaic done ' + images1 + ' ' + images2 + ' ' + str(id) + ' ' +
          str(workflow.status) + ' at ' + str(datetime.now()))
    if (not workflow.status["event"] == "succeeded"):
        print('MTCD mosaic failed')
        return 1
    return 0
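`getS3location` is another helper the snippets assume. Presumably it expands a customer-relative prefix into a full s3:// URL using the account information that gbdxtools exposes; a sketch under that assumption:

def getS3location(gbdx, path):
    # Assumption: prefix the relative path with the customer bucket/prefix
    # from gbdx.s3.info (see the info dict used in Example #23).
    info = gbdx.s3.info
    return 's3://' + info['bucket'] + '/' + info['prefix'] + '/' + path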
Example #4
def image2image(ref_image_s3_dir,
                image_s3_dir,
                output_s3,
                source_filename=None,
                reference_filename=None,
                clip=None,
                pixelSize=2,
                fileName=None):
    gbdx = Interface()
    full_ref_image_s3_dir = getS3location(gbdx, ref_image_s3_dir)
    full_image_s3_dir = getS3location(gbdx, image_s3_dir)
    task = gbdx.Task("image2image",
                     reference_directory=full_ref_image_s3_dir,
                     source_directory=full_image_s3_dir,
                     source_filename=source_filename,
                     reference_filename=reference_filename)
    task.timeout = 36000
    if (clip is not None):
        clipTask1 = gbdx.Task("gdalcrop",
                              image=task.outputs.out,
                              crop=clip + ' -tr ' + str(pixelSize) + ' ' +
                              str(pixelSize),
                              ship='False',
                              updateName='False',
                              fileName=fileName)
        workflow = gbdx.Workflow([task, clipTask1])
        workflow.savedata(clipTask1.outputs.cropped, location=output_s3)
    else:
        workflow = gbdx.Workflow([task])
        workflow.savedata(task.outputs.out, location=output_s3)

    workflow.execute()

    print('image2image ' + ref_image_s3_dir + ' ' + image_s3_dir + ' ' +
          output_s3 + ' ' + str(fileName) + ' workflow ' + str(workflow.id) +
          ' started at ' + str(datetime.now()))
    waitForWorkflow(workflow)
    print('image2image ' + str(workflow.id) + ' ' + ref_image_s3_dir + ' ' +
          image_s3_dir + ' ' + output_s3 + ' ' + str(workflow.status) +
          ' at ' + str(datetime.now()))
    if (not workflow.status["event"] == "succeeded"):
        return 1
    return 0
Example #5
def runMTCDmosaic(id, s3_location):
    gbdx = Interface()
    images = getS3location(gbdx, s3_location + "/" + str(id))
    tsk = gbdx.Task('mtcdvrt', images1=images, id=str(id))
    workflow = gbdx.Workflow([tsk])
    workflow.savedata(tsk.outputs.data, location=s3_location + "/image2image/")
    workflow.execute()
    print('MTCD mosaic start', id, images, workflow.id)
    waitForWorkflow(workflow)
    print('MTCD mosaic done ' + images + ' ' + str(id) + ' ' +
          str(workflow.status) + ' at ' + str(datetime.now()))
    if (not workflow.status["event"] == "succeeded"):
        print('MTCD mosaic failed')
        return 1
    return 0
Example #6
def runMTCD(s3_locationPre, s3_locationPost, out_s3_location):
    gbdx = Interface()
    preimage = getS3location(gbdx, s3_locationPre)
    postimage = getS3location(gbdx, s3_locationPost)
    tsk = gbdx.Task('mtcd', preimage=preimage, postimage=postimage)
    workflow = gbdx.Workflow([tsk])
    workflow.savedata(tsk.outputs.data, location=out_s3_location)
    workflow.execute()
    print('MTCD image pair start ' + s3_locationPre + ' ' + s3_locationPost +
          ' ' + out_s3_location + ' ' + str(workflow.id) + ' at ' +
          str(datetime.now()))
    waitForWorkflow(workflow)
    print('MTCD image pair done ' + s3_locationPre + ' ' + s3_locationPost +
          ' ' + out_s3_location + ' ' + str(workflow.status) + ' at ' +
          str(datetime.now()))
    if (not workflow.status["event"] == "succeeded"):
        return 1
    return 0
Example #7
# (reconstructed opening; the WV1 input is inferred from the savedata locations below)
aoptask1 = gbdx.Task('AOP_Strip_Processor',
                     data=WV1,
                     bands='MS',
                     enable_acomp=True,
                     enable_pansharpen=False,
                     enable_dra=False)  # creates acomp'd multispectral image
#aoptask3 = gbdx.Task('AOP_Strip_Processor', data=WV2, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False)     # creates acomp'd multispectral image
#aoptask4 = gbdx.Task('AOP_Strip_Processor', data=WV3, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False)     # creates acomp'd multispectral image
#aoptask5 = gbdx.Task('AOP_Strip_Processor', data=GE, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False)     # creates acomp'd multispectral image

envitask = gbdx.Task("ENVI_ISODATAClassification")
envitask.inputs.file_types = 'tif'
envitask.inputs.input_raster = aoptask1.outputs.data.value
envitask.outputs.output_raster = "ENVI"

shptask = gbdx.Task("ENVI_ClassificationToShapefile")
shptask.inputs.input_raster = envitask.outputs.output_raster_uri.value
shptask.inputs.file_types = "hdr"

workflow = gbdx.Workflow([aoptask1, envitask, shptask])

workflow.savedata(shptask.outputs.output_vector_uri,
                  location='Benchmark/SHP/WV1/')

workflow.savedata(envitask.outputs.output_raster_uri,
                  location='Benchmark/SHP/WV1')
'''

workflow.savedata(
    shptask.outputs.data,
        location='Benchmark/SHP/WV2'
)

workflow.savedata(
    shptask.outputs.data,
Example #8
import boto3
from gbdxtools import Interface

gbdx = Interface()
sts = boto3.client('sts')

creds = sts.get_session_token()['Credentials']

ingest = gbdx.Task(
    'ingest-s3-data',
    data=
    's3://viper-projects/open_data/intermediate/PreprocessImage/1030010069650A00/059441867010_01/',
    aws_access_key_id=creds['AccessKeyId'],
    aws_secret_access_key=creds['SecretAccessKey'],
    aws_session_token=creds['SessionToken'])

cog = gbdx.Task(
    'CloudOptimizedGeoTIFF:2.0.1',
    data=ingest.outputs.data.value,
)

save = gbdx.Task(
    'SaveToS3',
    data=cog.outputs.data.value,
    destination='s3://jduckworth/cog-tests/1030010069650A00',
    access_key_id=creds['AccessKeyId'],
    secret_key=creds['SecretAccessKey'],
    session_token=creds['SessionToken'],
)

workflow = gbdx.Workflow([ingest, cog, save])
workflow.execute()

print(workflow.id)
Example #9
#aoptask = gbdx.Task('AOP_Strip_Processor', data=WV3, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False)     # creates acomp'd multispectral image
#aoptask = gbdx.Task('AOP_Strip_Processor', data=GE, bands='MS', enable_acomp=True, enable_pansharpen=False, enable_dra=False)     # creates acomp'd multispectral image

isodata = gbdx.Task("ENVI_ISODATAClassification")
isodata.inputs.input_raster = GE
isodata.inputs.file_types = "tif"

sieve = gbdx.Task("ENVI_ClassificationSieving")
sieve.inputs.input_raster = isodata.outputs.output_raster_uri.value
sieve.inputs.file_types = "hdr"

clump = gbdx.Task("ENVI_ClassificationClumping")
clump.inputs.input_raster = sieve.outputs.output_raster_uri.value
clump.inputs.file_types = "hdr"

workflow = gbdx.Workflow([isodata, sieve, clump])
'''
workflow.savedata(
	clump.outputs.output_raster_uri,
		location='Benchmark/clump/QB'
)


workflow.savedata(
    clump.outputs.output_raster_uri,
        location='Benchmark/clump/WV2'
)

workflow.savedata(
    clump.outputs.output_raster_uri,
        location='Benchmark/clump/WV3'
Example #10
from gbdxtools import Interface
gbdx = Interface()

s3_flood_extents = 's3://egolden/floodwatch/intersect_inputs/flood_shp_one/'
s3_footprints = 's3://egolden/floodwatch/intersect_inputs/footprints_shp/'

intersect_task = gbdx.Task('classify_flooded_footprints',
                           footprints_shp=s3_footprints,
                           flood_shp=s3_flood_extents)

workflow = gbdx.Workflow([intersect_task])
workflow.savedata(intersect_task.outputs.footprints_flood_results, location='egolden/intersect_results')
workflow.execute()
print(workflow.id)

Example #11
QB = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/QB"
WV1 = "s3://receiving-dgcs-tdgplatform-com/054876516120_01_003"
WV2 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV2"
WV3 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV3"
GE = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/GE/055217125010_01"


aop2envi = gbdx.Task("AOP_ENVI_HDR")
aop2envi.inputs.image = GE
envi_ndvi = gbdx.Task("ENVI_SpectralIndices")
envi_ndvi.inputs.input_raster = aop2envi.outputs.output_data.value
envi_ndvi.inputs.file_types = "hdr"
# Specify a string/list of indices to run on the input_raster variable.
envi_ndvi.inputs.index = '["Normalized Difference Vegetation Index", "Simple Ratio"]'

workflow = gbdx.Workflow([aop2envi, envi_ndvi])
'''
workflow.savedata(
	       envi_ndvi.outputs.output_raster_uri,
	          location='Benchmark/spectralindices/QB'
)

workflow.savedata(
	       envi_ndvi.outputs.output_raster_uri,
	          location='Benchmark/spectralindices/WV2'
)

workflow.savedata(
	       envi_ndvi.outputs.output_raster_uri,
	          location='Benchmark/spectralindices/WV3'
)
Example #12
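# Assumption: the snippet below ran in a context where gbdx and order_id
# already existed; they might have been created like this (the catalog id
# matches the acquisition_id shown in the status output below):
from gbdxtools import Interface
gbdx = Interface()
order_id = gbdx.ordering.order('10400100143FC900')
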
# The order_id is unique to your image order and can be used to track the
# progress of your order. The ordered image sits in a directory on S3; the
# output of the following describes where:
status = gbdx.ordering.status(order_id)
# result:
# [{u'acquisition_id': u'10400100143FC900',
#   u'state': u'delivered',
#   u'location': u's3://receiving-dgcs-tdgplatform-com/055546367010_01_003'}]

# test a quick workflow on the item
data = str(status[0]['location'])

aoptask = gbdx.Task("AOP_Strip_Processor",
                    data=data,
                    enable_acomp=True,
                    enable_pansharpen=True)
workflow = gbdx.Workflow([aoptask])

data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003"  # WV02 Image over San Francisco
aoptask = gbdx.Task("AOP_Strip_Processor", data=data)

s3task = gbdx.Task("StageDataToS3")
s3task.inputs.data = aoptask.outputs.data.value
s3task.inputs.destination = "s3://gbd-customer-data"

workflow = gbdx.Workflow([aoptask, s3task])
workflow.execute()

# workflow.execute()

# At this point the workflow is launched, and you can get status as follows:
workflow.status
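`workflow.status` returns a dict with `state` and `event` fields (compare Examples #2 and #17). A minimal polling sketch, assuming you want to block until the workflow finishes:

import time

while not workflow.complete:
    time.sleep(60)
print(workflow.status)  # e.g. {'state': 'complete', 'event': 'succeeded'}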
Example #13
WV3 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV3"
GE = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/GE/055217125010_01"


# Capture AOP task outputs
#orthoed_output = aoptask.get_output('data')

task = gbdx.Task("ENVI_ImageThresholdToROI")
task.inputs.input_raster = WV1
task.inputs.file_types = "tif"
task.inputs.roi_name = "[\"Water\"]"
task.inputs.roi_color = "[[0,255,0]]"
task.inputs.threshold = "[[138]]"
task.inputs.output_roi_uri_filename = "roi.xml"

workflow = gbdx.Workflow([ task ])
'''
workflow.savedata(
    task.outputs.output_roi_uri,
        location='Benchmark/ImgToROI/QB'
)
'''
workflow.savedata(
    task.outputs.output_roi_uri,
        location='Benchmark/ImgToROI/WV1'
)
'''
workflow.savedata(
    task.outputs.output_roi_uri,
        location='Benchmark/ImgToROI/WV2'
)
Example #14
#hdr file used to compute spectral index

#envi_ndvi = gbdx.Task("ENVI_SpectralIndex")
#envi_ndvi.inputs.input_raster = aop2envi.outputs.output_data.value
#envi_ndvi.inputs.file_types = "hdr"
#envi_ndvi.inputs.index = "Normalized Difference Vegetation Index"

#spectral index file used in color slice classification task

envi_color = gbdx.Task('ENVI_ColorSliceClassification')
envi_color.inputs.input_raster = aoptask2.outputs.data.value
envi_color.inputs.file_types = 'hdr'


workflow = gbdx.Workflow([aoptask2, envi_color])

#workflow.savedata(
 # envi_ndvi.outputs.output_raster_uri,
 # location='Benchmark/color_slice/'
#)

workflow.savedata(
  envi_color.outputs.output_raster_uri,
  location='Benchmark/color_slice/WV1/Color16'
)

workflow.execute()

print(workflow.id)
Example #15
s3path2 = 's3://receiving-dgcs-tdgplatform-com/055364005010_01_003'
aoptask2 = gbdx.Task("AOP_Strip_Processor",
                     data=s3path2,
                     enable_acomp=True,
                     enable_pansharpen=False,
                     enable_dra=False,
                     bands='MS')
s3task2 = gbdx.Task("StageDataToS3")
s3task2.inputs.data = aoptask2.outputs.data.value
s3task2.inputs.destination = "s3://change_detection/test_job/Steps/acomp_fastortho_step-post_image_task/Output"

cdtask = gbdx.Task("change_detection")
cdtask.inputs.pre_image = aoptask1.outputs.data.value
cdtask.inputs.post_image = aoptask2.outputs.data.value

workflow = gbdx.Workflow([aoptask1, aoptask2, s3task1, s3task2, cdtask])
workflow.savedata(
    cdtask.outputs.cd_output.value,
    location="change_detection/test_job/Steps/change_detection-singleton/Output"
)

workflow.execute()
'''

JSON version:

{
    "name": "3d33bfa2-4ea6-42d5-8101-17dcfa6bf785",
    "tasks": [
        {
            "containerDescriptors": [
Example #16
# First we'll run atmospheric compensation on Landsat8 data
from gbdxtools import Interface
gbdx = Interface()

acomp = gbdx.Task('AComp',
                  data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')

# Now we'll save the result to our own S3 bucket.  First we need to generate temporary AWS credentials
# (this assumes you have an AWS account and your IAM credentials are appropriately accessible via boto)
import boto3
client = boto3.client('sts')
response = client.get_session_token(DurationSeconds=86400)
access_key_id = response['Credentials']['AccessKeyId']
secret_key = response['Credentials']['SecretAccessKey']
session_token = response['Credentials']['SessionToken']

# Save the data to your s3 bucket using the SaveToS3 task:
savetask = gbdx.Task('SaveToS3')
savetask.inputs.data = acomp.outputs.data.value
savetask.inputs.destination = "s3://your-bucket/your-path/"
savetask.inputs.access_key_id = access_key_id
savetask.inputs.secret_key = secret_key
savetask.inputs.session_token = session_token

workflow = gbdx.Workflow([acomp, savetask])
workflow.execute()
Example #17
from gbdxtools import Interface
gbdx = Interface()
image = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/RPCOrtho/image1"
dem = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/RPCOrtho/DEM/image1"


envi_RPCO = gbdx.Task("ENVI_RPCOrthorectification")
envi_RPCO.inputs.input_raster_metadata = '{"sensor type": "IKONOS"}'
envi_RPCO.inputs.input_raster_band_grouping = 'multispectral'
envi_RPCO.inputs.input_raster = image
envi_RPCO.inputs.dem_raster = dem



workflow = gbdx.Workflow([envi_RPCO])

workflow.savedata(
    envi_RPCO.outputs.output_raster_uri,
        location='Benchmark/ENVI_RPCO/results'
)

workflow.execute()

status = workflow.status["state"]
wf_id = workflow.id
Example #18
skynet.inputs.model = '{model_location_s3}'
skynet.inputs.log_level = 'trace'
# AnswerFactory auto populates {confidence} with the value that comes from the user-defined
# confidence parameter introduced in the recipe definition
skynet.inputs.confidence = '{confidence}'
skynet.inputs.pyramid = 'true'
skynet.inputs.pyramid_window_sizes = '[768]'
skynet.inputs.pyramid_step_sizes = '[700]'
skynet.inputs.step_size = '512'
skynet.inputs.tags = 'Airliner, Fighter, Other, Military cargo'
# AnswerFactory auto populates {non_maximum_suppression} with the value that comes from the user-defined
# non_maximum_suppression parameter introduced in the recipe definition
skynet.inputs.non_maximum_suppression = '{non_maximum_suppression}'
skynet.impersonation_allowed = True

workflow = gbdx.Workflow([aop, xmlfix, skynet])

# create parameters for the recipe that are configurable at runtime in the GUI or via API
confidence_param = RecipeParameter(name='confidence',
                                   _type='string',
                                   required=True,
                                   description='Lower bound for match scores',
                                   allow_multiple=False,
                                   allowed_values=['60', '65', '70'])

properties = {
    "partition_size": "50.0",
    "model_type":
    "OpenSkyNetDetectNetMulti",  # type of model; registered in the model catalog
    "image_bands": "Pan_MS1_MS2",  # Pan | Pan_MS1 | Pan_MS1_MS2
}
Example #19
# Set the input data location. This could also be pulled from a catalog API response using a catalog_id.
data = "s3://receiving-dgcs-tdgplatform-com/055186940010_01_003/"

# build the task used in the workflow
aoptask = gbdx.Task("AOP_Strip_Processor",
                    data=data,
                    enable_acomp=True,
                    enable_pansharpen=False,
                    enable_dra=False,
                    bands='MS')
pp_task = gbdx.Task(
    "ProtogenPrep", raster=aoptask.outputs.data.value
)  # ProtogenPrep task is used to get AOP output into proper format for protogen task
prot_lulc = gbdx.Task("protogenV2LULC", raster=pp_task.outputs.data.value)
# build the workflow ( AOP -> ProtogenPrep -> protogenV2LULC )
workflow = gbdx.Workflow([aoptask, pp_task, prot_lulc])
workflow.savedata(prot_lulc.outputs.data.value, location=out_data_loc)

# optional: print workflow tasks, to check the json
print()
print(aoptask.generate_task_workflow_json())
print()
print(pp_task.generate_task_workflow_json())
print()
print(prot_lulc.generate_task_workflow_json())
print()

# kick off the workflow
workflow.execute()

# Print out workflow.status; this is one way to get the workflow id
print(workflow.status)
Example #20
from gbdxtools import Interface
import os
from gbdxrun.local_task import LocalTask
from gbdxrun.local_workflow import LocalWorkflow

HOST = os.environ.get('GBDXTOOLS_HOST', None)
CONFIG = os.environ.get('GBDXTOOLS_PROFILE', None)

config_kwargs = {}
if HOST:
    config_kwargs['host'] = HOST
elif CONFIG:
    config_kwargs['config_file'] = CONFIG

gbdx = Interface(**config_kwargs)

gbdx.Task = LocalTask
gbdx.Workflow = LocalWorkflow


class LocalWorkflowError(Exception):
    pass
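Because the patched `Interface` swaps in `LocalTask` and `LocalWorkflow`, code written against the remote API can be exercised locally. A hypothetical usage sketch (task name and path are illustrative; gbdxrun runs registered task containers on the local machine):

task = gbdx.Task('AOP_Strip_Processor', data='/path/to/local/imagery')
workflow = gbdx.Workflow([task])
workflow.execute()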
Example #21
# Run atmospheric compensation on Landsat8 data
from gbdxtools import Interface
gbdx = Interface()

acomp = gbdx.Task('AComp_New', data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')
workflow = gbdx.Workflow([acomp])
workflow.savedata(acomp.outputs.data, location='acomp_output_folder')
workflow.execute()
Example #22
from gbdxtools import Interface
gbdx = Interface()

QB = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/QB"
WV1 = "s3://receiving-dgcs-tdgplatform-com/054876516120_01_003"
WV2 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV2"
WV3 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV3"
GE = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/GE/055217125010_01"

envi_task = gbdx.Task("ENVI_RXAnomalyDetection")
envi_task.inputs.file_types = 'til'
envi_task.inputs.kernel_size = '3'
envi_task.inputs.input_raster = WV3

workflow = gbdx.Workflow([envi_task])
workflow.savedata(envi_task.outputs.task_meta_data,
                  location='envi_task_output')
workflow.savedata(envi_task.outputs.output_raster_uri,
                  location='envi_task_output')
'''
workflow.savedata(
  envi_task.outputs.task_meta_data,
    location='Benchmark/RX/QB'
)

workflow.savedata(
  envi_task.outputs.task_meta_data,
    location='Benchmark/RX/WV1'
)

Example #23
s3creds = gbdx.s3.info
s3creds


# ## Here’s a quick workflow that starts with a Worldview 2 image over San Francisco, runs it through DigitalGlobe’s “Fast Ortho” and “Acomp” tasks, then saves to a user-specified location under s3://bucket/prefix.

# In[ ]:

bucket = s3creds['bucket']
prefix = s3creds['prefix']
s3out = "s3://" + bucket + "/" + "demo_output/"

data = "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003" # WV02 Image over San Francisco
aoptask = gbdx.Task("AOP_Strip_Processor", data=data, enable_acomp=True, enable_pansharpen=True)
workflow = gbdx.Workflow([ aoptask ])
workflow.savedata(aoptask.outputs.data, location="demo_output")
workflow.execute()


# ### We can easily cancel a workflow that we've kicked off as follows:

# In[ ]:

workflow2 = gbdx.Workflow( [] )  # instantiate a blank workflow
workflow2.id = <known_workflow_id>
workflow2.cancel()


# ### At this point the workflow is launched, and you can get status as follows:
# 
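# In[ ]:

workflow.status  # status dict, same pattern as in Example #12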
Example #24
def processChangeSet(idsets,
                     s3_location,
                     clip=None,
                     deleteIntermediateFromS3=False):
    start_time = time.time()
    ids = []
    if (len(idsets) < 4 or len(idsets) > 6):
        print("FATAL number of idsets has to be 4, 5, or 6. it is " +
              str(len(idsets)))
        return
    for idset in idsets:
        for catid in idset:
            ids.append(catid)

    print("AOP, Topocorrection, Cloud punching")
    threads = []
    id = 0
    for idset in idsets:
        id += 1
        for catID in idset:
            if (catID is not None and len(catID) > 3):
                if catID.find(' ') > 0:
                    catID = catID[0:catID.index(' ')]
                thread = startChangePrepThread(catID,
                                               s3_location + "/" + str(id),
                                               clip)
                threads.append(thread)
                time.sleep(2)
    nThreads = len(threads)
    i = 0
    for thread in threads:
        ret = thread.join()
        i += 1
        print(str(i) + " out of " + str(nThreads) + " threads done")
        if (ret != 0):
            print("ChangePrep error", ret)
            return 1
    print('AOP/Topo/CloudDet done. Elapsed time: {} min'.format(
        round((time.time() - start_time) / 60)))

    print("mosaicking")
    id = 0
    threads = []
    if (len(idsets) == 4):
        thread = startMTCDmosaicThread(-2, s3_location)
    else:
        thread = startMTCDmosaicThread(-3, s3_location)
    threads.append(thread)
    time.sleep(2)

    for idset in idsets:
        id += 1
        thread = startMTCDmosaicThread(id, s3_location)
        threads.append(thread)
        time.sleep(2)
    nThreads = len(threads)
    i = 0
    for thread in threads:
        ret = thread.join()
        i += 1
        print(
            str(i) + " out of " + str(nThreads) + " threads done. Return ",
            ret)
        if (ret != 0):
            print("MTCDmosaic error", ret)
            return 1
    print('mtcdvrt done. Elapsed time: {} min'.format(
        round((time.time() - start_time) / 60)))

    threads = []
    id = 0
    for idset in idsets:
        id += 1
        if (deleteIntermediateFromS3):
            deleteFromS3(s3_location + "/" + str(id) + "/")
        fileName = str(id) + "_warped.tif"
        thread = startimage2imageThread(
            s3_location + "/image2image/pre.tif",
            s3_location + "/image2image/" + str(id) + ".tif",
            s3_location + "/image2imageFinal/" + str(id),
            clip=clip,
            fileName=fileName)
        threads.append(thread)
        time.sleep(2)
    print("Running image2image alignment")
    nThreads = len(threads)
    i = 0
    for thread in threads:
        ret = thread.join()
        i += 1
        print(
            str(i) + " out of " + str(nThreads) + " threads done. Return ",
            ret)
        if (ret != 0):
            print("image2image error", ret)
            return 1

    print('Image2image done. Elapsed time: {} min'.format(
        round((time.time() - start_time) / 60)))
    gbdx = Interface()
    watertsk = gbdx.Task('kk-watermask')
    watertsk.inputs.image = getS3location(gbdx,
                                          s3_location + "/image2imageFinal/1")
    waterwfl = gbdx.Workflow([watertsk])
    waterwfl.savedata(watertsk.outputs.mask,
                      location=s3_location + "/waterMask")
    waterwfl.execute()
    print('Water mask task start ' + str(waterwfl.id) + ' at ' +
          str(datetime.now()))

    nImages = len(idsets)
    threads = []
    for i in range(1, nImages + 1):
        for j in range(i + 1, nImages + 1):
            threads.append(
                startMTCDThread(
                    s3_location + "/image2imageFinal/" + str(i) + "/" +
                    str(i) + "_warped.tif",
                    s3_location + "/image2imageFinal/" + str(j) + "/" +
                    str(j) + "_warped.tif", s3_location + "/changePairs/"))

    nThreads = len(threads)
    i = 0
    for thread in threads:
        ret = thread.join()
        i += 1
        print(
            str(i) + " out of " + str(nThreads) + " threads done. Return ",
            ret)
        if (ret != 0):
            print("MTCDThread error", ret)
            return 1
    print('Image pair change done. Elapsed time: {} min'.format(
        round((time.time() - start_time) / 60)))

    if (deleteIntermediateFromS3):
        deleteFromS3(s3_location + "/image2imageFinal/")
        deleteFromS3(s3_location + "/image2image/")
    waitForWorkflow(waterwfl)
    print('Water mask task done ' + str(waterwfl.status) + ' at ' +
          str(datetime.now()))
    gbdx = Interface()
    changeImages = getS3location(gbdx, s3_location + "/changePairs/")
    maskFolder = getS3location(gbdx, s3_location + "/waterMask")
    tsk = gbdx.Task('mtcd2', image=changeImages, mask=maskFolder)
    wfl = gbdx.Workflow([tsk])
    wfl.savedata(tsk.outputs.data, location=s3_location + "/change/")
    wfl.execute()
    print('MTCD time filter start ' + str(wfl.id) + ' at ' +
          str(datetime.now()))
    waitForWorkflow(wfl)
    print('MTCD time filter done ' + str(wfl.status) + ' at ' +
          str(datetime.now()))

    if (deleteIntermediateFromS3):
        deleteFromS3(s3_location + "/changePairs/")
        deleteFromS3(s3_location + "/waterMask/")
    print('MTCD change ALL processes done. Elapsed time: {} min'.format(
        round((time.time() - start_time) / 60)))
    print("All done " + wfl.status['event'])
    return 0
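The `start*Thread` helpers are not shown, and note that their `join()` is expected to return the worker's exit code, which a plain `threading.Thread.join()` does not do. A minimal sketch, assuming a small wrapper class captures the return value (shown for `startChangePrepThread`; the other helpers would follow the same pattern):

import threading

class ReturnThread(threading.Thread):
    # Thread whose join() returns the target function's return value.
    def __init__(self, target, args=()):
        super().__init__()
        self._target_fn = target
        self._args = args
        self._ret = None

    def run(self):
        self._ret = self._target_fn(*self._args)

    def join(self, timeout=None):
        super().join(timeout)
        return self._ret

def startChangePrepThread(catalog_id, s3_location, clip):
    # One worker per catalog id; changePrep (Example #2) returns 0 on success.
    thread = ReturnThread(target=changePrep, args=(catalog_id, s3_location, clip))
    thread.start()
    return thread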
Example #25
protoLULC = gbdx.Task("protogenV2LULC", raster=gluetask2.outputs.data.value)
protoPAN = gbdx.Task("protogenV2PANTEX10", raster=gluetask2.outputs.data.value)
protoRAC = gbdx.Task("protogenV2RAC", raster=gluetask2.outputs.data.value)
protoRAS = gbdx.Task("protogenV2RAS", raster=gluetask2.outputs.data.value)
protoRAV = gbdx.Task("protogenV2RAV", raster=gluetask2.outputs.data.value)
protoRAW = gbdx.Task("protogenV2RAW", raster=gluetask2.outputs.data.value)

'''
protoLULC3 = gbdx.Task("protogenV2LULC", raster=prep3.outputs.data)
protoPAN3 = gbdx.Task("protogenV2PANTEX10", raster=prep3.outputs.data)
protoRAC3 = gbdx.Task("protogenV2RAC", raster=prep3.outputs.data)
protoRAS3 = gbdx.Task("protogenV2RAS", raster=prep3.outputs.data)
protoRAV3 = gbdx.Task("protogenV2RAV", raster=prep3.outputs.data)
protoRAW3 = gbdx.Task("protogenV2RAW", raster=prep3.outputs.data)

workflow = gbdx.Workflow(
    [prep3, protoLULC3, protoPAN3, protoRAC3, protoRAS3, protoRAV3, protoRAW3])
'''
workflow.savedata(protoLULC.outputs.data, location="Benchmark/Protogen/LULC")
workflow.savedata(protoPAN.outputs.data, location="Benchmark/Protogen/PAN")
workflow.savedata(protoRAC.outputs.data, location="Benchmark/Protogen/RAC")
workflow.savedata(protoRAS.outputs.data, location="Benchmark/Protogen/RAS")
workflow.savedata(protoRAV.outputs.data, location="Benchmark/Protogen/RAV")
workflow.savedata(protoRAW.outputs.data, location="Benchmark/Protogen/RAW")
'''

workflow.savedata(protoLULC3.outputs.data,
                  location="Benchmark/Protogen/prep/LULC3")
workflow.savedata(protoPAN3.outputs.data,
                  location="Benchmark/Protogen/prep/PAN3")
workflow.savedata(protoRAC3.outputs.data,
                  location="Benchmark/Protogen/prep/RAC3")
Example #26
WV2 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV2"
WV3 = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/WV3"
GE = "s3://gbd-customer-data/7d8cfdb6-13ee-4a2a-bf7e-0aff4795d927/Benchmark/GE/055217125010_01"


aop2envi = gbdx.Task("AOP_ENVI_HDR")
#aop2envi.inputs.image = QB
#aop2envi.inputs.image = WV2
#aop2envi.inputs.image = WV3
aop2envi.inputs.image = GE

envi_query = gbdx.Task("ENVI_QuerySpectralIndices")
envi_query.inputs.input_raster = aop2envi.outputs.output_data.value
envi_query.inputs.file_types = "hdr"

workflow = gbdx.Workflow([aop2envi, envi_query])

'''
workflow.savedata(
  envi_query.outputs.task_meta_data,
    location='Benchmark/QSI/QB'
)

workflow.savedata(
  envi_query.outputs.task_meta_data,
    location='Benchmark/QSI/WV2'
)

workflow.savedata(
  envi_query.outputs.task_meta_data,
    location='Benchmark/QSI/WV3'
Example #27
from gbdxtools import Interface
"""
Example using multiple inputs with 1 submission
"""

gbdx = Interface()

# note there are 2 inputs
data = [
    "s3://receiving-dgcs-tdgplatform-com/054813633050_01_003",
    "http://test-tdgplatform-com/data/QB02/LV1B/053702625010_01_004/053702625010_01/053702625010_01_P013_MUL"
]

aoptask = gbdx.Task("AOP_Strip_Processor",
                    data=data,
                    enable_acomp=True,
                    enable_pansharpen=True)

workflow = gbdx.Workflow([aoptask])

workflow.savedata(aoptask.outputs.data, location='some_folder')

batch_workflow_id = workflow.execute()
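Passing a list of inputs makes `execute()` submit a batch workflow and return the batch id rather than a single workflow id, with each input fanned out to its own workflow. A follow-up sketch, assuming the low-level client (used the same way as `gbdx.workflow.status` in Example #29) exposes a batch status call:

print(batch_workflow_id)
# assumption: batch_workflow_status reports on all fanned-out workflows at once
print(gbdx.workflow.batch_workflow_status(batch_workflow_id))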
Example #28
                    bands="MS",
                    enable_pansharpen=False,
                    enable_dra=False)

# Capture AOP task outputs
log = aoptask.get_output('log')
orthoed_output = aoptask.get_output('data')

# Stage AOP output for the Protogen Task using the Protogen Prep Task
pp_task = gbdx.Task("ProtogenPrep", raster=aoptask.outputs.data.value)

# Setup ProtogenV2LULC Task
prot_lulc = gbdx.Task("protogenV2LULC", raster=pp_task.outputs.data.value)

# Run Combined Workflow
workflow = gbdx.Workflow([aoptask, pp_task, prot_lulc])

# Send output to an S3 bucket.
# Once you are familiar with the process, it is not necessary to save the output from the intermediate steps.
# Edit the following line(s) to reflect specific folder(s) for the output file (example location provided)
# workflow.savedata(aoptask.outputs.data,location='s3://gbd-customer-data/CustomerAccount#/Protogen_LULC/')
# workflow.savedata(pp_task.outputs.data,location='s3://gbd-customer-data/CustomerAccount#/ProtoPrep/')
# workflow.savedata(prot_lulc.outputs.data,location='s3://gbd-customer-data/CustomerAccount#/Protogen_LULC/LULC/')

base_folder = 'denver_lulc_example_sw_{}_mcglinchy'.format(scene_id)
workflow.savedata(aoptask.outputs.data,
                  location='{}/Protogen_LULC/'.format(base_folder))
workflow.savedata(pp_task.outputs.data,
                  location='{}/ProtoPrep/'.format(base_folder))
workflow.savedata(prot_lulc.outputs.data,
                  location='{}/Protogen_LULC/LULC/'.format(base_folder))
Example #29
        print(s3path)

        aoptask = gbdx.Task("AOP_Strip_Processor",
                            data=s3path,
                            enable_acomp=True,
                            enable_pansharpen=False,
                            enable_dra=False,
                            bands='MS')
        s3task = gbdx.Task("StageDataToS3")
        s3task.inputs.data = aoptask.outputs.data.value
        s3task.inputs.destination = "s3://molly-g/seattleAOP/"

        pp_task = gbdx.Task(
            "ProtogenPrep", raster=aoptask.outputs.data.value
        )  # ProtogenPrep task is used to get AOP output into proper format for protogen task
        prot_ubfp = gbdx.Task("protogenV2UBFP",
                              raster=pp_task.outputs.data.value)
        workflow = gbdx.Workflow([pp_task, aoptask, prot_ubfp, s3task
                                  ])  # Cool tip: these can be in any order!
        workflow.savedata(
            prot_ubfp.outputs.data.value, location="/molly-g/seattle"
        )  # 'location' should be your subdirectory in your bucket prefix
        workflow.execute()
        workflowList.append(workflow.id)

print(workflowList)

for wfID in workflowList:
    status = gbdx.workflow.status(wfID)
    print(status)