Example #1
def deploy_keras_model(dltile, src_product_id, dest_product_id):
    # Imports live inside the function so it can be serialized and run
    # remotely (e.g. as a Tasks function).
    import descarteslabs as dl
    import tensorflow as tf
    import numpy as np
    catalog_client = dl.Catalog()
    raster_client = dl.Raster()
    metadata_client = dl.Metadata()
    # NOTE substitute your own trained model here.
    model = tf.keras.applications.resnet50.ResNet50()
    scene = metadata_client.search(src_product_id,
                                   geom=raster_client.dltile(dltile),
                                   limit=1)['features'][0]['id']
    tile, meta = raster_client.ndarray(scene,
                                       bands=['red', 'green', 'blue'],
                                       scales=[[0, 255]] * 3,
                                       ot='Byte',
                                       dltile=dltile)
    # resnet50 expects the shape of the input array to be 4 dimensional, which allows for batch
    # predictions.
    tile_for_prediction = tile[np.newaxis, :]
    pred = model.predict(tile_for_prediction)
    # get predicted class with a simple maximum of class probabilities.
    class_ = np.argmax(pred, 1)
    # create a new raster of the tile area with one channel of the prediction from the model.
    image = np.full(tile.shape[:-1], class_, dtype=np.uint16)
    # upload a tile of this "prediction" to catalog
    image_id = ':'.join([src_product_id, dltile.replace(':', '_')])
    catalog_client.upload_ndarray(image,
                                  dest_product_id,
                                  image_id,
                                  raster_meta=meta)
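A minimal sketch of how this function might be fanned out over an AOI with Tasks, mirroring the pattern in Example #6 below; the AOI, product IDs, Docker image, and requirements are placeholder assumptions.

import descarteslabs as dl

# Placeholder AOI; 224 px tiles match ResNet50's expected input size.
aoi = {"type": "Polygon", "coordinates": [[[-122.51, 37.70], [-122.35, 37.70],
                                           [-122.35, 37.83], [-122.51, 37.83],
                                           [-122.51, 37.70]]]}
tiles = dl.scenes.DLTile.from_shape(aoi, resolution=10.0, tilesize=224, pad=0)

tasks = dl.Tasks()
deploy_remotely = tasks.create_function(
    deploy_keras_model,
    name="example keras model deployment",
    image="us.gcr.io/dl-ci-cd/images/tasks/public/py3.7/default:v2019.05.29",
    requirements=["tensorflow"],  # the workers need TensorFlow available
)

# Placeholder product IDs: a source product with red/green/blue bands and a
# destination product for the predictions.
args = [(tile.key, "my_org:source_rgb_product", "my_org:resnet_predictions")
        for tile in tiles]
results = deploy_remotely.map(*zip(*args))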
Example #2
def __init__(self, aoi,
             item_type=ItemType.PSScene4Band,
             product_id=None,
             title=None,
             description=None,
             overwrite=False,
             start_datetime=None,
             end_datetime=None,
             cloud_fraction=1,
             limit=None,
             order_id=None,
             api_key=None):
    # NOTE: assumes module-level imports (e.g. `json`, `xmltodict`,
    # `descarteslabs as dl`, and `from planet import api as p`) plus the
    # `ItemType` enum and `clip_and_download` helper defined elsewhere.
    self._planet = p.ClientV1(api_key=api_key)
    self._catalog = dl.Catalog()
    self._metadata = dl.Metadata()
    self._auth = dl.Auth()
    self._order_id = order_id
    self._title = title
    self._description = description
    self.stats = None
    self._running = False
    self._items = []
    self.uploads = []

    if self._running:
        raise Exception('Already processing')
    else:
        self._running = True

    self._start_datetime = start_datetime
    self._end_datetime = end_datetime
    self._cloud_fraction = cloud_fraction
    self._limit = limit

    self._get_items(aoi, [item_type.name])
    self._init_product(product_id, item_type=item_type, overwrite=overwrite)

    item_ids = [item['id'] for item in self._items]

    scenes = clip_and_download(aoi, item_ids, item_type.name,
                               item_type.bundle, api_key,
                               order_id=self._order_id)
    for scene_id, scene in scenes.items():
        # Planet delivers a JSON metadata file alongside each clipped scene.
        with open(scene['metadata.json']) as meta_file:
            metadata = json.load(meta_file)['properties']

        # The XML metadata carries per-band radiometric calibration values;
        # record them as extra properties on the uploaded image.
        with open(scene['3B_AnalyticMS_metadata_clip.xml']) as xml_file:
            xml_meta = xmltodict.parse(xml_file.read())
        for band in xml_meta['ps:EarthObservation']['gml:resultOf']['ps:EarthObservationResult']['ps:bandSpecificMetadata']:
            metadata[f"band_{band['ps:bandNumber']}_radiometricScaleFactor"] = band['ps:radiometricScaleFactor']
            metadata[f"band_{band['ps:bandNumber']}_reflectanceCoefficient"] = band['ps:reflectanceCoefficient']

        self._upload_image([str(scene[str(file_key)]) for file_key in item_type.files],
                           metadata, scene_id)
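A hypothetical instantiation of whatever class this `__init__` belongs to (called `PlanetUploader` here purely for illustration); the AOI, dates, product details, and API key are placeholders.

aoi = {"type": "Polygon", "coordinates": [[[-105.30, 39.90], [-105.10, 39.90],
                                           [-105.10, 40.10], [-105.30, 40.10],
                                           [-105.30, 39.90]]]}

uploader = PlanetUploader(  # hypothetical class name
    aoi,
    product_id="planet_psscene4band_clips",
    title="Clipped PSScene4Band uploads",
    description="Planet scenes clipped to an AOI and pushed to the DL catalog",
    start_datetime="2019-01-01",
    end_datetime="2019-02-01",
    cloud_fraction=0.2,
    limit=10,
    api_key="YOUR_PLANET_API_KEY",
)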
Example #3
def run_model(model_version, dltile_key, output_product_id):
    import descarteslabs as dl
    import numpy as np
    import pickle
    import os

    # Load the model, optionally downloading it from DL storage
    model_filename = "water_model_{}".format(model_version)
    if not os.path.exists(model_filename):
        try:
            dl.Storage().get_file(model_filename, model_filename)
        except dl.client.exceptions.NotFoundError:
            print("Could not find {} in DL storage".format(model_filename))
            return

    with open(model_filename, "rb") as f:
        classifier = pickle.load(f)

    # Find input scenes for the dltile
    dltile = dl.scenes.DLTile.from_key(dltile_key)
    input_scenes, ctx = dl.scenes.search(
        dltile,
        products=["sentinel-1:GRD"],
        start_datetime="2019-03-01",
        end_datetime="2019-05-01",
        query=getattr(dl.properties, "pass") == "ASCENDING",
    )
    print("Input scenes:", input_scenes)

    input_stack = input_scenes.stack("vv vh", ctx, bands_axis=-1)

    # Predict
    water_prediction = classifier.predict(input_stack.reshape(-1, 2))
    water_prediction = water_prediction.reshape(input_stack[:, :, :, 0].shape)

    # Aggregate the predictions into a single raster for the tile
    composite_prediction = water_prediction.mean(axis=0)

    # Discretize our prediction from 0.0-1.0 to 0-255
    composite_prediction = (composite_prediction * 255).astype(np.uint8)
    print("Uploading output to the catalog with shape",
          composite_prediction.shape)

    # Upload the result back to catalog
    upload_id = dl.Catalog().upload_ndarray(
        composite_prediction,
        output_product_id,
        dltile.key,
        geotrans=ctx.geotrans,
        proj4=ctx.proj4,
    )

    print("Upload task id:", upload_id)
Example #4
def create_product(product_name):
    catalog = dl.Catalog()

    # Create a product in the catalog. We will upload output rasters from the
    # model to this product for storage and further consumption.
    product = catalog.add_product(
        product_name,
        title="Water Classification Example",
        description="Water Classification Example",
    )

    # Configure a band for our classification output
    catalog.add_band(
        product["data"]["id"],
        name="water",
        type="mask",
        srcband=1,  # 1-based index
        dtype="Byte",
        nbits=8,
        data_range=[0, 255],
    )

    print("Created product", product["data"]["id"])
Example #5
"""Simple example of how to create a product, then add
some bands and imagery. We will use the included file `building_mask.tif`
as an example of some imagery you might want to upload with the catalog.
"""

import descarteslabs as dl
import os
from random import randint
from time import sleep

catalog_client = dl.Catalog()
metadata_client = dl.Metadata()
raster_client = dl.Raster()

# First step, create a product, which is a descriptive document you use to
# group related images.

product_id = catalog_client.add_product(
    "building_mask:osm:v0",
    title="OSM Building Mask",
    description="Rasterized OSM building footprints from vector data. "
                "Quality varies regionally.",
)["data"]["id"]

# Next we need to add bands. The core function of a band is to tell us how data
# is encoded in the imagery that you are going to upload. For these building
# masks there is only one file per scene, and each scene has one 8 bit band.

band_id = catalog_client.add_band(
    product_id=product_id,  # id of the product we just created.
    name="footprint",  # this is a unique name to describe what the band encodes.
    # The example is truncated here; the remaining arguments and the id
    # extraction below mirror the "water" band in Example #4 and are assumptions.
    type="mask",
    srcband=1,  # 1-based index
    dtype="Byte",
    nbits=8,
    data_range=[0, 255],
)["data"]["id"]
Example #6
def deploy(model_version, output_product):
    # NOTE: assumes module-level `import descarteslabs as dl`, the `run_model`
    # function from Example #3, and `as_completed` from the Tasks client
    # (e.g. `from descarteslabs.client.services.tasks import as_completed`).
    deploy_aoi = {
        "type":
        "Polygon",
        "coordinates": [[
            [-99.24164417538321, 26.138411465362807],
            [-93.37666136803256, 26.138411465362807],
            [-93.37666136803256, 31.060649553995205],
            [-99.24164417538321, 31.060649553995205],
            [-99.24164417538321, 26.138411465362807],
        ]],
    }

    # Make sure the output product exists
    try:
        dl.Catalog().get_product(output_product)
    except dl.client.exceptions.NotFoundError:
        print("Output product {} does not exist".format(output_product))
        return

    # Decompose our AOI into 1024x1024 pixel tiles at 90m resolution in UTM
    tiles = dl.scenes.DLTile.from_shape(deploy_aoi,
                                        resolution=90.0,
                                        tilesize=1024,
                                        pad=0)

    # Register our prediction function in the Tasks environment.
    #
    # We specify the resource requirements per worker (1 CPU & 2GB of RAM),
    # the environment (container with Python 3.7), and any extra PyPI
    # requirements (descarteslabs client and scikit-learn).
    tasks = dl.Tasks()
    run_model_remotely = tasks.create_function(
        run_model,
        name="example water model deployment",
        image="us.gcr.io/dl-ci-cd/images/tasks/public/py3.7/default:v2019.05.29",
        cpu=1.0,
        memory="2Gi",
        requirements=[
            "descarteslabs[complete]==0.19.0",
            "scikit-learn==0.21.1",
        ],
    )

    # Create a list with arguments of each invocation of run_model
    task_arguments = [(model_version, dltile.key, output_product)
                      for dltile in tiles]

    results = run_model_remotely.map(*zip(*task_arguments))
    print("Submitted {} tasks to task group {}...".format(
        len(tiles), run_model_remotely.group_id))

    # Iterate through task results as they complete.
    #
    # If some of the tasks failed, we will print the console output and the
    # arguments of that invocation.
    #
    # Note that this is for informational purposes only, and the tasks will
    # continue running if the script is interrupted at this point. You can use
    # https://monitor.descarteslabs.com/ to see the status of all running
    # task groups.
    for i, task in enumerate(as_completed(results, show_progress=False)):
        completed = i + 1
        percent_complete = 100.0 * completed / len(results)
        print(
            "Progress update: {} completed out of {} ({:.2f}%) - last task took {:.2f}sec to {}"
            .format(
                completed,
                len(results),
                percent_complete,
                task.runtime,
                "succeed" if task.is_success else "fail",
            ))

        if not task.is_success:
            print("\nTASK FAILURE with arguments {}:\n{}".format(
                task.args, task.log.decode()))

    # Clean up the task group
    tasks.delete_group_by_id(run_model_remotely.group_id)
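A hypothetical top-level driver; the model version and product ID are placeholders, and the output product is assumed to have been created beforehand (see Example #4).

if __name__ == "__main__":
    deploy(model_version="v1",
           output_product="my_org:water_classification_example")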
Example #7
"""
==================================================
Upload ndarray to new product
==================================================

This example demonstrates how to create a product
in our Catalog and upload an example scene.
"""
import descarteslabs as dl
import numpy as np

catalog = dl.Catalog()

################################################
# Create a product entry in our Catalog
product = catalog.add_product(
    "Paris_final_3",
    title="Simple Image Upload_final_3",
    description="An example of creating a product, adding the visible band range, and ingesting a single scene.",
)

# Maintain the product id to upload scenes
product_id = product["data"]["id"]

################################################
# Add band information to the product
# This is a necessary step, and requires the user
# to know a bit about the data to be ingested
bands = ["red", "green", "blue"]

for val, band in enumerate(bands):
    # The example is truncated here; these band parameters mirror the
    # add_band call in Example #4 and are illustrative assumptions.
    catalog.add_band(
        product_id,
        name=band,
        srcband=val + 1,  # 1-based band index within the source file
        dtype="Byte",
        nbits=8,
        data_range=[0, 255],
        type="spectral",
    )
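
################################################
# Ingest a single scene (sketch)
# The original example is cut off above; this is a hypothetical ingest step
# using a random RGB array over a single DLTile near Paris.
tile = dl.scenes.DLTile.from_latlon(48.8566, 2.3522, resolution=10.0,
                                    tilesize=256, pad=0)
scene_array = np.random.randint(0, 256, size=(256, 256, 3), dtype=np.uint8)

upload_id = catalog.upload_ndarray(
    scene_array,
    product_id,
    "paris_example_scene",  # placeholder image id
    geotrans=tile.geotrans,
    proj4=tile.proj4,
)
print("Upload id:", upload_id)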