示例#1
0
 def test_from_url_with_plugin_style_params(self):
     """Smoke-test Node.from_url on WMS GetMap URLs carrying PARAMS payloads.

     One URL embeds a "plugin" entry, the other a full "style" definition;
     both should parse into a Node without raising.
     """
     plugin_url = (
         r"https://mobility-devel.crearecomputing.com/geowatch?&SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&"
         r"LAYERS=Arange&STYLES=&FORMAT=image%2Fpng&TRANSPARENT=true&HEIGHT=256&WIDTH=256"
         r"&CRS=EPSG%3A3857&BBOX=-20037508.342789244,10018754.171394618,-10018754.171394622,20037508.34278071&"
         r'PARAMS={"plugin": "podpac.algorithm"}'
     )
     style_url = (
         r"https://mobility-devel.crearecomputing.com/geowatch?&SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&"
         r"LAYERS=datalib.terraintiles.TerrainTiles&STYLES=&FORMAT=image%2Fpng&TRANSPARENT=true&HEIGHT=256&WIDTH=256&"
         r"TIME=2021-03-01T12%3A00%3A00.000Z&CRS=EPSG%3A3857&BBOX=-10018754.171394622,5009377.08569731,-9392582.035682458,5635549.221409475"
         r'&PARAMS={"style": {"name": "Aspect (Composited 30-90 m)","units": "radians","colormap": "hsv","clim": [0,6.283185307179586]}}'
     )
     for wms_url in (plugin_url, style_url):
         node = Node.from_url(wms_url)
示例#2
0
    def test_from_url(self):
        """Smoke-test Node.from_url across WMS and WCS GetMap-style URLs."""
        url_template = (
            r"http://testwms/?map=map&&service={service}&request=GetMap&{layername}={layer}&styles=&format=image%2Fpng"
            r"&transparent=true&version=1.1.1&transparency=true&width=256&height=256&srs=EPSG%3A4326"
            r"&bbox=40,-71,41,70&time=2018-05-19&PARAMS={params}"
        )

        # PARAMS payloads are matched positionally against the layers below;
        # extra entries are ignored by zip.
        params_list = ["{}", '{"a":{"node":"algorithm.Arange"}}', "{}", "{}"]
        layers = [
            "algorithm.SinCoords",
            "%PARAMS%",
            # urllib.urlencode({'a':'https://raw.githubusercontent.com/creare-com/podpac/develop/podpac/core/pipeline/test/test.json'})[2:],
            # urllib.urlencode({'a':'s3://podpac-s3/test/test.json'})[2:]  # Tested locally, works fine. Hard to test with CI
        ]

        for service, layername in zip(["WMS", "WCS"], ["LAYERS", "COVERAGE"]):
            for layer, param in zip(layers, params_list):
                full_url = url_template.format(
                    service=service, layername=layername, layer=layer, params=param
                )
                node = Node.from_url(full_url)
示例#3
0
def handler(event, context):
    """AWS Lambda entry point: build and evaluate a PODPAC pipeline.

    Downloads runtime dependencies from S3 on a cold start, constructs a
    Node and Coordinates from the trigger payload, evaluates the node, and
    returns/stores the output according to the trigger type.

    Parameters
    ----------
    event : dict
        The Lambda trigger event ("eval", "S3", or API Gateway payload).
    context : object
        AWS Lambda runtime context object (unused).

    Returns
    -------
    For "eval" triggers, the formatted output body; for "APIGateway"
    triggers, an API Gateway response dict; otherwise None (the "S3"
    trigger writes its result to a bucket as a side effect).
    """
    print(event)

    # Add /tmp/ path to handle python path for dependencies
    sys.path.append("/tmp/")

    # handle triggers
    trigger = get_trigger(event)

    # parse event; bail if we can't parse
    pipeline = parse_event(trigger, event)
    if pipeline is None:
        return

    # -----
    # TODO: remove when layers is configured
    # Get configured bucket to download dependencies. The environment
    # variable, if set, cannot be overwritten; otherwise the bucket HAS to
    # be specified in the pipeline settings.
    bucket = os.environ.get("S3_BUCKET_NAME", pipeline["settings"].get("S3_BUCKET_NAME"))

    # get dependencies path (env var takes precedence over settings)
    if "FUNCTION_DEPENDENCIES_KEY" in pipeline["settings"] or "FUNCTION_DEPENDENCIES_KEY" in os.environ:
        dependencies = os.environ.get(
            "FUNCTION_DEPENDENCIES_KEY", pipeline["settings"].get("FUNCTION_DEPENDENCIES_KEY")
        )
    else:
        # this should be equivalent to version.semver()
        dependencies = "podpac_deps_{}.zip".format(
            os.environ.get("PODPAC_VERSION", pipeline["settings"].get("PODPAC_VERSION"))
        )
        if "None" in dependencies:
            dependencies = "podpac_deps.zip"  # development version of podpac

    # Check to see if this function is "hot", in which case the dependencies
    # have already been downloaded and are available for use right away.
    if os.path.exists("/tmp/scipy"):
        print(
            "Scipy has been detected in the /tmp/ directory. Assuming this function is hot, dependencies will"
            " not be downloaded."
        )
    else:
        # Download dependencies from specific bucket/object
        print("Downloading and extracting dependencies from {} {}".format(bucket, dependencies))
        s3 = boto3.client("s3")
        s3.download_file(bucket, dependencies, "/tmp/" + dependencies)
        subprocess.call(["unzip", "/tmp/" + dependencies, "-d", "/tmp"])
        sys.path.append("/tmp/")
        subprocess.call(["rm", "/tmp/" + dependencies])
        # -----

    # Load PODPAC
    # Need to set matplotlib backend to 'Agg' before importing it elsewhere
    import matplotlib

    matplotlib.use("agg")
    from podpac import settings
    from podpac.core.node import Node
    from podpac.core.coordinates import Coordinates
    from podpac.core.utils import JSONEncoder, _get_query_params_from_url
    import podpac.datalib

    # update podpac settings with inputs from the trigger
    settings.update(json.loads(os.environ.get("SETTINGS", "{}")))
    settings.update(pipeline["settings"])

    # build the Node and Coordinates
    if trigger in ("eval", "S3"):
        node = Node.from_definition(pipeline["pipeline"])
        coords = Coordinates.from_json(json.dumps(pipeline["coordinates"], indent=4, cls=JSONEncoder))
    # TODO: handle API Gateway better - is this always going to be WCS?
    elif trigger == "APIGateway":
        node = Node.from_url(pipeline["url"])
        coords = Coordinates.from_url(pipeline["url"])
    else:
        # Previously an unexpected trigger fell through to an opaque
        # NameError on `node`; fail explicitly instead.
        raise ValueError("Unsupported trigger: {}".format(trigger))

    # make sure pipeline is allowed to be run
    if "PODPAC_RESTRICT_PIPELINES" in os.environ:
        whitelist = json.loads(os.environ["PODPAC_RESTRICT_PIPELINES"])
        if node.hash not in whitelist:
            raise ValueError("Node hash is not in the whitelist for this function")

    # run analysis
    output = node.eval(coords)

    # convert to output format
    body = output.to_format(pipeline["output"]["format"], **pipeline["output"]["format_kwargs"])

    # Response
    if trigger == "eval":
        return body

    elif trigger == "S3":
        # BUG FIX: the client was previously created only on the cold-start
        # path above, so a hot invocation with an S3 trigger raised
        # NameError here. Create the client where it is used.
        s3 = boto3.client("s3")
        s3.put_object(Bucket=settings["S3_BUCKET_NAME"], Key=pipeline["output"]["filename"], Body=body)

    elif trigger == "APIGateway":
        # TODO: can we handle the deserialization better?
        try:
            json.dumps(body)
        except Exception:
            print("Output body is not serializable, attempting to decode.")
            body = body.decode()

        return {
            "statusCode": 200,
            "headers": {"Content-Type": pipeline["output"]["format"]},
            "isBase64Encoded": pipeline["output"]["format_kwargs"]["return_base64"],
            "body": body,
        }