# --- Example 1 ---
def main(req: func.HttpRequest,
         doc: func.Out[func.Document]) -> func.HttpResponse:
    """HTTP-triggered function that stores a new product document in Cosmos DB.

    The product name is taken from the route parameters, falling back to the
    JSON request body.  When a name is present, a document with a fresh UUID
    id is written through the Cosmos DB output binding.
    """
    logging.info('Python HTTP trigger function processed a request.')

    # Prefer the route parameter; fall back to the JSON body.
    name = req.route_params.get('name')
    if not name:
        try:
            body = req.get_json()
        except ValueError:
            pass
        else:
            name = body.get('name')

    if not name:
        return func.HttpResponse(
            "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
            status_code=200)

    # Persist a single new product document via the output binding.
    out_docs = func.DocumentList()
    out_docs.append(
        func.Document.from_dict({"id": str(uuid.uuid4()), "name": name}))
    doc.set(out_docs)

    return func.HttpResponse(
        f"Hello, {name}. This HTTP triggered function executed successfully."
    )
def test_main_handler_should_generate_events_if_hidden_attrib_is_found():
    """main_handler should emit events only for the documents that carry the
    hidden attribute (10 of the 15 samples below)."""
    out = OutImpl()
    documents = func.DocumentList()
    # 10 sample documents with the hidden attribute, 5 without it.
    for _ in range(10):
        documents.append(func.Document.from_dict(generate_sample_document()))
    for _ in range(5):
        documents.append(
            func.Document.from_dict(generate_sample_document(False)))

    main_handler(documents, out)

    assert len(out.get()) == 10
# --- Example 3 ---
def main(mytimer: func.TimerRequest, azServicesOut: func.Out[func.Document],
         azCapabilitiesOut: func.Out[func.Document]) -> str:
    """Timer-triggered function that refreshes Azure Government service and
    capability documents in Cosmos DB.

    Fetches the service and capability lists from AzGovProductAvailabilty and
    writes each list through its own Cosmos DB output binding.  On failure the
    error is logged and 0 is returned (original behavior preserved).
    """
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)

    timestamp = datetime.datetime.now()

    if mytimer.past_due:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning('The timer is past due!')

    logging.info('Python timer trigger function ran at %s', timestamp)

    sl = AzGovProductAvailabilty()

    try:
        # One document per service.
        svcDocs = func.DocumentList()
        for service in sl.getServicesList():
            logging.info(service)
            svcDocs.append(func.Document.from_dict(service))
        azServicesOut.set(svcDocs)

        # One document per capability.
        capDocs = func.DocumentList()
        for cap in sl.getCapabilitiesList():
            logging.info(cap)
            capDocs.append(func.Document.from_dict(cap))
        azCapabilitiesOut.set(capDocs)

    except Exception as e:
        # BUG FIX: 'Error:' + e raised TypeError (str + Exception concatenation)
        # and masked the real error; use lazy %-formatting instead.
        logging.error('Error: %s', e)
        return 0
# --- Example 4 ---
def main(myblob: func.InputStream, outputDoc: func.Out[func.DocumentList]):
    """Blob-triggered function: enrich the uploaded JSON's "elements" and
    write them to Cosmos DB through the output binding."""
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    payload = json.loads(myblob.read().decode('utf-8'))
    elements = payload["elements"]

    # Stamp every element with the download time and current gameweek, then
    # give each one a unique id before persisting.
    transformations.add_gw_and_download_time(
        elements,
        payload["download_time"],
        transformations.get_game_week(payload["events"]))
    transformations.add_unique_id(elements)

    outputDoc.set(func.DocumentList(elements))
def main(documents: func.DocumentList) -> func.DocumentList:
    """Cosmos DB trigger: log each incoming document and return three fixed
    new documents through the output binding."""
    logging.info(f"CosmosDB trigger executed!")

    for doc in documents:
        logging.info(f"Document: {doc.to_json()}")

    # Build the three output documents in one pass.
    out_docs = [
        func.Document.from_dict({"text": str(i), "foo": "bar"})
        for i in range(3)
    ]
    return func.DocumentList(out_docs)
# --- Example 6 ---
def main(documentsin: func.DocumentList,
         documentsout: func.Out[func.Document]) -> str:
    """Copy every incoming document, unchanged, into a second collection."""
    copies = func.DocumentList()

    # Round-trip each document through JSON to get an independent copy.
    for doc in documentsin:
        logging.info(f"Document: {doc.to_json()}")
        clone = func.Document.from_json(doc.to_json())
        logging.info("newDoc: %s", clone)
        copies.append(clone)

    documentsout.set(copies)
# --- Example 7 ---
def main(hourdocin: func.DocumentList,
         hourdocout: func.Out[func.Document]) -> str:
    """Copy each incoming document into a second collection, adding 10 to
    its "value" field on the way."""
    updated = func.DocumentList()

    # Clone via JSON round-trip, then bump the value.
    for doc in hourdocin:
        logging.info(f"Document: {doc.to_json()}")
        clone = func.Document.from_json(doc.to_json())
        clone["value"] += 10
        logging.info("newDoc: %s", clone.to_json())
        updated.append(clone)

    hourdocout.set(updated)
# --- Example 8 ---
def main(inputblob: func.InputStream, doc: func.Out[func.Document],
         outputblob: func.Out[str]):
    """Extract entities from a blob's "Text" field; write the result both to
    an output blob (as JSON) and to Cosmos DB (as one document)."""
    request_json = json.load(inputblob)
    text = request_json["Text"]

    logging.info(f"Processing request from blob {inputblob.name}")

    response_doc = aze.extract_entities([text], inputblob.name)

    # Raw JSON copy to the blob output binding.
    outputblob.set(json.dumps(response_doc))

    # Single-document list for the Cosmos DB output binding.
    results = func.DocumentList()
    results.append(func.Document.from_dict(response_doc))
    doc.set(results)

    logging.info(f"extractEntities was executed successfully.")
# --- Example 9 ---
def main(docs: func.DocumentList, outdoc: func.Out[func.Document]) -> str:
    """ROT13-encrypt the "text" field of each incoming document and store the
    results through the Cosmos DB output binding."""
    encrypted_docs = func.DocumentList()

    for doc in docs:
        logging.info(doc.to_json())

        # Encrypt the payload text.
        ciphered = process_rot13(doc["text"])

        # Build the output document and queue it for the binding.
        encrypted_docs.append(
            func.Document.from_dict({"name": doc["name"], "text": ciphered}))

    outdoc.set(encrypted_docs)
# --- Example 10 ---
def main(mytimer: func.TimerRequest,
         azCosmosOut: func.Out[func.Document]) -> str:
    """Timer-triggered function that refreshes the audit-scope documents in
    Cosmos DB.

    Pulls the scope array from AuditScopeList and writes one document per
    entry through the Cosmos DB output binding.  On failure the error is
    logged and 0 is returned (original behavior preserved).
    """
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)

    timestamp = datetime.datetime.now()

    if mytimer.past_due:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning('The timer is past due!')

    logging.info('Python timer trigger function ran at %s', timestamp)

    sl = AuditScopeList()

    try:
        # One document per audit-scope entry.
        newdocs = func.DocumentList()
        for service in sl.getCosmosArray():
            logging.info(service)
            newdocs.append(func.Document.from_dict(service))

        azCosmosOut.set(newdocs)

    except Exception as e:
        # BUG FIX: 'Error:' + e raised TypeError (str + Exception concatenation)
        # and masked the real error; use lazy %-formatting instead.
        logging.error('Error: %s', e)
        return 0
def main(req: func.HttpRequest,
         cosmosDB: func.Out[func.Document]) -> func.HttpResponse:
    """HTTP trigger that joins audit-scope and product-availability data,
    stores the joined records in Cosmos DB, and returns a greeting."""
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)

    logging.info('Python HTTP trigger function processed a request.')

    # Resolve the optional name from the query string or the JSON body.
    name = req.params.get('name')
    if not name:
        try:
            body = req.get_json()
        except ValueError:
            pass
        else:
            name = body.get('name')

    # ***************************************************************

    joiner = DataJoiner(AuditScopeList(), AzGovProductAvailabilty())
    joined = joiner.getJoinedData()

    # One Cosmos DB document per joined record.
    out_docs = func.DocumentList()
    for item in joined.values():
        logging.info(item)
        out_docs.append(func.Document.from_dict(item))

    cosmosDB.set(out_docs)

    # ***************************************************************

    if name:
        return func.HttpResponse(
            f"Hello, {name}. This HTTP triggered function executed successfully."
        )
    return func.HttpResponse(
        "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
        status_code=200)
def main(mytimer: func.TimerRequest,
         outputDocument: func.Out[func.Document]) -> None:
    """Timer trigger: rebuild the forest-fire dataset from satellite NRT data
    and store the reshaped records in Cosmos DB."""
    nrt = GetNRTDataFromDefinedSat()
    logging.info("Delete container")
    logging.info(nrt.removeContainer())
    nrt.getListofFiles()
    json_path = nrt.getDataFromArchives()

    # Nothing to do when no archive file was produced.
    if not os.path.isfile(json_path):
        return

    logging.info("Database update STARTED!")
    new_docs = func.DocumentList()
    reshaper = ModifyFile()
    with open(json_path, "r") as f:
        for record in json.load(f):
            new_docs.append(
                func.Document.from_dict(
                    reshaper.forestFireDataStructure(record)))
    outputDocument.set(new_docs)
    os.remove(json_path)
    logging.info("Database update DONE!")
# --- Example 13 ---
def main(reservationsBlob: func.InputStream, locationsBlob: func.InputStream,
         document: func.Out[func.DocumentList]):
    """Join reservation rows with their locations and store the denormalized
    records in Cosmos DB."""
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {reservationsBlob.name}\n"
                 f"Blob Size: {reservationsBlob.length} bytes")

    reservations = parse_csv_blob(reservationsBlob, reservation_schema)
    locations = parse_csv_blob(locationsBlob, location_schema)

    for reservation in reservations:
        # Denormalize: copy title and polygon from the matching location,
        # or None when no location matches.
        match = next(
            (loc for loc in locations
             if loc["id"] == reservation["location_id"]), None)
        if match:
            reservation["location_title"] = match["title"]
            reservation["location_wgs84_polygon"] = match["wgs84_polygon"]
        else:
            reservation["location_title"] = None
            reservation["location_wgs84_polygon"] = None

        # Cosmos DB requires string id's
        reservation["id"] = str(reservation["id"])

    # Store all reservations in CosmosDB
    document.set(func.DocumentList(reservations))
# --- Example 14 ---
def main(documents: func.DocumentList, events: func.Out[func.Document]):
    """Emit one audit-event document for every changed document that carries
    a "_last_event_ctx" attribute; documents without it are skipped."""
    if not documents:
        return

    audit_events = func.DocumentList()

    for doc in documents:
        logging.info(doc.to_json())

        ctx = doc.get("_last_event_ctx")
        if ctx is None:
            logging.warning("- Not saved!")
            continue

        # Flatten the event context into a standalone event document.
        audit_events.append(func.Document.from_dict({
            "id": str(uuid.uuid4()),
            "date": datetime.utcnow().isoformat(),
            "user_id": ctx.get("user_id"),
            "action": ctx.get("action"),
            "description": ctx.get("description"),
            "item_id": doc.get("id"),
            "container_id": ctx.get("container_id"),
            "session_id": ctx.get("session_id"),
            "tenant_id": ctx.get("tenant_id"),
        }))

    if len(audit_events):
        events.set(audit_events)
    else:
        logging.warning("No valid events were found!")
# --- Example 15 ---
def main(mytimer: func.TimerRequest,
         document: func.Out[func.Document]) -> None:
    """Timer trigger: load Swisscom heatmap dwell-density data and store one
    document per tile in Cosmos DB.

    Flow: OAuth2 client-credentials login, fetch the tile grid for a fixed
    municipality (id 261 in the URL), fetch the daily density score for those
    tiles in chunks of 100 ids, then write a document per tile containing its
    density and a GeoJSON point location.
    """
    # NOTE(review): computed but never used afterwards.
    utc_timestamp = (datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat())

    get_vars()

    # Read the secrets from the environment
    client_id = os.environ.get("SWISSCOM_CLIENT_ID")
    client_secret = os.environ.get("SWISSCOM_CLIENT_SECRET")

    # See https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow.
    client = BackendApplicationClient(client_id=client_id)
    oauth = OAuth2Session(client=client)

    # Fetch an access token.
    oauth.fetch_token(token_url=TOKEN_URL,
                      client_id=client_id,
                      client_secret=client_secret)

    # Use the access token to query an endpoint.
    resp = oauth.get(
        "https://api.swisscom.com/layer/heatmaps/demo/grids/municipalities/261",
        headers={"scs-version": "2"},
    )

    # Without the tile grid nothing else can be done; bail out entirely.
    if not resp.ok:
        logging.error("Failed to reach Swisscom API")
        return

    tiles = resp.json()["tiles"]

    logging.info("Loaded %d tiles from Swisscom", len(tiles))

    # Maps tileId -> density score, filled in chunk by chunk below.
    tile_density = {}

    # The density endpoint takes tile ids as a query parameter, so request
    # them in chunks of 100 ids at a time.
    tile_ids = (tile["tileId"] for tile in tiles)
    for chunk in partition(100, tile_ids):
        resp = oauth.get(
            "https://api.swisscom.com/layer/heatmaps/demo/heatmaps/dwell-density/daily/2020-03-28",  # TODO this should load data for the previous day instead
            params={"tiles": chunk},
            headers={"scs-version": "2"},
        )

        # A failed chunk is skipped, not fatal: its tiles simply end up
        # without a density and are filtered out below.
        if not resp.ok:
            logging.error("Failed to reach Swisscom API: %s", resp.json())
            continue

        tile_density.update(
            (tile["tileId"], tile["score"]) for tile in resp.json()["tiles"])

    logging.info("Loaded densitiy for %d tiles from Swisscom",
                 len(tile_density))

    # Build one Cosmos DB document per tile that has a density score.
    documents = func.DocumentList()
    for tile in tiles:
        tile_id = tile["tileId"]
        if tile_id not in tile_density:
            continue

        density = tile_density[tile_id]
        # GeoJSON point from the tile's lower-left ("ll") corner.
        location = {
            "type": "Point",
            "coordinates": [tile["ll"]["x"], tile["ll"]["y"]]
        }

        documents.append(
            func.Document.from_dict({
                "id": str(tile_id),
                "tileId": tile_id,
                "density": density,
                "location": location
            }))

    document.set(documents)

    logging.info("Finished outputting data")
# --- Example 16 ---
def main(documents: func.DocumentList, outdoc: func.Out[func.Document]):
    """Cosmos DB trigger: extract key phrases for documents not yet tagged.

    Documents whose "NER" field is empty are sent to the Text Analytics
    key-phrases API in batches of at most 100, the returned key phrases are
    written back into each document's "NER" field, and the updated documents
    are stored through the Cosmos DB output binding.
    """
    if documents:
        logging.info('Total Documents: %s', str(len(documents)))

    # Grab Env Variables
    #   Local Development = local.settings.json
    #   In Production = application settings in Azure
    Cognitive_Endpoint = os.environ['Cognitive_Endpoint']
    Cognitive_Key = os.environ['Cognitive_Key']

    # SDK Auth Flow
    credentials = CognitiveServicesCredentials(Cognitive_Key)
    text_analytics = TextAnalyticsClient(endpoint=Cognitive_Endpoint,
                                         credentials=credentials)

    # Create Doc List to append each doc to
    eventlist = func.DocumentList()

    batch_size = 100

    # Collect one request item per document that has not been tagged yet
    # (empty "NER" guards against re-processing our own output recursively).
    batch_job = [{
        "id": item["id"],
        "language": "en",
        "text": item["issue_title"]
    } for item in documents if len(item["NER"]) == 0]

    # Submit in batches of at most `batch_size` items.
    # BUG FIX: the original never cleared the pending batch after a call, so
    # every later batch re-submitted all earlier items (and could exceed the
    # service's batch limit); it also dropped the final partial batch when
    # the last document was already tagged.
    computed_batches = []
    for start in range(0, len(batch_job), batch_size):
        response = text_analytics.key_phrases(
            documents=batch_job[start:start + batch_size])
        computed_batches.extend(response.documents)

    # Index results by id for an O(1) join (the original rescanned the whole
    # result list for every document).
    results_by_id = {result.id: result for result in computed_batches}

    # Join the key phrases back onto the original documents.
    for item in documents:
        if len(item["NER"]) == 0:
            result = results_by_id.get(item["id"])
            if result is not None:
                item["NER"] = result.key_phrases
                eventlist.append(func.Document.from_dict(item))

    ## Set the DocumentList to outdoc to store into CosmosDB using CosmosDB output binding
    logging.info("Item Count: %s" % (len(eventlist)))
    outdoc.set(eventlist)
# --- Example 17 ---
def main(inputblob: func.InputStream, outputdb: func.Out[func.Document]):
    """Parse a gzipped JSON-lines blob of GitHub events and store every newly
    created issue (no comments yet) as a document in Cosmos DB."""
    logging.info(
        'Data uploaded to raw Blob Storage, function processing a new request.'
    )
    t.sleep(45)

    # Decompress the blob and parse one JSON record per line.
    with gzip.open(inputblob) as lines:
        data = [json.loads(line) for line in lines]

    # Batch of documents for the Cosmos DB output binding.
    new_events = func.DocumentList()

    NER = []
    count = 0

    records_count = str(len(data))
    logging.info(f'All Records: {records_count}')

    for event in data:
        # Keep only issue-related events...
        if "Issue" not in event["type"]:
            continue
        # ...that are brand new (no comments yet).
        issue = event["payload"]["issue"]
        if issue["comments"] != 0:
            continue

        count += 1

        # Reshape the raw event into the Cosmos DB record layout.
        new_events.append(func.Document.from_dict({
            "id": event["id"],
            "issue_id": issue["id"],
            "issue_title": issue["title"],
            "issue_num": issue["number"],
            "repo_id": event["repo"]["id"],
            "repo_name": event["repo"]["name"].rsplit('/', 1)[1],
            "created_datetime": event["created_at"],
            "lastupdated_datetime": event["created_at"],
            "issue_url": issue["html_url"],
            "last_state": event["payload"]["action"],
            "NER": NER,
            "json": event
        }))

    t.sleep(5)
    outputdb.set(new_events)
    logging.info(
        f'Total Record Count | New Issue Creation Records: {str(count)}')