Code example #1
def main(msg: func.QueueMessage) -> None:
    try:
        message = OATQueueMessage.parse_obj(msg.get_json())
        clp_id = message.clp_id
        detections = message.detections
        post_data = message.post_data

        token = utils.find_token_by_clp(clp_id, API_TOKENS)

        if not token:
            raise GeneralException(f'Token not found for clp: {clp_id}')

        # get search data
        raw_logs = get_search_data(token, post_data)

        # transform data
        transformed_logs = _transform_logs(clp_id, detections, raw_logs)

        # send to log analytics
        log_analytics = LogAnalytics(WORKSPACE_ID, WORKSPACE_KEY, OAT_LOG_TYPE)
        log_analytics.post_data(transformed_logs)
        logging.info(f'Sent OAT data successfully. Count: {len(transformed_logs)}.')

    except HTTPError as e:
        logging.exception(f'Failed to get search data! Exception: {e}')
        raise
    except Exception:
        logging.exception('Internal error.')
        raise
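The handler above relies on an OATQueueMessage pydantic model (not shown) exposing clp_id, detections, and post_data. A minimal sketch of what that model might look like; only the field names come from the handler, the types are assumptions:

from typing import Any, Dict, List

from pydantic import BaseModel


class OATQueueMessage(BaseModel):
    # Field names match the attributes read above; the types are guesses.
    clp_id: str
    detections: List[Dict[str, Any]]
    post_data: Dict[str, Any]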
Code example #2
def main(msg: func.QueueMessage) -> None:
    body = msg.get_json()
    date = body['date']
    year, month, _ = date.split("T")[0].split("-")
    city = body['city']
    country = body['country']
    temperature = body['temperature']
    # Obtain HOST and MASTER_KEY from: https://portal.azure.com/#@[user_email]/resource/subscriptions/[subscription_id]/resourceGroups/[resource_group_name]/providers/Microsoft.DocumentDb/databaseAccounts/[db_account_name]/keys
    HOST = "//TODO"
    MASTER_KEY = "//TODO"
    # construct your own doc_link and document as needed. This is a sample.
    # Please see the readme for details regarding the code below.
    client = cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY})
    database_link = 'dbs/' + 'weather-data'
    collection_link = database_link + '/colls/' + 'weather-data'
    doc_id = year + "-" + month + "___" + city + "-" + country
    doc_link = collection_link + '/docs/' + doc_id
    try:
        document = {
            'id': doc_id,
            'city': city,
            'country': country,
            'Temperature_List': [str(temperature)],
            'Month': month,
            'Year': year
        }
        client.CreateItem(collection_link, document)
    except Exception:
        # Document already exists: read it, append the new reading, and upsert.
        response = client.ReadItem(doc_link)
        response["Temperature_List"].append(str(temperature))
        client.UpsertItem(collection_link, response)
    logging.info('Python queue trigger function processed a queue item.')
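The create-then-fallback above treats any CreateItem failure as "document already exists". For comparison, a sketch of the same read-modify-upsert flow with the azure-cosmos v4 SDK, which can catch that case explicitly; the partition key here is an assumption and must match the container's actual partition key path:

from azure.cosmos import CosmosClient, exceptions

client = CosmosClient(HOST, credential=MASTER_KEY)
container = client.get_database_client('weather-data').get_container_client('weather-data')

try:
    container.create_item(body=document)
except exceptions.CosmosResourceExistsError:
    # A document for this month/city already exists: append the new reading.
    existing = container.read_item(item=doc_id, partition_key=doc_id)
    existing['Temperature_List'].append(str(temperature))
    container.upsert_item(body=existing)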
Code example #3
def main(msgIn: func.QueueMessage):
    try:
        args = json.loads(msgIn.get_body())
    except Exception:
        args = msgIn.get_json()

    os.environ['__PW_ACTIVATION_ID'] = str(msgIn.id)
    if 'remote_invoker' in args:
        logger.info("Pywren v{} - Starting invoker".format(__version__))
        function_invoker(args)
    else:
        logger.info("Pywren v{} - Starting execution".format(__version__))
        function_handler(args)

    return {"Execution": "Finished"}
Code example #4
def main(rcaMsg: func.QueueMessage) -> None:
    try:
        payload = rcaMsg.get_json()
        clp_id = payload['clp_id']
        workbench_id = payload['workbench_id']
        task_id = payload['task_id']
        task_name = payload['task_name']
        target_guid = payload['target_guid']
        target_info = payload['target_info']

        token = utils.find_token_by_clp(clp_id, API_TOKENS)
        if not token:
            raise GeneralException(f'Token not found for clp: {clp_id}')

        rca_task_detail = get_rca_task_detail(token, task_id, target_guid)

        target_info = {
            'xdrCustomerID': clp_id,
            'taskId': task_id,
            'taskName': task_name,
            'agentEntity': target_info,
            'workbenchId': workbench_id
        }

        rca_task_result_log = transform_utils.transform_rca_result(target_info, rca_task_detail)

        if len(rca_task_result_log) > 0:
            log_type = configurations.get_rca_log_type()
            log_analytics = LogAnalytics(WORKSPACE_ID, WORKSPACE_KEY, log_type)
            log_analytics.post_data(
                rca_task_result_log
            )

        logging.info(
            f'Sent RCA data successfully. Task id: {task_id}, Task name: {task_name}, Target guid: {target_guid}'
        )
    except HTTPError as e:
        logging.exception(
            f'Failed to get RCA detail! Exception: {e}',
        )
        raise
    except Exception:
        logging.exception('Internal error.')
        raise
Code example #5
def main(msgIn: func.QueueMessage, msgOut: func.Out[func.QueueMessage]):
    try:
        args = json.loads(msgIn.get_body())
    except Exception:
        args = msgIn.get_json()

    os.environ['__LITHOPS_ACTIVATION_ID'] = str(msgIn.id)
    setup_lithops_logger(args['log_level'])

    if 'get_preinstalls' in args:
        logger.info("Lithops v{} - Generating metadata".format(__version__))
        runtime_meta = get_runtime_preinstalls()
        msgOut.set(json.dumps(runtime_meta))
    elif 'remote_invoker' in args:
        logger.info("Lithops v{} - Starting invoker".format(__version__))
        function_invoker(args)
    else:
        logger.info("Lithops v{} - Starting execution".format(__version__))
        function_handler(args)
Code example #6
def main_queue(msgIn: func.QueueMessage, msgOut: func.Out[func.QueueMessage]):
    try:
        payload = json.loads(msgIn.get_body())
    except Exception:
        payload = msgIn.get_json()

    setup_lithops_logger(payload['log_level'])

    os.environ['__LITHOPS_ACTIVATION_ID'] = str(msgIn.id)
    os.environ['__LITHOPS_BACKEND'] = 'Azure Functions (event)'

    if 'get_preinstalls' in payload:
        logger.info("Lithops v{} - Generating metadata".format(__version__))
        runtime_meta = get_runtime_preinstalls()
        msgOut.set(json.dumps(runtime_meta))
    elif 'remote_invoker' in payload:
        logger.info("Lithops v{} - Starting Azure Functions (event) invoker".format(__version__))
        function_invoker(payload)
    else:
        logger.info("Lithops v{} - Starting Azure Functions (event) execution".format(__version__))
        function_handler(payload)
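Both Lithops handlers dispatch on which keys the payload contains. A sketch of the three message shapes they distinguish; all field values here are placeholders:

# Ask the worker to report its runtime's preinstalled modules; the reply is written to msgOut.
meta_request = {'log_level': 'INFO', 'get_preinstalls': True}

# Ask the worker to act as a remote invoker that fans out further invocations.
invoker_request = {'log_level': 'INFO', 'remote_invoker': True}

# Any other payload is executed directly by function_handler.
exec_request = {'log_level': 'INFO'}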
Code example #7
def main(msg: func.QueueMessage) -> None:
    jsonDict = msg.get_json()  # Parse QueueMessage to Python object.
    module_name = jsonDict['title'].split('_')[0]  # Get module name.
    # Build a Markdown-format link that links to the product page.
    caption = '[' + jsonDict['title'] + ']' + '(' +\
        'https://www.freitag.ch/en/' + module_name +\
        '?productID=' + jsonDict['RowKey'] + ')'
    # Build up data that will be sent out.
    data = {
        'chat_id': CHANNEL_ID,
        'media': [
            getInputMediaPhoto(src, caption)
            for src in jsonDict['product_cover_photo']
        ]
    }

    url = 'https://api.telegram.org/bot' + BOT_TOKEN + '/sendMediaGroup'
    # Post data as JSON to Telegram API.
    requests.post(url, json=data)

    logging.info('Python queue trigger function processed a queue item.')
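getInputMediaPhoto is not shown in this snippet. Telegram's sendMediaGroup endpoint expects an array of InputMediaPhoto objects, so a plausible implementation (an assumption, not the original helper) would be:

def getInputMediaPhoto(src, caption):
    # Telegram InputMediaPhoto object; parse_mode renders the Markdown link in the caption.
    return {
        'type': 'photo',
        'media': src,
        'caption': caption,
        'parse_mode': 'Markdown',
    }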
Code example #8
def main(req: func.QueueMessage) -> func.HttpResponse:
    try:
        logging.info("SendGrid email triggered.")

        logging.debug("Parsing message data from request body")
        body = req.get_json()
        from_email = Email(body["from"])
        to_email = To(body["to"])
        subject = Subject(body["subject"])
        template_id = TemplateId(body["template_id"])

        logging.debug("Getting template value substitutions")
        substitutions = []
        for substitution_key, substitution_value in body["substitutions"].items():
            message_substitution = Substitution(
                key=substitution_key, value=substitution_value)
            substitutions.append(message_substitution)

        logging.info("Message contents parsed from request input.")
        sg = sendgrid.SendGridAPIClient(
            api_key=environ.get('SENDGRID_API_KEY'))
        logging.info("SendGrid client initialized")
        mail = Mail(from_email=from_email,
                    to_email=to_email,
                    subject=subject,
                    global_substitutions=substitutions)
        mail.template_id = template_id
        logging.info("Message initialized")
        response = sg.client.mail.send.post(request_body=mail.get())
        logging.info("Message sent!")

        return func.HttpResponse(body=json.dumps(response.body),
                                 status_code=response.status_code,
                                 mimetype="application/json")

    except Exception as email_exception:
        logging.error("Error sending email!")
        logging.error(email_exception)
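For reference, the queue message body this handler expects would look something like the following; all values are placeholders, and the substitution keys depend on the SendGrid template:

example_body = {
    "from": "sender@example.com",
    "to": "recipient@example.com",
    "subject": "Welcome!",
    "template_id": "d-0123456789abcdef0123456789abcdef",
    "substitutions": {
        "-name-": "Ada",
        "-link-": "https://example.com/activate"
    }
}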
Code example #9
def main(msg: func.QueueMessage):
    message_json = msg.get_json()
    runner = DBTRunner()
    runner.go(**message_json)
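Because the body is splatted into runner.go(**message_json), the queue message must be a JSON object whose keys match go's keyword parameters. A hypothetical message; the key names are invented, since DBTRunner's signature is not shown:

import json

# Keys must match DBTRunner.go's keyword arguments (hypothetical names).
message_body = json.dumps({'command': 'run', 'target': 'prod'})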
Code example #10
def main(msg: func.QueueMessage):
    result = json.dumps({
        'id': msg.id,
        'body': msg.get_json(),
        'expiration_time':
            msg.expiration_time.isoformat() if msg.expiration_time else None,
        'insertion_time':
            msg.insertion_time.isoformat() if msg.insertion_time else None,
        'time_next_visible':
            msg.time_next_visible.isoformat() if msg.time_next_visible else None,
        'pop_receipt': msg.pop_receipt,
        'dequeue_count': msg.dequeue_count
    })

    # get the JSON from the queue
    res = json.loads(result)

    # create empty list for bulk update
    payload_file = []

    glossaryGuid = os.environ.get('glossaryGuid')

    # determine whether this is a single-term update or a bulk update
    singleTerm = len(res['body']) == 1

    # purviewcli is very verbose, so we suppress its stdout and log via logging.warning;
    # the createGlossaryItems function is configured to log only warnings in 'host.json'
    logging.warning("singleTerm: %s" % (singleTerm))

    # parse the json
    for objBody in res['body']:
        termValue = parse_json_recursively(objBody, "termName")

        if termValue is not None:
            importTerm = True
            # Term names with mixed upper/lower case can cause the import to fail - set the first character to lower case
            termValue = termValue[0].lower() + termValue[1:]
            # A term name can't contain a period (".") in the Azure Purview catalog, so replace it with a space
            termValue = termValue.replace(".", " ")
            termName = "--termName=" + unicodedata.normalize('NFKD', termValue)

            #check if term already exists in catalog
            sys.argv = [
                "pv", "search", "query",
                "--keywords=\"" + re.escape(termValue) + "\""
            ]
            f = io.StringIO()
            with redirect_stdout(f):
                try:
                    pv.main()
                except Exception:
                    logging.error("failed to search Azure Purview")
                    logging.warning("sys.argv = %s" % (sys.argv))
                    logging.error(sys.exc_info()[0])
                    logging.exception("The exception is:")

            # if the search count is 0 the term doesn't exist; if it is greater than zero, parse the JSON for an exact match
            if int(json.loads(f.getvalue())['@search.count']) > 0:
                for element in json.loads(f.getvalue())['value']:
                    qualifiedName = parse_json_recursively(element, 'name')
                    # try all combinations of escaping
                    if qualifiedName != None and (qualifiedName == termValue \
                    or qualifiedName == re.escape(termValue) \
                    or re.escape(qualifiedName) == re.escape(termValue) \
                    or re.escape(qualifiedName) == termValue):
                        importTerm = False
                        logging.warning(
                            "term %s already exists and will not be imported" %
                            (termValue))

            # The term will be imported if it doesn't already exist in the catalog
            if importTerm:
                # get the description
                definitionValue = parse_json_recursively(
                    objBody, "longDescription")
                if definitionValue is not None:
                    definitionName = "--longDescription=" + unicodedata.normalize(
                        'NFKD', definitionValue)
                    if not singleTerm:
                        # check that termName is not already in the list
                        if termName.replace(
                                '"', "") not in json.dumps(payload_file):
                            logging.warning(
                                "Term Name %s will be bulk-imported" %
                                (termValue))
                            # append termName and definitionName to the payload file that will be bulk imported
                            anchor = {"glossaryGuid": glossaryGuid}
                            json_file = {}
                            json_file['anchor'] = anchor
                            json_file[
                                'longDescription'] = definitionName.replace(
                                    '"', "")
                            json_file['name'] = termName.replace('"', "")
                            payload_file.append(json_file)
                    else:
                        # import a single term straight away
                        sys.argv = [
                            "pv", "glossary", "createTerm", glossaryGuid,
                            termName.replace('"', ""), "--longDescription=" +
                            definitionValue.replace('"', "")
                        ]
                        logging.warning(
                            "Term Name %s will be imported as a single item" %
                            (termValue))
                        anchor = {"glossaryGuid": glossaryGuid}
                        json_file = {}
                        json_file['anchor'] = anchor
                        json_file['longDescription'] = definitionName.replace(
                            '"', "")
                        json_file['name'] = termName.replace('"', "")
                        singlePayload = []
                        singlePayload.append(json_file)

                        fileName = str(randrange(6000)) + 'payload.json'
                        tempFilePath = os.path.join(tempfile.gettempdir(),
                                                    fileName)
                        with open(tempFilePath, "w+") as f:
                            f.write(json.dumps(singlePayload))
                        sys.argv = [
                            "pv", "glossary", "createTerm",
                            "--payload-file=" + tempFilePath
                        ]
                        logging.warning(
                            "Term will be imported: The Arguments are: %s " %
                            (sys.argv))
                        pv.main()
                else:
                    logging.error("no description found for the term %s" %
                                  (termValue))

    logging.warning("Number of items parsed is: %s" % (len(res['body'])))
    # bulk update
    if (len(payload_file) > 0):
        fileName = str(randrange(6000)) + 'payload.json'
        tempFilePath = os.path.join(tempfile.gettempdir(), fileName)
        with open(tempFilePath, "w+") as f:
            f.write(json.dumps(payload_file))
        sys.argv = [
            "pv", "glossary", "createTerms", "--payload-file=" + tempFilePath
        ]

        # update Purview
        logging.warning(
            "Terms will be imported into the catalog. The arguments are:")
        logging.warning(sys.argv)
        try:
            f = io.StringIO()
            with redirect_stdout(f):
                pv.main()
            logging.warning("Results of the glossary update are:")
            logging.warning(f.getvalue())
            # add some logging for debugging purposes
            logging.warning("payload length is : %s" % (len(payload_file)))
        except AttributeError as error:
            # Output expected AttributeErrors.
            logging.error(
                "failed to update Azure Purview because of an attribute error")
            logging.exception(error)
            logging.error(sys.exc_info()[0])
        except Exception:
            logging.error("failed to update Azure Purview")
            logging.error(sys.exc_info()[0])
            logging.error("Results of the glossary update are:")
            logging.error(f.getvalue())
            # add some logging for debugging purposes
            logging.warning("payload length is : %s" % (len(payload_file)))
            logging.exception("The exception is:")
Code example #11
def main(wbMsg: func.QueueMessage, rcaMsg: func.Out[typing.List[str]]) -> None:
    try:
        payload = wbMsg.get_json()

        clp_id = payload['clp_id']
        workbench_record = payload['workbench_record']
        workbench_id = workbench_record['workbenchId']

        token = utils.find_token_by_clp(clp_id, API_TOKENS)

        if not token:
            raise GeneralException(f'Token not found for clp: {clp_id}')

        # get workbench detail
        workbench_detail = get_workbench_detail(token, workbench_id)

        # transform data
        customized_workbench_json = customize_json(
            clp_id, workbench_detail, workbench_record
        )

        # send to log analytics
        log_analytics = LogAnalytics(WORKSPACE_ID, WORKSPACE_KEY, WB_LOG_TYPE)
        log_analytics.post_data(customized_workbench_json)
        logging.info(f'Sent workbench data successfully. Workbench id: {workbench_id}.')

        rca_tasks = []
        rca_task_log = []

        # get rca tasks
        rca_raw_tasks = get_rca_task(token, workbench_id)

        for task in rca_raw_tasks:
            task_status = task['status']
            if task_status != 'PROCESS_COMPLETE':
                logging.warning(
                    f'Get rca task with status: {task_status}, Workbench id: {workbench_id}. No need to get rca detail.'
                )
                continue
            
            # process rca task info
            rca_task_log.append(transform_utils.transform_rca_task(clp_id, workbench_id, task))

            for target in task['targets']:
                target_status = target['targetStatus']

                if target_status != 'PROCESS_COMPLETE':
                    logging.warning(
                        f'Get rca target with status: {target_status}, Workbench id: {workbench_id}. No need to get rca detail.'
                    )
                    continue
                target_info = target.copy()
                target_info.pop('targetStatus')

                rca_tasks.append(
                    build_queue_message(
                        clp_id, workbench_id, task['id'], task['name'], target['guid'], target_info
                    )
                )

        if len(rca_task_log) > 0:
            log_analytics = LogAnalytics(WORKSPACE_ID, WORKSPACE_KEY, RCA_TASK_LOG_TYPE)
            log_analytics.post_data(rca_task_log)
            logging.info(f'Sent RCA task data successfully. Workbench id: {workbench_id}, Count: {len(rca_task_log)}.')

        if rca_tasks:
            rcaMsg.set(rca_tasks)

    except HTTPError as e:
        logging.exception(
            f'Failed to get workbench detail! Exception: {e}',
        )
        raise
    except Exception:
        logging.exception('Internal error.')
        raise
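build_queue_message is not shown, but example #4 consumes the messages it produces and reads the keys clp_id, workbench_id, task_id, task_name, target_guid, and target_info. A sketch consistent with both ends; returning a JSON string is an assumption based on rcaMsg being typed func.Out[typing.List[str]]:

import json

def build_queue_message(clp_id, workbench_id, task_id, task_name, target_guid, target_info):
    # Serialize one RCA target into the payload shape consumed by example #4.
    return json.dumps({
        'clp_id': clp_id,
        'workbench_id': workbench_id,
        'task_id': task_id,
        'task_name': task_name,
        'target_guid': target_guid,
        'target_info': target_info,
    })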