Code Example #1
    @classmethod
    def setUpClass(cls):
        # shared Logger instance for the whole test class
        cls.logger = Logger()
Code Example #2
    @classmethod
    def setUpClass(cls):
        # logname_prefix is expected to be defined on the test class
        cls.logger = Logger(logname=cls.logname_prefix)
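A minimal self-contained sketch of the unittest pattern these two fragments come from; the test class and body are hypothetical, assuming datacoco_core's Logger.

# minimal sketch, assuming datacoco_core's Logger; the class and test body
# are hypothetical illustrations of the setUpClass pattern above
import unittest

from datacoco_core.logger import Logger


class LoggerSmokeTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # one shared Logger instance for every test in the class
        cls.logger = Logger()

    def test_can_log(self):
        self.logger.l("running test")  # Logger.l() is the log call used throughout these examples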
Code Example #3
    def run_script(
        self,
        script,
        from_date=None,
        to_date=None,
        batch_id=None,
        params=None,
        batchy_job=None,
        sql_command=None,
    ):
        """
        method for expanding and running sql statements
        :param script:
        :param from_date:
        :param to_date:
        :param batch_id:
        :param params:
        :return:
        """

        paramset = {}

        # set up logger
        if script is not None:
            script_path, script_filename = os.path.split(script)
            logger = Logger(logname=script_filename)
        else:
            logger = Logger(logname="direct_sql")
            sys.excepthook = logger.handle_exception

        # first we retrieve batchy params and load them into a dict;
        # any additional params specified will override them
        if batchy_job:
            wf = batchy_job.split(".")[0]
            try:
                # fall back to "global" when the job segment is missing or empty
                job = batchy_job.split(".")[1] or "global"
            except IndexError as e:
                logger.l(e)
                job = "global"
            batchy_params = Batch(
                wf,
                server=self.batchy_server,
                port=self.batchy_port,
            ).get_status()
            paramset.update(batchy_params[job])

        # next we apply custom params and special metadata fields;
        # these override batchy params if specified.
        # convert string params to dict
        if isinstance(params, str):
            try:
                # split each "key-value" pair on the first "-" only, so values
                # containing dashes (e.g. dates) are kept intact
                params = dict(
                    (k.strip(), v.strip())
                    for k, v in (item.split("-", 1) for item in params.split(",")))
            except Exception as e:
                logger.l(f"issue parsing params: {e}")

        if isinstance(params, dict):
            paramset.update(params)

        if from_date:
            paramset["from_date"] = from_date
        if to_date:
            paramset["to_date"] = to_date
        if batch_id:
            paramset["batch_id"] = batch_id

        # now defaults for special metadata fields
        if paramset.get("from_date") is None:
            paramset["from_date"] = DEFAULT_FROM_DATE
        if paramset.get("to_date") is None:
            paramset["to_date"] = "9999-12-31"
        if paramset.get("batch_id") is None:
            paramset["batch_id"] = "-1"
        # we'll keep batch_no for backwards compatibility
        paramset["batch_no"] = paramset["batch_id"]

        # check whether it's direct SQL or a SQL file
        if not self.is_test and script is not None:
            try:
                with open(script) as f:
                    raw_sql = f.read()
            except OSError as e:
                msg = f"File not found, please check the file path: {e}"
                logger.l(msg)
                raise RuntimeError(msg)
        else:
            raw_sql = sql_command
        sql = self.expand_params(raw_sql, paramset)
        sql_message = ("\n\n--sql script start:\n" + sql +
                       "\n--sql script end\n\n")
        logger.l(sql_message)

        self.ms.batch_open()

        logger.l("starting script")
        try:
            self.ms.exec_sql(sql)
            self.ms.batch_commit()
            logger.l("batch commit")
        except Exception as e:
            logger.l("execution failed with error: " + str(e))
            raise RuntimeError(e)
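For reference, a hypothetical invocation of run_script. The runner instance and all values are illustrative assumptions; only the params string format ("key-value" pairs joined by commas) comes from the parsing logic above.

# hypothetical usage sketch; "runner" stands in for whatever class owns run_script
runner.run_script(
    script="sql/load_orders.sql",        # expanded against paramset, then executed
    from_date="2020-01-01",
    to_date="2020-12-31",
    batch_id="42",
    params="schema-analytics,env-prod",  # -> {"schema": "analytics", "env": "prod"}
)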
Code Example #4
import json
import requests
import argparse
import schedule
import time
import threading
import redis
import sys
import yaml
from datacoco_core.logger import Logger
from robopager.config_wrapper import ConfigWrapper
from robopager.check_type.daily_email_check import CheckEmails
from robopager.check_type.intraday_latency_check import CheckWF

# -------------------------------------------------------------

log = Logger()
KEY_PREFIX = "rp"

# -------------------------------------------------------------

# common functions


def parse_checklist(check_file):
    """
    Read a YAML file into a Python object.
    :param check_file:
    :return: checklist
    """
    with open(check_file, "r") as checklist_yaml:
        check_list = yaml.safe_load(checklist_yaml)
    return check_list
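A hypothetical call for orientation; the filename and iteration are assumptions, since the checklist schema itself isn't shown in this snippet.

# hypothetical usage: filename and mapping iteration are assumptions
check_list = parse_checklist("checklist.yaml")
for name, check in check_list.items():
    log.l(f"loaded check: {name} -> {check}")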
Code Example #5
# Email check function

import imaplib
import email
import sys
from datetime import datetime, date
import pytz
from datacoco_core.logger import Logger

log = Logger()


def convert_local_to_utc(t, timezone):
    # convert local time to utc (both are naive)
    local = pytz.timezone(timezone)
    local_dt = local.localize(t)
    utc_dt = local_dt.astimezone(pytz.utc)
    naive_utc = utc_dt.replace(tzinfo=None)
    return naive_utc
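# usage sketch (illustrative values, not part of the original):
#   convert_local_to_utc(datetime(2021, 1, 15, 9, 0), "America/New_York")
# returns datetime(2021, 1, 15, 14, 0), since 9:00 EST is 14:00 UTC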


class CheckEmails:
    """
    Scans and filters a Gmail inbox.
    """
    def __init__(
        self,
        username,
        password,
        subjects,
        senders,
Code Example #6
from datetime import datetime

from salesforce_bulk import SalesforceBulk
from salesforce_bulk.salesforce_bulk import BulkApiError
from simple_salesforce import Salesforce, SalesforceError
from salesforce_bulk.util import IteratorBytesIO

from datacoco_cloud.s3_interaction import S3Interaction
from datacoco_core.config import config
from datacoco_redis_tools.redis_tools import RedisInteraction
from datacoco_core.logger import Logger

# quick port from old coco for upload to fix engage sub in squidward
SALESFORCE_UPLOAD_URL = "https://{}/services/data/v29.0/sobjects/{}/"

LOG = Logger()


def parse_sf_records(nested_dict):
    """Recursively parse the nested dictionaries returned by Salesforce Simple API library,

    :param nested_dict: Nested dictionary object
    :return: Flattened dictionary representing record
    """
    for k, v in list(nested_dict.items()):
        if k == "attributes":
            nested_dict.pop(k)
        elif isinstance(v, dict):
            clean_dict = parse_sf_records(v)

            for child_key, clean_value in list(clean_dict.items()):
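The snippet above cuts off mid-loop. A self-contained sketch of the same flattening idea, reconstructed rather than recovered; the parent_child key-naming scheme is an assumption.

def flatten_sf_record(nested_dict):
    # minimal reconstruction of the pattern the truncated function begins:
    # drop Salesforce "attributes" metadata and hoist nested keys to the
    # top level; the parent_child key naming is an assumption
    flat = {}
    for k, v in nested_dict.items():
        if k == "attributes":
            continue  # Salesforce metadata, not record data
        if isinstance(v, dict):
            for child_key, child_value in flatten_sf_record(v).items():
                flat[f"{k}_{child_key}"] = child_value
        else:
            flat[k] = v
    return flat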
Code Example #7
    def run_script(
        self,
        script,
        from_date=None,
        to_date=None,
        batch_id=None,
        params=None,
        batchy_job=None,
    ):
        """
        Method for expanding and running sql statements
        :param script:
        :param from_date:
        :param to_date:
        :param batch_id:
        :param params:
        :return:
        """

        paramset = {}

        # set up logger
        script_path, script_filename = os.path.split(script)
        logger = Logger(logname=script_filename)
        sys.excepthook = logger.handle_exception

        # check that the script file exists (Path comes from pathlib)
        if not Path(script).is_file():
            e = "File not found, please check path"
            logger.l(e)
            raise RuntimeError(e)

        # first we retrieve batchy params and load them into a dict;
        # any additional params specified will override them
        if batchy_job:
            try:
                batchy_params = Batch(
                    batchy_job,
                    server=self.batchy_server,
                    port=self.batchy_port,
                ).get_status()
            except Exception as e:
                logger.l(str(e))
                raise Exception(str(e))

            if batchy_params is None:
                error = "Unable to connect to batchy"
                logger.l(error)
                raise Exception(error)

            if "error" in batchy_params:
                error = f"Batchy encountered an error: {batchy_params['message']}"
                logger.l(error)
                raise Exception(error)

            paramset.update(batchy_params["global"])

        # next we apply custom params and special metadata fields;
        # these override batchy params if specified.
        # convert string params to dict
        if isinstance(params, str):
            try:
                # split each "key-value" pair on the first "-" only, so values
                # containing dashes (e.g. dates) are kept intact
                params = dict(
                    (k.strip(), v.strip())
                    for k, v in (item.split("-", 1) for item in params.split(",")))
            except Exception as e:
                logger.l(f"issue parsing params: {e}")

        if isinstance(params, dict):
            paramset.update(params)

        if from_date:
            paramset["from_date"] = from_date
        if to_date:
            paramset["to_date"] = to_date
        if batch_id:
            paramset["batch_id"] = batch_id

        logger.l(f"resolved paramset: {paramset}")

        # now defaults for special metadata fields
        if paramset.get("from_date") is None:
            paramset["from_date"] = DEFAULT_FROM_DATE
        if paramset.get("to_date") is None:
            paramset["to_date"] = "9999-12-31"
        if paramset.get("batch_id") is None:
            paramset["batch_id"] = "-1"
        # we'll keep batch_no for backwards compatibility
        paramset["batch_no"] = paramset["batch_id"]

        with open(script) as f:
            raw_sql = f.read()
        sql = self.expand_params(raw_sql, paramset)
        sql_message = ("\n\n--sql script start:\n" + sql +
                       "\n--sql script end\n\n")
        logger.l(sql_message)
        logger.l("opening batch")
        self.pg.batch_open()
        logger.l("opened batch")
        logger.l("starting script")
        try:
            self.pg.exec_sql(sql)
            self.pg.batch_commit()
            logger.l("batch commit")
        except Exception as e:
            logger.l("execution failed with error: " + str(e))
            raise RuntimeError(e)
        finally:
            # close the batch whether the script succeeded or failed
            self.pg.batch_close()
            logger.l("batch closed")
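For orientation, the batchy status payload shape this method assumes, inferred from the checks above rather than from any batchy documentation.

# inferred from the checks above, not a documented schema
batchy_status_ok = {
    "global": {"from_date": "2021-01-01", "batch_id": "1001"},
}
batchy_status_err = {
    "error": True,                    # presence of "error" triggers the raise
    "message": "workflow not found",  # surfaced in the exception text
}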