Exemplo n.º 1
0
async def runReport(report):
    """Run a BlueFolder API report and load the result into BigQuery.

    Looks up the report's configuration in the module-level ``payload``
    dict (save path, target table, id column), pulls the data through
    ``bluefolderAPI`` and, when the API returned rows, delete-loads the
    matching rows in BigQuery before loading the fresh JSON file.

    Args:
        report: key into the module-level ``payload`` configuration dict.
    """
    logging.info("Running report {}...".format(report))

    conf = payload[report]
    filename = "/tmp/{}.json".format(conf['saveTo'])

    client = bluefolderAPI()
    client.runAPI(conf)
    await asyncio.sleep(1)
    client.saveToCsv(filename)

    if client.response is not None:
        id_col = conf['id_column']
        ids = [row[id_col] for row in client.response]
        if conf['id_column_is_string']:
            # String ids must be quoted for the delete query.
            # NOTE(review): no escaping is applied — assumes ids never
            # contain single quotes; confirm upstream.
            ids = ["'" + value + "'" for value in ids]

        db = lib_bigquery.bigqueryWrapper()
        db.settings['table'] = conf['table']
        db.deleteLoad(id_col, ids)
        db.load_json_from_file(filename)
        await asyncio.sleep(1)
    else:
        # BUG FIX: previously hard-coded 'serviceRequests' in the message
        # and called an undefined `logger` while the rest of this function
        # uses the `logging` module directly.
        logging.info("Skipping load for {}".format(report))

    logging.info("Running report {}... Done".format(report))
    async def runReport(self, reportName, outputFile):
        """Pull *reportName* from the ActiveCampaign API and load it into BigQuery.

        Pages through the endpoint 100 records at a time, writes the rows
        as newline-delimited JSON to *outputFile*, then delete-loads them
        into the BigQuery table named after the report.

        Args:
            reportName: one of "deals", "contacts", "automations",
                "contactAutomations" or "dealStages".
            outputFile: path of the NDJSON file to (re)create and load.

        Raises:
            ValueError: if *reportName* is not a recognized endpoint.
        """

        def runAPI():
            """Page through the endpoint; return the merged response dict or None."""
            print(reportName)
            offset = 0
            merged = None
            while True:
                # Shared paging parameters for every paged endpoint.
                params = {
                    'offset': str(offset),
                    'limit': '100',
                    'created_after': self.timestamp
                }
                if reportName == "deals":
                    data = self.deals.list_all_deals(**params)
                elif reportName == "contacts":
                    data = self.contacts.list_all_contacts(**params)
                elif reportName == 'automations':
                    data = self.automations.list_all_automations(**params)
                elif reportName == 'contactAutomations':
                    # Not a paged endpoint; return its payload as-is.
                    return self.contacts.list_all_automations_a_contact_is_in()
                elif reportName == 'dealStages':
                    data = self.deals.list_all_stages(**params)
                else:
                    raise ValueError("Not recognized endpoint report type")

                if len(data[reportName]) > 0:
                    if merged is None:
                        merged = dict(data)
                    else:
                        merged[reportName].extend(data[reportName])
                    offset += 100
                else:
                    print("Returning")
                    return merged

        data = runAPI()

        await asyncio.sleep(1)

        def check(data):
            """True when the response is a dict holding at least one row."""
            return (data is not None
                    and isinstance(data, dict)
                    and len(data[reportName]) > 0)

        def deleteOldfile():
            """Best-effort removal of a previous output file."""
            try:
                os.remove(outputFile)
            except OSError:
                # BUG FIX: was a bare `except:` that also swallowed
                # KeyboardInterrupt/SystemExit.
                logger.info("Old file not found not removed")

        def parseDic(rowDic):
            """Serialize nested values and truncate TIMESTAMP columns in place."""
            timestampFields = [
                k for k, v in lib_bigquery.settingsJson['schema']
                [reportName].items() if v[0] == "TIMESTAMP"
            ]
            for k, v in rowDic.items():
                if isinstance(v, (list, dict)):
                    # Nested structures are stored as JSON strings.
                    rowDic[k] = json.dumps(v)
                elif k in timestampFields and rowDic[k] is not None:
                    # ActiveCampaign uses an all-zero date for "never".
                    if str(rowDic[k])[:19] == "0000-00-00 00:00:00":
                        rowDic[k] = None
                    else:
                        rowDic[k] = str(rowDic[k])[:19]
            return rowDic
            # BUG FIX: removed unreachable leftover code that followed this
            # return in the original (a dangling json.dumps(value) fragment).

        # Write to a file
        if check(data):
            deleteOldfile()
            # BUG FIX: json.dump(data, open(...)) leaked the file handle.
            with open("debug" + reportName + ".json", 'w') as debugFile:
                json.dump(data, debugFile)
            with open(outputFile, 'a') as writeFile:
                for dic in data[reportName]:
                    writeFile.write(json.dumps(parseDic(dic)) + "\n")
            db = lib_bigquery.bigqueryWrapper()
            db.settings['table'] = reportName
            db.deleteLoad("id", [x['id'] for x in data[reportName]])
            db.load_json_from_file(outputFile)
        else:
            logging.info(
                "Report {} did not return any data".format(reportName))
 def __init__(self, URL: str, API_KEY: str):
     """Create the wrapper: BigQuery client plus a 6-month lookback window."""
     self.db = lib_bigquery.bigqueryWrapper()
     # Only records created within roughly the last six months are pulled.
     self.timestamp = datetime.datetime.now() - datetime.timedelta(days=180)
     super().__init__(URL, API_KEY)
Exemplo n.º 4
0
def postLog(payload: dict):
    """Append a single log record (*payload*) to the BigQuery ``Logs`` table."""
    writer = lib_bigquery.bigqueryWrapper()
    writer.settings['table'] = "Logs"
    writer.loadRows([payload])
Exemplo n.º 5
0
async def runReport(report, parameters):
    """Run a Forecast API report and load the result into BigQuery.

    When *report* contains the literal ``/{}/`` placeholder it is a child
    endpoint: this coroutine waits until the parent report has published
    its ids in the module-level ``globalCache`` and runs one call per
    parent id. Rows are saved to /tmp as newline-delimited JSON, matching
    rows are delete-loaded in BigQuery, and an optional post-processing
    query runs afterwards.

    Args:
        report: endpoint/report name, optionally containing ``/{}/``.
        parameters: per-report config (api_url, api_call, id_field,
            optionally parent_node and post_processing_query).
    """
    # Run API
    HanlderInstance = Hanlder(parameters['api_url'])

    if "/{}/" in report or "parent_node" in parameters:
        assert "/{}/" in report, "Report name does not contain /{}/ parent node placeholder"
        assert "parent_node" in parameters, "'parent_node' key was not found within forecast.py keys"
        filename = "/tmp/" + report.strip().replace("/{}/", "_") + ".json"

        # Poll until the parent report has cached its ids.
        while parameters['parent_node'] not in globalCache:
            await asyncio.sleep(5)

        # Run the API for sub-items, one call per parent id.
        HanlderInstance.runAPI(parameters['api_call'], report, parameters,
                               globalCache[parameters['parent_node']])
        await asyncio.sleep(1)
    else:
        filename = "/tmp/" + report + ".json"
        # Run the API for regular parent-level items.
        HanlderInstance.runAPI(parameters['api_call'], report, parameters)
        await asyncio.sleep(1)

    # Database
    db = lib_bigquery.bigqueryWrapper()
    if "{}" in report:
        db.settings['table'] = report.strip().replace("/{}/", "_")
    else:
        db.settings['table'] = report

    # Save handler data, keeping only fields defined within the schema.
    HanlderInstance.saveData(
        filename,
        list(db.settings['schema'][db.settings['table']]))

    db.AddTable()

    # Publish this report's ids so dependent child reports can proceed.
    globalCache[report] = [
        x[parameters["id_field"]] for x in HanlderInstance.data
    ]
    db.deleteLoad(
        parameters["id_field"],
        [str(x[parameters["id_field"]]) for x in HanlderInstance.data])

    # Free the handler payload before the (potentially large) load job.
    del HanlderInstance.data
    db.load_json_from_file(filename)

    if "post_processing_query" in parameters:
        db.runQuery(parameters["post_processing_query"].format(
            db.settings['projectName'], db.settings['dataset']))