def main():
    """Sync Cuenote delivery-job details into TD and queue log exports.

    Pages through ``getDelivList``, accumulates per-job attributes into the
    module-level ``jobinfo`` dict-of-lists, overwrites the ``jobinfo`` table,
    then requests log exports (``startExport``) for deliveries from the last
    14 days and stores the export ids in the ``queue`` table.
    """
    # Create a TD client instance.
    client = pytd.Client(apikey=TD_API_KEY,
                         endpoint=TD_API_SERVER,
                         database=TD_DATABASE)

    # Retrieve the details for each job. A page that comes back full
    # (fetched == limit) means another page may follow.
    fetched = limit
    page = 1
    while fetched == limit:
        fetched = 0
        listing = cuenote.call_api("getDelivList", {
            "limit": str(limit),
            "page": str(page)
        })
        for delivery in listing.iter("deliv_jobqueue"):
            columns = jobinfo.keys()
            detail = cuenote.call_api(
                "getDelivInfo", {"delivid": delivery.attrib["delivid"]})
            for info in detail.iter("jobinfo"):
                for column in columns:
                    jobinfo[column].append(
                        cuenote.format_value(column, info.attrib[column]))
                fetched += 1
        page += 1

    # Columns become DataFrame rows via the index, then transpose back.
    df_jobinfo = pandas.DataFrame(jobinfo.values(), index=jobinfo.keys()).T

    # Request Cuenote to generate logs for each recent delivery
    # (delivtime within the last 14 days, compared in epoch seconds).
    expids = {"expid": []}
    for i in range(len(jobinfo["delivid"])):
        if jobinfo["delivtime"][i] >= int(time.time()) - (60 * 60 * 24 * 14):
            response = cuenote.call_api("startExport", {
                "delivid": jobinfo["delivid"][i],
                "strcode": "utf8"
            })
            for expid in response.iter("expid"):
                expids["expid"].append(int(expid.text))
    df_expids = pandas.DataFrame(expids.values(), index=expids.keys()).T

    # Refresh the Job Info table.
    client.load_table_from_dataframe(df_jobinfo, "jobinfo",
                                     writer="bulk_import",
                                     if_exists="overwrite")
    # Insert export ids into the queue table (skip when nothing was queued).
    if expids["expid"]:
        client.load_table_from_dataframe(df_expids, "queue",
                                         writer="bulk_import",
                                         if_exists="overwrite")
def main():
    """Sync Cuenote delivery-job details into TD via the cursor-based JSON API.

    Walks the ``delivery`` endpoint 50 records at a time until
    ``next_cursor`` is ``None``, accumulating each job's attributes into the
    module-level ``jobinfo`` dict-of-lists, then overwrites the ``jobinfo``
    table in Treasure Data.
    """
    # Create a TD client instance.
    client = pytd.Client(apikey=TD_API_KEY,
                         endpoint=TD_API_SERVER,
                         database=TD_DATABASE)
    # Retrieve the details for each job.
    cursor = 0
    keys = jobinfo.keys()
    while cursor is not None:
        result = cuenote.call_api("delivery", {
            "limit": "50",
            "cursor": str(cursor)
        })
        jobs = result.json()
        for job in jobs["list"]:
            for key in keys:
                if key in job:
                    jobinfo[key].append(cuenote.format_value(key, job[key]))
                else:
                    # BUG FIX: the original `jobinfo[key] += ""` extended the
                    # list with an empty string — i.e. appended NOTHING — so a
                    # missing key left this column shorter than the others and
                    # misaligned every later row in the DataFrame below.
                    # Append an actual placeholder to keep columns in lockstep.
                    jobinfo[key].append("")
        cursor = jobs["next_cursor"]
    # Columns become DataFrame rows via the index, then transpose back.
    df_jobinfo = pandas.DataFrame(jobinfo.values(), index=jobinfo.keys()).T
    # Refresh the Job Info table.
    client.load_table_from_dataframe(df_jobinfo, "jobinfo",
                                     writer="bulk_import",
                                     if_exists="overwrite")
def main():
    """Download generated Cuenote log exports and stage them in Treasure Data.

    Reads pending export ids from the ``queue`` table, polls
    ``getExportStatus`` for each, downloads every finished log file
    (click-count and delivery logs), normalizes column names and timestamp
    columns, appends the rows to the matching ``*_stg`` staging table, and
    finally removes the processed expid from the queue.
    """
    # Create a TD client instance.
    client = pytd.Client(apikey=TD_API_KEY,
                         endpoint=TD_API_SERVER,
                         database=TD_DATABASE)
    # Download log files from Cuenote, then upload CSVs to TD
    expids = client.query("SELECT expid FROM queue")
    # query() result rows come back as sequences; expid[0] is the id value.
    for expid in expids["data"]:
        for export in cuenote.call_api("getExportStatus", {
                "expid": str(expid[0])
        }).iter("export"):
            # Each child of <export> is one log file; the element tag
            # (log_clickcount / log_deliv) selects the column layout below.
            for item in export:
                csv = cuenote.download_log(item.attrib["url"])
                df = pandas.read_csv(io.StringIO(csv),
                                     header=0,
                                     encoding="UTF-8")
                # Added after parsing, so it lands as the LAST column —
                # the rename lists below rely on that position.
                df["delivid"] = item.attrib["delivid"]
                if item.tag == "log_clickcount":
                    df.columns = [
                        "clicked_at",
                        "clicked_url",
                        "email_address",
                        "click_count",
                        "member_id",
                        "delivid",
                    ]
                    df["clicked_at"] = pandas.to_datetime(df["clicked_at"])
                elif item.tag == "log_deliv":
                    df.columns = [
                        "email_address_id",
                        "email_address",
                        "status_updated_at",
                        "status_loc",
                        "status",
                        "mx_host_name",
                        "connection_ip_port",
                        "smtp_status_updated_at",
                        "smtp_status_loc",
                        "smtp_status",
                        "smtp_response",
                        "bounce_received_at",
                        "bounce_type",
                        "bounce_summary",
                        "bounce_content",
                        "bounce_address",
                        "bounce_log_id",
                        "unreachable_at",
                        "all_retries",
                        "first_retry",
                        "last_retry",
                        "retry_count",
                        "last_retry_status_loc",
                        "last_retry_status",
                        "last_retry_response",
                        "member_id",
                        "delivid",
                    ]
                    # Parse all event timestamps so TD receives real datetimes.
                    df["status_updated_at"] = pandas.to_datetime(
                        df["status_updated_at"])
                    df["smtp_status_updated_at"] = pandas.to_datetime(
                        df["smtp_status_updated_at"])
                    df["bounce_received_at"] = pandas.to_datetime(
                        df["bounce_received_at"])
                    df["unreachable_at"] = pandas.to_datetime(
                        df["unreachable_at"])
                # Skip empty logs; bulk_import append would add nothing useful.
                if len(df) > 0:
                    client.load_table_from_dataframe(df,
                                                     item.tag + "_stg",
                                                     writer="bulk_import",
                                                     if_exists="append")
        # Dequeue only after every file for this export was staged.
        # NOTE(review): expid is interpolated into the SQL directly —
        # presumably always an integer from our own table; verify.
        client.query("DELETE FROM queue WHERE expid = {0}".format(
            expid[0]))
def main():
    """Download Cuenote delivery and click logs and stage them in TD.

    For every delivery whose ``delivery_time`` falls within the last
    ``days_refresh_logs`` days, fetches the delivery log and the click log
    from the Cuenote REST API, parses the CSV payloads, normalizes the
    timestamp columns, and appends the rows to the ``log_deliv_stg`` /
    ``log_clickcount_stg`` staging tables.
    """
    # Create a TD client instance.
    client = pytd.Client(apikey=TD_API_KEY,
                         endpoint=TD_API_SERVER,
                         database=TD_DATABASE)
    # Recent deliveries only; days_refresh_logs is baked into the TD_INTERVAL.
    delivery_ids = client.query(
        "select delivery_id from jobinfo where TD_INTERVAL(TD_TIME_PARSE(delivery_time), '-{days_refresh_logs}d')"
        .format(days_refresh_logs=days_refresh_logs))
    for delivery_id in delivery_ids["data"]:
        # Delivery Log
        _stage_log(
            client,
            "delivery/{delivery_id}/log".format(delivery_id=delivery_id[0]),
            delivery_id[0],
            [
                "email_address_id", "email_address", "status_updated_at",
                "status_loc", "status", "mx_host_name", "connection_ip_port",
                "smtp_status_updated_at", "smtp_status_loc", "smtp_status",
                "smtp_response", "bounce_received_at", "bounce_type",
                "bounce_summary", "bounce_content", "bounce_address",
                "bounce_log_id", "unreachable_at", "all_retries",
                "first_retry", "last_retry", "retry_count",
                "last_retry_status_loc", "last_retry_status",
                "last_retry_response", "device", "content",
                "additional_information", "delivery_id"
            ],
            [
                "status_updated_at", "smtp_status_updated_at",
                "bounce_received_at", "unreachable_at"
            ],
            "log_deliv_stg")
        # Click Log
        _stage_log(
            client,
            "delivery/{delivery_id}/log/click".format(
                delivery_id=delivery_id[0]),
            delivery_id[0],
            [
                "clicked_at", "clicked_url", "email_address", "type",
                "click_count", "device", "content", "additional_information",
                "delivery_id"
            ],
            ["clicked_at"],
            "log_clickcount_stg")


def _stage_log(client, endpoint, delivery_id, columns, datetime_columns,
               table):
    """Fetch one CSV log from Cuenote and append its rows to *table*.

    Parses the CSV response body, tags every row with *delivery_id* (added
    after parsing so it lands as the LAST column — *columns* must list it
    last), renames the columns, converts each name in *datetime_columns*
    to real datetimes, and bulk-appends non-empty frames to *table*.
    """
    result = cuenote.call_api(endpoint, {"with_delivlog": "true"})
    df = pandas.read_csv(io.BytesIO(result.content),
                         header=0,
                         encoding="UTF-8")
    df["delivery_id"] = delivery_id
    df.columns = columns
    for column in datetime_columns:
        df[column] = pandas.to_datetime(df[column])
    # Skip empty logs; bulk_import append would add nothing useful.
    if len(df) > 0:
        client.load_table_from_dataframe(df,
                                         table,
                                         writer="bulk_import",
                                         if_exists="append")