def executeQueries(master, database, username, password, queryList, hostsFile, explain, adminUser, adminPassword, scale, emailAddress=""):
    """Execute the HAWQ benchmark queries and log per-query timings.

    queryList: list of query numbers as strings; a leading "0" entry means
        "run every query found on disk" instead of an explicit list.
    hostsFile/adminUser/adminPassword: used to clear OS buffers between
        queries so timings are comparable.
    explain: when truthy, prefix each query with "explain analyze " and log
        the resulting plan instead of only the row count.
    emailAddress: optional; when set, a mail is sent per query and a final
        report mail at the end.
    """
    loggerInfo = buildReportLogger("queries")
    reportName = loggerInfo[0]
    report = loggerInfo[1]
    explainString = "explain analyze " if explain else ""
    uniInfoLog("Query Execution Phase", report)
    uniInfoLog("Executing HAWQ Queries for a " + str(scale) + " Data Set", report)
    hawqURI = queries.uri(master, port=5432, dbname=database, user=username, password=password)
    queryLocations = []
    # BUGFIX: original used the Python-2-only "<>" operator and a redundant
    # int(int(...)) double conversion.
    if int(queryList[0]) != 0:
        for queryNum in queryList:
            uniInfoLog("Running Query " + queryNum, report)
            # Query files are zero-padded to two digits (query_01.sql, ...).
            if int(queryNum) < 10:
                queryNum = "0" + queryNum
            # BUGFIX: original appended the literal text '"+scale+"' into the
            # path instead of interpolating the scale variable.
            queryLocations.append('./hawq-ddl/queries/' + scale + '/query_' + str(queryNum) + '.sql')
    else:
        uniInfoLog("Running all Queries", report)
        queryLocations = sorted(glob.glob("./hawq-ddl/queries/" + scale + "/*.sql"))
    with queries.Session(hawqURI) as session:
        for query in queryLocations:
            # Drop OS caches on all hosts so each query starts cold.
            uniInfoLog(clearBuffers(hostsFile, adminUser, adminPassword), report)
            # BUGFIX: use a context manager; the original leaked the handle.
            with open(query, "r") as ddlFile:
                queryString = ddlFile.read()
            # "./hawq-ddl/queries/<scale>/query_NN.sql" -> "query_NN"
            queryName = ((query.split("/")[4]).split(".")[0]).rstrip()
            queryString = explainString + queryString
            startTime = time.time()
            result = session.query(queryString)
            if explain:
                queryPlan = ""
                uniInfoLog(result.query, report)
                for row in result:
                    queryPlan = queryPlan + str(row['QUERY PLAN'] + "\n")
                uniInfoLog(queryPlan, report)
            stopTime = time.time()
            queryTime = stopTime - startTime
            results = "Query Complete: %s Execution Time(s): %0.2f Rows Returned: %s" % (
                queryName, queryTime, str(result.count()))
            uniInfoLog(results, report)
            if emailAddress:
                Email.sendEmail(emailAddress, results[:25], results)
    if emailAddress:
        # Mail the whole accumulated report file as the final summary.
        with open(reportName, "r") as reportMsg:
            message = " ".join(reportMsg.readlines())
        Email.sendEmail(emailAddress, "Query Final Report: " + (reportName.split('/')[2])[:-4],
                        message)
def analyzeHawqTables(master, database, username, password, emailAddress=""):
    """Refresh planner statistics for the whole database via analyzedb.

    Runs the Greenplum/HAWQ ``analyzedb`` utility over SSH on the master
    host (-a answers prompts, -p 10 analyzes up to ten tables in parallel).
    emailAddress: optional; when set, a completion mail is sent.
    """
    loggerInfo = buildReportLogger("analyze")
    reportName = loggerInfo[0]
    report = loggerInfo[1]
    uniInfoLog("Analyze Database Tables to Generate Statistics", report)
    uniInfoLog("Analyzing HAWQ Tables", report)
    analyzeCMD = "analyzedb -d " + database + " -a -p 10"
    ssh.exec_command2(master, username, password, analyzeCMD)
    # NOTE: a previous per-table ANALYZE loop (dimensionTables/factTables via a
    # queries.Session) was superseded by analyzedb; the commented-out remnant
    # has been removed.
    if emailAddress:
        Email.sendEmail(emailAddress, "Table Analyze Final Report: " + (reportName.split('/')[2])[:-4], "AnalyzeDB Complete")
def invoice_send_email(entity, receiver, attachment):
    """Mail the tax-invoice check result for one entity.

    When *attachment* is None or an empty DataFrame, a plain "no issues"
    notice is sent; otherwise the irregular records are exported to an Excel
    file, mailed as an attachment, and the temporary file is deleted.
    """
    mail_client = Email()
    no_issues = attachment is None or attachment.empty
    if no_issues:
        subject = '[PAM Tax Checking] - {0} 发票无异常 {1}'.format(
            TIME['TODAY'], entity)
        content = 'Hi All,\r\n\r\n{}的发票无异常记录。\r\n\r\nThanks.'.format(entity)
        mail_client.send(subject=subject, content=content,
                         receivers=receiver, attachment=None)
    else:
        subject = '[PAM Tax Checking] - {0} 发票异常清单 {1}'.format(
            TIME['TODAY'], entity)
        content = 'Hi All,\r\n\r\n请查看附件关于{}的发票异常记录。\r\n\r\nThanks.'.format(
            entity)
        # Export the irregular records and attach the resulting workbook.
        export_path = df_to_excel(
            df=attachment,
            path=PATH['FILE_DIR'],
            file_name=PATH['ATTACHMENT_FILE'].format(TIME['TODAY'], entity),
            sheet_name=entity)
        mail_client.send(subject=subject, content=content,
                         receivers=receiver, attachment=export_path)
        logger.info('Delete attachment file.')
        os.remove(export_path)
    mail_client.close()
def loadHawqTables(master, username, password, database, emailAddress):
    """Load HAWQ internal tables from the external PXF tables.

    Runs every ./hawq-ddl/load/*.sql script, logs the load time, and
    cross-checks the loaded row count against the matching *_pxf external
    table. When emailAddress is set, a mail is sent per table and a final
    report mail at the end.
    """
    loggerInfo = buildReportLogger("load")
    reportName = loggerInfo[0]
    report = loggerInfo[1]
    uniInfoLog("Load HAWQ Internal Tables", report)
    hawqURI = queries.uri(master, port=5432, dbname=database, user=username, password=password)
    loadList = sorted(glob.glob('./hawq-ddl/load/*.sql'))
    for load in loadList:
        # BUGFIX: use a context manager; the original leaked one file handle
        # per iteration.
        with open(load, "r") as ddlFile:
            loadDDL = ddlFile.read()
        # "./hawq-ddl/load/<table>_load.sql" -> "<table>" (strip "_load")
        tableName = ((load.split("/")[3]).split(".")[0])[:-5]
        startTime = datetime.datetime.now()
        pxfName = tableName
        # Fact tables are loaded into their non-partitioned variants.
        if tableName in factTables:
            tableName = tableName + "_nopart"
        uniInfoLog("Starting Load of " + tableName, report)
        with queries.Session(hawqURI) as session:
            result = session.query(loadDDL)
        stopTime = datetime.datetime.now()
        uniInfoLog("Completed Load of " + tableName, report)
        uniInfoLog("Load Time: " + str(stopTime - startTime), report)
        # Sanity check: internal row count should match the external source.
        rowsPXF = rowCount(master, database, username, password, pxfName + "_pxf")
        rows = rowCount(master, database, username, password, tableName)
        uniInfoLog("Expected Rows: " + str(rowsPXF), report)
        uniInfoLog("Actual Rows   : " + str(rows), report)
        if emailAddress:
            Email.sendEmail(emailAddress, "Completed Load of " + tableName, "Loaded " + str(rows) + " Rows")
    if emailAddress:
        with open(reportName, "r") as reportMsg:
            message = " ".join(reportMsg.readlines())
        Email.sendEmail(emailAddress, "Table Load Final Report: " + (reportName.split('/')[2])[:-4], message)
def get(cls):
    """Fetch the blog index at cls.ORIGIN and email each post found.

    Scrapes every <article> element, builds a small HTML body from the
    post's content and link, and sends one mail per post with a 10 s pause
    between sends (presumably to stay under a mail-rate limit — confirm).
    """
    # Shared mail client; get_instance suggests a singleton — TODO confirm.
    email = Email.get_instance()
    r = requests.get(cls.ORIGIN)
    soup = BeautifulSoup(r.text, "html.parser")
    for post in soup.find_all("article"):
        header = post.find("header", attrs={"class": "post-header"})
        a = header.find("a")
        subject = f'[RisingStack] {a.get_text()}'
        # Post links are relative, so prefix the site origin.
        link = f'{cls.ORIGIN}{a.get("href")}'
        section = post.find("section", attrs={"class": "post-content"})
        content = f'''
        <html>
            <body>
                <p>{section.get_text()}</p>
                <a href="{link}">{link}</a>
            </body>
        </html>
        '''
        email.send(subject, content)
        # Throttle between messages.
        time.sleep(10)
def run(self):
    """Deliver all pending notification history entries, then mark them sent."""
    mailer = Email.Mailer(NotificationDaemon.MAIL_HOST,
                          NotificationDaemon.MAIL_PORT,
                          NotificationDaemon.MAIL_POSTMASTER)
    pending = self.ovtDB.getHistory()
    # id -> name mapping used while rendering each notification
    type_cache = self.ovtDB.getNotifyTypes()
    delivered_ids = []
    for entry in pending:
        self.handleHistoryEntry(entry, type_cache, mailer)
        delivered_ids.append(entry['historyid'])
    # Record everything we processed so it is not re-sent next run.
    self.ovtDB.setHistorySent(delivered_ids)
    del mailer
def analyzeHawqTables(master, database, username, password, emailAddress=""):
    """Run ANALYZE over all dimension and fact tables, logging elapsed times.

    One report line (and optional mail) is produced per table; a final
    summary mail with the whole report is sent when emailAddress is set.
    """
    loggerInfo = buildReportLogger("analyze")
    reportName, report = loggerInfo[0], loggerInfo[1]
    uniInfoLog("Analyze Database Tables to Generate Statistics", report)
    uniInfoLog("Analyzing HAWQ Tables", report)
    hawqURI = queries.uri(master, port=5432, dbname=database, user=username, password=password)

    def _analyze_one(session, table, verb):
        # Analyze a single table, log the elapsed time, and optionally mail it.
        ddlString = verb + " " + table
        startTime = datetime.datetime.now()
        uniInfoLog("Start " + ddlString + ": " + str(startTime), report)
        result = session.query(ddlString)
        stopTime = datetime.datetime.now()
        resultString = "Completed " + ddlString + ": " + str(stopTime) + " Elapsed Time: " + str(
            stopTime - startTime)
        uniInfoLog(resultString, report)
        if emailAddress:
            Email.sendEmail(emailAddress, ddlString + " Complete", resultString)

    with queries.Session(hawqURI) as session:
        uniInfoLog("Analyze Dimension Tables", report)
        for table in dimensionTables:
            _analyze_one(session, table, "Analyze")
        uniInfoLog("Analyze Fact Tables", report)
        for table in factTables:
            _analyze_one(session, table, "analyze")

    if emailAddress:
        with open(reportName, "r") as reportMsg:
            message = " ".join(reportMsg.readlines())
        Email.sendEmail(emailAddress, "Table Analyze Final Report: " + (reportName.split('/')[2])[:-4], message)
def main(argv):
    """Provision a GA tracking ID and a published GTM container, then print
    the container's javascript code snippet.

    Reads --site_name and --site_url from the command line (argv itself is
    unused; argparse reads sys.argv). Creates or reuses an Analytics
    tracking ID, a Tag Manager container and tag, publishes a container
    version, and rewrites code_snippet/gtm.txt with the public container id
    substituted into the backup template.

    Raises:
        Exception: if the site URL is invalid or the secret key file is missing.
    """
    # NOTE(review): "DUBUG" looks like a typo for "DEBUG" — confirm the
    # attribute name in settings.py before renaming.
    sys.tracebacklimit = settings.DUBUG
    args_help = """
    Site Name, Site URL and Google Analytics Tracking ID from command line.
    Site name and Site URL for creating container to get javascript code snippet.
    Google Analytics tracking id, where you want get all type of tracking
    """
    parser = argparse.ArgumentParser(description=args_help)
    parser.add_argument('--site_name', type=str, help='Your site name', required=True)
    parser.add_argument('--site_url', type=str, help='Your site URL', required=True)
    args = parser.parse_args()
    container_name = str(args.site_name)
    container_site = str(args.site_url)
    if not validators.url(container_site):
        raise Exception('invalid site URL')
    secret_key_file = settings.GOOGLE_DEVELOPER_SECRET_KEY
    if not os.path.isfile(secret_key_file):
        raise Exception(
            'Missing required secret key JSON file. Please check file name in settings.py or create one '
            'here https://console.developers.google.com/permissions/serviceaccounts'
        )
    # Define the auth scopes to request.
    analytics_scope = ['https://www.googleapis.com/auth/analytics.edit']
    # Authenticate and construct the Analytics service.
    analytics_service = GetAnalyticsService('analytics', 'v3', analytics_scope, secret_key_file)
    tracking_id = GetOrCreateTrackingId(analytics_service, container_name, container_site)
    # Tag Manager needs broader scopes: edit containers/versions and publish.
    tag_manager_scope = [
        'https://www.googleapis.com/auth/tagmanager.edit.containers',
        'https://www.googleapis.com/auth/tagmanager.edit.containerversions',
        'https://www.googleapis.com/auth/tagmanager.publish'
    ]
    tag_manager_service = GetService('tagmanager', 'v1', tag_manager_scope, secret_key_file)
    account_id = GetAccountID(tag_manager_service)
    # Get the container id to create the tag in.
    container_id = CreateOrGetContainer(tag_manager_service, account_id,
                                        container_name, container_site)
    # Create the tag wired to the tracking id.
    CreateOrGetTag(tag_manager_service, account_id, container_id, tracking_id)
    container_version_id = CreateContainerVersion(tag_manager_service, account_id, container_id)
    PublishContainerVersion(tag_manager_service, account_id, container_id, container_version_id)
    # Second call with 'public_id' returns the GTM-XXXXXX public identifier.
    container_public_id = CreateOrGetContainer(tag_manager_service, account_id,
                                               container_name, container_site, 'public_id')
    print('Preparing javascript code snippet...')
    # BUGFIX: the original called gtm.close() inside each "with" block —
    # redundant, since the context manager already closes the file.
    with open(os.path.join('code_snippet', 'gtm_backup.txt'), 'r') as gtm:
        gtm_snippet = gtm.read()
    with open(os.path.join('code_snippet', 'gtm.txt'), 'w') as gtm:
        gtm_snippet = re.sub(r'XXXXXXXX', container_public_id, gtm_snippet)
        gtm.write(gtm_snippet)
    print(gtm_snippet)
    if settings.SEND_CODE_SNIPPET_EMAIL:
        Email.send()
def post(self):
    """Handle a feedback form submission: mail it, then redirect home."""
    # Forward the raw request to the mail helper, which extracts the fields.
    Email.send_feedback(self.request)
    self.redirect("/")
data['nodes'][d] = [x for x in cnodes if int(x['days_to_expire']) <= d] cnodes = [x for x in cnodes if int(x['days_to_expire']) > d] log.debug("=== data ===") log.debug(data) log.debug("=== template ===") log.debug(tpl.render(data)) log.debug("=== end ===") if CFG['email_users']: to = c if '@' not in c: to += '@you.com' email = Email('[TSM] Expiration warning for contact %s' % c, tpl, [to], bcc=CFG['bcc_address'], replyto=CFG['support_address']) email.send() log.info("email sent to %s" % to) # Now send a summary email to the admins expire = [x for x in alert.events if x['retire'] == 0 and x['days_to_expire']] data = { 'retire': retire, 'expire': [x for x in expire if int(x['days_to_expire']) < 1], 'expire_soon': [x for x in expire if int(x['days_to_expire']) >= 1], } tpl = Template('%s_escalation' % alert.search_name)
datas = ExcelReader(self.excel).data for d in datas: with self.subTest(data=d): self.sub_setUp() self.driver.find_element(*self.locator_kw).send_keys( d['search']) self.driver.find_element(*self.locator_su).click() time.sleep(2) links = self.driver.find_elements(*self.locator_result) for link in links: logger.info(link.text) self.sub_tearDown() if __name__ == '__main__': report = REPORT_PATH + '\\report.html' with open(report, 'wb') as f: runner = HTMLTestRunner(f, verbosity=2, title='从0搭建测试框架 灰蓝', description='修改html报告') runner.run(TestBaiDu('test_search')) e = Email(title='百度搜素测试报告', message='这是今天的测试报告,请查收!', receiver='*****@*****.**', server='...', sender='...', password='******', path=report) e.send()
condition_done = '[Timestamp] >= {0} AND [Source] = {1}'\ .format(get_sql_list(TIME['TODAY']), get_sql_list(PATH['SITE'])) entities_run = exist_db.get_to_runs(table_name=DB['LOG_TABLE_NAME'], condition=condition_done, source=PATH['SITE']) for entity in entities_run: entity_object = Haozu(entity) entity_object.run() with ODBC(config=keys.dbconfig_mkt) as entity_db: entity_db.upload(df=entity_object.df['df'], table_name=DB['DETAIL_TABLE'], new_id=PATH['SITE']) entity_db.upload(df=entity_object.df['info'], table_name=DB['INFO_TABLE'], new_id=PATH['SITE'], dedupe_col='Source_ID') entity_db.log(Entity=entity, Timestamp=TIME['TODAY'], Source=PATH['SITE'], start=1, end=len(entity_object.df['info'])) scrapyemail = Email() scrapyemail.send(subject='[Scrapy] ' + DB['DETAIL_TABLE'], content='Done', attachment=PATH['LOG_PATH']) scrapyemail.close() exit(0)
# Upload to database entity_db = ODBC(keys.dbconfig) entity_db.upload(df=tax_df, table_name=DB['TAX_INFO_TABLE']) entity_db.upload(df=tax_detail_df, table_name=DB['TAX_DETAIL_TABLE']) entity_db.log(start=TIME['PRE3MONTH'], end=TIME['TODAY'], Timestamp=TIME['TIMESTAMP'], Source=PATH['SITE'], Entity=row['Entity_Name']) entity_db.close() # Ensure failure of scraping process do not interrupt email and sp execution with ODBC(keys.dbconfig) as execute_db: # Update Irregular_Ind by executing stored procedure execute_db.call_sp(sp='CHN.Irregular_Tax_Refresh', table_name=DB['TAX_DETAIL_TABLE'], table_name2=DB['TAX_DETAIL_TABLE']) for index, row in access.iterrows(): # Get irregular record att = execute_db.call_sp(sp='CHN.Irregular_Tax_ETL', output=True, table_name=DB['TAX_DETAIL_TABLE'], entity_name=row['Entity_Name']) numeric_col = ['金额', '单价', '税率', '税额'] if att is not None: att[numeric_col] = att[numeric_col].apply(pd.to_numeric) invoice_send_email(entity=row['Entity_Name'], receiver=row['Email_List'], attachment=att) # Send email summary scrapyemail_summary = Email() scrapyemail_summary.send('[Scrapy]' + PATH['SITE'], 'Done', PATH['LOG_PATH'], receivers='[email protected];[email protected]') scrapyemail_summary.close() exit(0)
if feed_rss: fcntl.flock(feed_rss, fcntl.LOCK_UN) feed_rss.close() if alert.getSeverityTag() in ['CRITICAL', 'HIGH']: # Push through Pushbullet API_KEY = '' sys.path.insert(0, '/usr/lib/python2.6/site-packages') import pushybullet as pb api = pb.PushBullet(API_KEY) title = '%s: %s' % (alert.getSeverityTag(), alert.numEventsStr()) link = pb.LinkPush('https://druid', title) # Send it off! for c in api.contacts(): c.push(link) if CFG['email_admins']: if alert.search_name.startswith('Generic Alert'): subject = '[%s] %s' % (alert.getSeverityTag(), alert.numEventsStr()) else: subject = '[%s] %s' % (alert.getSeverityTag(), alert.search_name) email = Email('[DRUID] %s' % subject, tpl, CFG['admin_address']) email.send() log.info("email sent - %s" % subject)
'filespaces': cfiles, } log.debug("=== data ===") log.debug(pprint.pformat(data)) log.debug("=== template ===") log.debug(tpl.render(data)) log.debug("=== end ===") if CFG['email_users']: to = c if '@' not in c: to += '@you.com' email = Email('[TSM] Failed backups for contact %s' % c, tpl, [to], bcc=CFG['bcc_address'], replyto=CFG['support_address']) email.send() log.info("email sent to %s" % to) # Now send a summary email to the admins, but only for stuff older than ESCALATION_DATE data = { 'escalation_date': ESCALATION_DATE, 'nodes': filterByAge(nodes, ESCALATION_DATE), 'filespaces': filterByAge(filespaces, ESCALATION_DATE), } tpl = Template('%s_escalation' % alert.search_name)
for c, ev in events: if not c: # Ignore events we don't have a contact for continue log.debug("Contact: %s", c) data = { 'contact': c, 'data': [x for x in ev], } log.debug("=== data ===") log.debug(pprint.pformat(data)) log.debug("=== template ===") log.debug(tpl.render(data)) log.debug("=== end ===") if CFG['email_users']: to = c if '@' not in c: to += '@you.com' email = Email('[TSM] Unauthorized access for contact %s' % c, tpl, [to], bcc=CFG['bcc_address'], replyto=CFG['support_address']) email.send() log.info("email sent to %s" % to)
def partitionTables(master, parts, username, password, database, orientation, byPart, compression, rowGroupSize, emailAddress=""):
    """Create day-partitioned HAWQ tables and load them from staging data.

    parts: number of days per partition (string or int).
    orientation: storage format; "PARQUET" is expanded to include the row
        group size and compression type.
    byPart: when truthy, load each partition individually with the
        load-partbypart scripts (substituting $PARTNAME/$PARTVALUE1/2);
        otherwise run one bulk load script per table.
    emailAddress: optional; per-table mails plus a final report mail.
    """
    loggerInfo = buildReportLogger("partitioning")
    reportName = loggerInfo[0]
    report = loggerInfo[1]
    startString = "Partitioning Tables into " + str(parts) + " Day Partitions in " + orientation + " Format"
    uniInfoLog(startString, report)
    if orientation.upper() == "PARQUET":
        orientation = "PARQUET,ROWGROUPSIZE=" + rowGroupSize + ",COMPRESSTYPE=" + compression
    hawqURI = queries.uri(master, port=5432, dbname=database, user=username, password=password)
    if byPart:
        loadList = sorted(glob.glob('./hawq-ddl/load-partbypart/*.sql'))
    else:
        loadList = sorted(glob.glob('./hawq-ddl/load-part/*.sql'))
    tableList = sorted(glob.glob('./hawq-ddl/hawq-part/*.sql'))
    # Phase 1: create the partitioned tables.
    with queries.Session(hawqURI) as session:
        for table in tableList:
            tableName = (table.split("/")[3]).split(".")[0]
            createStatus = "Creating Table: " + tableName
            uniInfoLog(createStatus, report)
            # BUGFIX: context manager; the original leaked one handle per table.
            with open(table, "r") as ddlFile:
                tableDDL = ddlFile.read()
            tableDDL = tableDDL.replace("$PARTS", parts)
            tableDDL = tableDDL.replace("$ORIENTATION", orientation)
            result = session.query(tableDDL)
            createStatus = "Table Created: " + tableName
            uniInfoLog(createStatus, report)
            if emailAddress:
                Email.sendEmail(emailAddress, createStatus, createStatus)
    # Phase 2: load the tables. Julian-day range is hard coded for now
    # because the schema is hard coded as well.
    startDate = 2450815
    endDate = 2453005
    for load in loadList:
        # BUGFIX: the original opened and read each load file twice.
        with open(load, "r") as ddlFile:
            loadDDL = ddlFile.read()
        # Strip the "_partbypart" / "_part" suffix to recover the table name.
        if byPart:
            tableName = ((load.split("/")[3]).split(".")[0])[:-13]
        else:
            tableName = ((load.split("/")[3]).split(".")[0])[:-5]
        uniInfoLog("Loading: " + tableName, report)
        if byPart:
            # Load one partition at a time; partition ranks start at 2
            # (rank 1 is presumably the default/boundary partition — confirm).
            partCount = getPartitionCount(master, database, username, password, "inventory")
            partStart = startDate
            for partNum in range(2, partCount + 1):
                modDDL = loadDDL
                partName = tableName + "_1_prt_" + str(partNum)
                # End of part is the number of days in the part added to the first day.
                partEnd = partStart + (int(parts) - 1)
                modDDL = modDDL.replace("$PARTNAME", str(partName))
                modDDL = modDDL.replace("$PARTVALUE1", str(partStart))
                modDDL = modDDL.replace("$PARTVALUE2", str(partEnd))
                with queries.Session(hawqURI) as session:
                    result = session.query(modDDL)
                partStart = partEnd + 1
                uniInfoLog("Table Partition Loaded: " + partName, report)
            createStatus = "Table Loaded: " + tableName
            uniInfoLog(createStatus, report)
            if emailAddress:
                Email.sendEmail(emailAddress, createStatus, createStatus)
            # Alternative: SELECT partitionboundary, partitiontablename, partitionname,
            # partitionlevel, partitionrank FROM pg_partitions WHERE tablename='catalog_returns';
        else:
            with queries.Session(hawqURI) as session:
                result = session.query(loadDDL)
            createStatus = "Table Loaded: " + tableName
            uniInfoLog(createStatus, report)
            if emailAddress:
                Email.sendEmail(emailAddress, createStatus, createStatus)
    if emailAddress:
        with open(reportName, "r") as reportMsg:
            message = " ".join(reportMsg.readlines())
        Email.sendEmail(emailAddress, "Repartition Final Report: " + (reportName.split('/')[2])[:-4], message)
def digesting():
    """Run one digest cycle: log in, fetch RisingStack posts, then quit."""
    mail_session = Email.get_instance()
    mail_session.login()
    # Fetching the blog triggers the per-post emails via the shared client.
    blogs.RisingStack.get()
    mail_session.quit()