def index(self):
    """Render a trivial demo index page with three headings."""
    page = HTML()
    page.h1("Hello world")
    page.h2("And")
    page.h3("Bye")
    # Serialize the builder to its HTML string representation.
    return str(page)
def gen_html_report(crash_dict, apk_file, simple_log):
    """Write ``monkey_test_report.html`` summarizing monkey-test crashes.

    :param crash_dict: mapping of crash long-message -> occurrence count
    :param apk_file: APK path/name shown in the report header
    :param simple_log: iterable of log lines rendered as <code> entries
    """
    h = HTML()
    h.h2('Monkey Test Report')
    h.li(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    h.li(apk_file)
    h.br  # attribute access appends a <br> in this HTML builder
    t = h.table(border='1')
    r = t.tr
    r.th('Crash Long Msg')
    r.th('Crash Times')
    # FIX: original used dict.iteritems(), which was removed in Python 3;
    # items() behaves the same here on both Python 2 and 3.
    for msg, count in crash_dict.items():
        r = t.tr
        r.td(msg)
        r.td(str(count))
    h.h6('Logs')
    h.pre()
    for line in simple_log:
        h.code(str(line))
    h.pre()
    with open('monkey_test_report.html', 'w') as out:
        out.write(str(h))
def _create_page(self, link_prefix=""):
    """Build the shared page skeleton: title, stylesheet link, and the
    navigation heading with links to every view page.

    :param link_prefix: optional path prefix prepended to every href
    :return: the populated HTML builder
    """
    page = HTML('html', '')
    page.h1("Cloud: %s" % self.cloud_name)
    head = page.head()
    head.link(rel="stylesheet", type="text/css",
              href="%sardana.css" % link_prefix)
    nav = page.h2()
    # (label, target-file) pairs for the navigation bar, in display order.
    views = [
        ("Control Plane View", "Control_Planes.html"),
        ("Region View", "Regions.html"),
        ("Service View", "Services.html"),
        ("Network View", "Networks.html"),
        ("Server View", "Server_View.html"),
        ("Server Groups View", "Server_Groups.html"),
    ]
    for label, target in views:
        nav.a(label, href="%s%s" % (link_prefix, target))
        # Unescaped spacer between the links.
        nav.text(SP * 10, escape=False)
    page.br
    return page
def write_html(classes, digest_filename):
    """Write an HTML digest: one <h2>/<ul> section per entry in *classes*.

    :param classes: mapping of section name -> iterable of item strings
    :param digest_filename: path of the HTML file to (over)write
    """
    # ctime() -> e.g. ['Mon', 'Jun', '1', '12:00:00', '2020']; drop the
    # time-of-day field — we want to highlight only the date.
    time_parts = ctime().split()
    del time_parts[-2]
    h = HTML()
    h.h1("Digest for " + " ".join(time_parts))
    for cls in classes:
        h.h2(cls + ":")
        unordered_list = h.ul  # attribute access appends a <ul>
        for item in classes[cls]:
            unordered_list.li(item)
    # FIX: the original wrote UTF-8-encoded *bytes* to a text-mode file and
    # serialized via h.__unicode__() — both fail on Python 3. Open the file
    # with an explicit encoding, write text, and use a with-block so the
    # handle is closed even on error.
    with open(digest_filename, "w", encoding="utf-8") as f:
        f.write('<meta http-equiv="Content-Type" content="text/html; '
                'charset=UTF-8">')
        f.write(str(h))
def gen_html_report(apk_file, result_good, result_bad):
    """Write ``security_test_report.html`` with the classification result.

    :param apk_file: APK path/name shown in the report header
    :param result_good: probability the sample is benign
    :param result_bad: probability the sample is malware
    """
    doc = HTML()
    doc.h2('Security Test Report')
    doc.li(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    doc.li(apk_file)
    doc.br  # attribute access appends a <br> in this HTML builder
    table = doc.table(border='1')
    header = table.tr
    header.th('Probability to Good')
    header.th('Probability to Malware')
    values = table.tr
    values.td(result_good)
    values.td(result_bad)
    with open('security_test_report.html', 'w') as report_file:
        report_file.write(str(doc))
def construct_report():
    """Construct an HTML report of all requests for the current day.

    The report includes the number of requests (all, successful, failed),
    a request-origin breakdown, and tables of slow (>5s) and failed
    requests.

    :return: dict ``{'status': 'OK'|'WARNING'|'ERROR', 'html': <str>}``

    NOTE(review): relies on module-level ``monitor_config`` and
    ``config_level`` for the warning/error thresholds, and on ``Manager``,
    ``ConnectionLevel`` and the exception types from the surrounding file —
    confirm they are in scope wherever this is used.
    """
    html = HTML()
    html.h1('Daily Report')
    report = {'status': 'OK', 'html': None}
    current_date = date.today()
    # Quoted date literal spliced into the SQL below, e.g. '2020-6-1'.
    # NOTE(review): queries are built by string formatting rather than bound
    # parameters; tolerable only because this value is generated locally and
    # never comes from user input — consider parameterized queries anyway.
    date_string = "\'{0}-{1}-{2}\'".format(current_date.year,
                                           current_date.month,
                                           current_date.day)
    try:
        con_mgr = Manager()
        db_config_stream = pkg_resources.resource_stream('PIR',
                                                         'etc/dbconfig.yaml')
        engine = con_mgr.get_connection(db_config_stream,
                                        security_level=ConnectionLevel.UPDATE,
                                        sql_echo=True)
        # Total number of transactions for the current day.
        num_transactions = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0}".format(date_string)).fetchone()
        # All failed requests (non-200 result codes) for the current day.
        failed_requests = engine.execute(
            "SELECT * FROM pir_transactions WHERE trans_time::date = {0} AND result_code <> '200'".format(
                date_string)).fetchall()
        # All requests that took longer than 5 seconds to return.
        long_requests = engine.execute(
            "SELECT * FROM pir_transactions WHERE trans_time::date = {0} AND return_seconds > 5".format(
                date_string)).fetchall()
        # Per-environment request counts; the IP addresses and prefixes
        # identify the test/acceptance/production/dev environments (from the
        # shared confluence page referenced below).
        pro_test_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip IN "
            "('10.16.153.171', '10.16.153.105')".format(date_string)).fetchone()
        pro_acceptance_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip IN "
            "('10.20.201.2', '10.20.201.32')".format(date_string)).fetchone()
        pro_production_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip LIKE '10.65.1%'".format(
                date_string)).fetchone()
        pro_dev_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip LIKE '172.16.132%'".format(
                date_string)).fetchone()
        # Construct the summary table (totals and success/failure counts).
        html.h2('Summary')
        summary_table = html.table(cellpadding='10')
        num_transactions_row = summary_table.tr
        num_transactions_row.td("Number of Requests")
        num_transactions_row.td(str(num_transactions[0]))
        success_row = summary_table.tr
        success_row.td("Number of Successful Requests")
        success_row.td(str(num_transactions[0] - len(failed_requests)))
        failure_row = summary_table.tr
        failure_row.td("Number of Failed Requests")
        failure_row.td(str(len(failed_requests)))
        long_row = summary_table.tr
        long_row.td("Number of Requests Over 5 Seconds")
        long_row.td(str(len(long_requests)))
        # Construct request origin table -- this is based off of given ip
        # addresses in shared confluence page.
        html.h2('Prometrix Request Origin')
        origin_table = html.table(cellpadding='10')
        pro_dev_row = origin_table.tr
        pro_dev_row.td("Dev")
        pro_dev_row.td(str(pro_dev_requests[0]))
        pro_test_row = origin_table.tr
        pro_test_row.td("Test")
        pro_test_row.td(str(pro_test_requests[0]))
        pro_acceptance_row = origin_table.tr
        pro_acceptance_row.td("Acceptance")
        pro_acceptance_row.td(str(pro_acceptance_requests[0]))
        pro_production_row = origin_table.tr
        pro_production_row.td("Production")
        pro_production_row.td(str(pro_production_requests[0]))
        # Table of slow requests, if any; their count escalates the status.
        if long_requests:
            html.h2("Requests Over 5 Seconds")
            long_table = html.table(border="1")
            header_row = long_table.tr
            header_row.th('Transaction ID')
            header_row.th('Geocode Status')
            header_row.th('Address')
            header_row.th('HTTP Result Code')
            header_row.th('HTTP Result Message')
            header_row.th('IP')
            header_row.th('Transaction Date')
            header_row.th('Seconds to Return')
            for trans in long_requests:
                # NOTE(review): unlike the failed-requests table below, this
                # concatenation raises if any address component is None.
                address = trans['street'] + ', ' + trans['city'] + ', ' + trans['state'] + ' ' + trans['postal_code']
                row = long_table.tr
                row.td(trans['trans_id'])
                row.td(str(trans['geocode_status']))
                row.td(address)
                row.td(trans['result_code'])
                row.td(trans['result_message'])
                row.td(trans['remote_ip'])
                row.td(str(trans['trans_time']))
                row.td(str(trans['return_seconds']))
            # ERROR above the configured threshold, otherwise WARNING.
            if len(long_requests) > monitor_config['monitoring_configurations'][config_level]['long_threshold']:
                report['status'] = 'ERROR'
            else:
                report['status'] = "WARNING"
        # if we have failed requests, include them in report
        if failed_requests:
            html.h2('Failed Requests')
            failed_table = html.table(border="1")
            header_row = failed_table.tr
            header_row.th('Transaction ID')
            header_row.th('Geocode Status')
            header_row.th('Address')
            header_row.th('HTTP Result Code')
            header_row.th('HTTP Result Message')
            header_row.th('IP')
            header_row.th('Transaction Date')
            header_row.th('Seconds To Return')
            for trans in failed_requests:
                # Join only the non-empty address components.
                address_components = [trans['street'], trans['city'],
                                      trans['state'], trans['postal_code']]
                address = ', '.join(filter(None, address_components))
                row = failed_table.tr
                row.td(trans['trans_id'])
                row.td(str(trans['geocode_status']))
                row.td(address)
                row.td(trans['result_code'])
                row.td(trans['result_message'])
                row.td(trans['remote_ip'])
                row.td(str(trans['trans_time']))
                row.td(str(trans['return_seconds']))
            # if there are enough failed requests, indicate an error
            # if there is a failed request, indicate a warning
            if len(failed_requests) > monitor_config['monitoring_configurations'][config_level]['failed_threshold']:
                report['status'] = 'ERROR'
            if report['status'] != 'ERROR':
                report['status'] = 'WARNING'
    except ManagerConnectionException as e:
        logging.error("Could not connect to database server. Caching failed. {0}".format(e))
        html.p('COULD NOT CONNECT TO DATABASE')
        report['status'] = 'ERROR'
    except SQLAlchemyError as e:
        logging.error('Error: {0}'.format(e))
        logging.error('Unable to retrieve desired information from database')
        html.p('SQL ERROR: {0}'.format(e))
        report['status'] = 'ERROR'
    report['html'] = str(html)
    return report
tp_samples.append(tp_new) fp_samples.append(fp_new) tn_samples.append(tn_new) fn_samples.append(fn_new) return (tp_samples, fp_samples, tn_samples, fn_samples) th_list = read_th_nums(th_list) feature_importances = read_feature_importance(num_of_iters, feature_importances) feature_names = read_feature_names(feature_names) tp_samples, fp_samples, tn_samples, fn_samples = read_samples(num_of_iters, tp_samples, fp_samples, tn_samples, fn_samples, th_list) recal, precision, f1_score = read_stats(num_of_iters, recal, precision, f1_score) h.h1('While training on the problems 0-200', color = ('rgb(205, 12, 24)')) for i in range(0, num_of_iters, 2): h.h2('considering iteration number ' + str(i)) h.h3("Feature importances are:") table_data = [] table_line = [] table_line.append("feature_name") table_line.append("importance") table_data.append(table_line) h.p("feature_name \t\t\t importance") for j in range(0, len(feature_names)): table_line = [] table_line.append(feature_names[j]) table_line.append(feature_importances[i][j]) table_data.append(table_line) h.p(feature_names[j] + '\t\t\t ' + feature_importances[i][j]) h.h3("TP samples are")