コード例 #1
0
ファイル: serv.py プロジェクト: tomacorp/thermapythia
 def index(self):
   """Render a trivial three-heading demo page and return it as a string."""
   page = HTML()
   page.h1("Hello world")
   page.h2("And")
   page.h3("Bye")
   return str(page)
コード例 #2
0
    def _create_page(self, link_prefix=""):
        """Build the common page skeleton: title, stylesheet link and the
        navigation heading with links to every view page.

        link_prefix is prepended to each relative href (and the CSS path)
        so pages in subdirectories can reach the shared resources.
        """
        page = HTML('html', '')
        page.h1("Cloud: %s" % self.cloud_name)

        hd = page.head()
        hd.link(rel="stylesheet",
                type="text/css",
                href="%sardana.css" % link_prefix)

        # One nav link per view, each followed by a run of non-breaking
        # spaces (escape=False so the entities are emitted verbatim).
        nav = page.h2()
        view_links = (
            ("Control Plane View", "Control_Planes.html"),
            ("Region View", "Regions.html"),
            ("Service View", "Services.html"),
            ("Network View", "Networks.html"),
            ("Server View", "Server_View.html"),
            ("Server Groups View", "Server_Groups.html"),
        )
        for label, target in view_links:
            nav.a(label, href="%s%s" % (link_prefix, target))
            nav.text(SP * 10, escape=False)

        # Attribute access creates the <br> element in this HTML library.
        page.br
        return page
コード例 #3
0
def write_html(classes, digest_filename):
    """Write an HTML digest of *classes* to *digest_filename*.

    Parameters:
        classes: mapping of class name -> iterable of item strings; each
            class becomes an <h2> followed by a <ul> of its items.
        digest_filename: path of the UTF-8 HTML file to (over)write.
    """
    # ctime() -> e.g. 'Sat Jun  5 12:00:00 2021'; drop the clock-time field
    # so the heading highlights only the date.
    time = ctime().split()
    del time[-2]
    h = HTML()
    h.h1("Digest for " + " ".join(time))
    for cls in classes:
        h.h2(cls + ":")
        unordered_list = h.ul  # attribute access creates a <ul> element
        for item in classes[cls]:
            unordered_list.li(item)
    # Open in text mode with an explicit encoding and write str objects.
    # The original encoded to bytes and wrote them to a text-mode handle,
    # which raises TypeError on Python 3; it also never closed the file
    # on error. str(h) matches how the other call sites render HTML().
    with open(digest_filename, "w", encoding="utf-8") as f:
        f.write('<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">')
        f.write(str(h))
コード例 #4
0
def export(user1, user2, score, runtime):
    """Render the matched-profile analytics page and write it to output.html.

    Parameters:
        user1, user2: profile objects exposing .name and .interests.
        score: number of common interests (shown under the heading).
        runtime: matching runtime (shown under the heading).
    """
    h = HTML()
    insertSources(h)

    # Raw Bootstrap wrapper markup; escape=False emits it verbatim.
    h.text("<div class=\"container\">", escape=False)
    h.h1('Matched profile analytics', align='center')
    h.p('Common interests: ' + str(score))
    h.p('Runtime: ' + str(runtime))

    # 'klass' is this HTML library's spelling of the 'class' attribute.
    t = h.table(klass='table table-bordered table-hover table-striped')
    t.text('<thead>', escape=False)
    r = t.tr
    r.th('Name')
    r.th(user1.name)
    r.th(user2.name)

    t.text('</thead>', escape=False)

    common, unique1, unique2 = InterestUtil.compareInterests(
        user1.interests, user2.interests)

    # Shared interests: highlighted row, value repeated for both users.
    for interest in common:
        r = t.tr(klass="success")
        r.td(interest.name)
        r.td(interest.value)
        r.td(interest.value)

    # Interests only user1 has: dash in user2's column.
    for interest in unique1:
        r = t.tr(klass="")
        r.td(interest.name)
        r.td(interest.value)
        r.td('-')

    # Interests only user2 has: dash in user1's column.
    for interest in unique2:
        r = t.tr(klass="")
        r.td(interest.name)
        r.td('-')
        r.td(interest.value)

    h.text("</div>", escape=False)

    # Context manager guarantees the handle is closed; the original leaked
    # the file object (and shadowed the builtin name 'file').
    with open('output.html', 'w') as out:
        out.write(str(h))
コード例 #5
0
ファイル: colour.py プロジェクト: tcsyn/check_sky
# Read the latest data-source value from an RRD XML dump and prepare a
# latency figure (ms) for colour-coded display.
__author__ = 'chris'
from html import HTML
import subprocess
import xml.etree.ElementTree as ET

# NOTE(review): the rrdtool dump that (re)writes test.xml runs *below*,
# after this parse -- so this run reads the dump left behind by a previous
# run. Confirm that ordering is intentional.
tree = ET.parse('test.xml')
root = tree.getroot()

H = HTML('html')

H.h1("Speed in milliseconds")

# Dump the RRD database to XML; the captured stdout is kept but unused.
rrd_dump = subprocess.check_output(
    ["rrdtool", "dump", "test.rrd", "test.xml"])

# After the loop, 'last' holds the last_ds value of the final <ds> element.
for ds in root.findall('ds'):
    #	name = ds.find('name').text
    last = ds.find('last_ds').text

# Latest reading, in milliseconds.
ms = int(last)

def colour(x):
    """Return 'green' for values below 2000, 'red' otherwise."""
    return 'green' if x < 2000 else 'red'


# Pick the display colour from the latest measured latency.
fontc = colour(ms)
コード例 #6
0
def _add_request_table(html, title, requests):
    """Append a section heading and a bordered table of transactions to *html*.

    Each row shows the transaction id, geocode status, address, HTTP result,
    origin IP, timestamp and elapsed seconds.  Address components that are
    NULL/None are skipped so a missing field cannot crash report generation.
    """
    html.h2(title)
    table = html.table(border="1")

    header_row = table.tr
    for heading in ('Transaction ID', 'Geocode Status', 'Address',
                    'HTTP Result Code', 'HTTP Result Message', 'IP',
                    'Transaction Date', 'Seconds to Return'):
        header_row.th(heading)

    for trans in requests:
        # Join only the non-empty components; the previous long-request path
        # used plain '+' concatenation and raised TypeError on a NULL column.
        address_components = [trans['street'], trans['city'],
                              trans['state'], trans['postal_code']]
        address = ', '.join(filter(None, address_components))

        row = table.tr
        row.td(trans['trans_id'])
        row.td(str(trans['geocode_status']))
        row.td(address)
        row.td(trans['result_code'])
        row.td(trans['result_message'])
        row.td(trans['remote_ip'])
        row.td(str(trans['trans_time']))
        row.td(str(trans['return_seconds']))


def construct_report():
    """
    construct an html report of all requests for the current day
    report includes number of request(all, successful, failed) as well as a table of failed requests

    Returns:
        dict with 'status' ('OK', 'WARNING' or 'ERROR') and 'html' (the
        rendered report as a string).
    """
    html = HTML()
    html.h1('Daily Report')
    report = {'status': 'OK', 'html': None}

    current_date = date.today()
    # Quoted SQL date literal, e.g. '2021-6-5', interpolated into the queries.
    date_string = "\'{0}-{1}-{2}\'".format(current_date.year, current_date.month, current_date.day)

    try:
        con_mgr = Manager()
        db_config_stream = pkg_resources.resource_stream('PIR', 'etc/dbconfig.yaml')
        engine = con_mgr.get_connection(db_config_stream, security_level=ConnectionLevel.UPDATE, sql_echo=True)

        # NOTE(review): the queries interpolate date_string with str.format.
        # The value is built internally from date.today(), not from user
        # input, but bound parameters would still be the safer form.

        # # query db to find number of total transactions for current day
        num_transactions = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0}".format(date_string)).fetchone()

        # query db to get all failed requests for current day
        failed_requests = engine.execute(
            "SELECT * FROM pir_transactions WHERE trans_time::date = {0} AND result_code <> '200'".format(
                date_string)).fetchall()

        # query db to get all request that returned in over 5 seconds
        long_requests = engine.execute(
            "SELECT * FROM pir_transactions WHERE trans_time::date = {0} AND return_seconds > 5".format(
                date_string)).fetchall()

        pro_test_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip IN "
            "('10.16.153.171', '10.16.153.105')".format(date_string)).fetchone()

        pro_acceptance_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip IN "
            "('10.20.201.2', '10.20.201.32')".format(date_string)).fetchone()

        pro_production_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip LIKE '10.65.1%'".format(
                date_string)).fetchone()

        pro_dev_requests = engine.execute(
            "SELECT COUNT(*) FROM pir_transactions WHERE trans_time::date = {0} AND remote_ip LIKE '172.16.132%'".format(
                date_string)).fetchone()

        # construct summary table
        html.h2('Summary')
        summary_table = html.table(cellpadding='10')
        for label, value in (
                ("Number of Requests", num_transactions[0]),
                ("Number of Successful Requests", num_transactions[0] - len(failed_requests)),
                ("Number of Failed Requests", len(failed_requests)),
                ("Number of Requests Over 5 Seconds", len(long_requests))):
            summary_row = summary_table.tr
            summary_row.td(label)
            summary_row.td(str(value))

        # construct request origin table -- this is based off of given ip addresses in shared confluence page
        html.h2('Prometrix Request Origin')
        origin_table = html.table(cellpadding='10')
        for label, count_row in (
                ("Dev", pro_dev_requests),
                ("Test", pro_test_requests),
                ("Acceptance", pro_acceptance_requests),
                ("Production", pro_production_requests)):
            origin_row = origin_table.tr
            origin_row.td(label)
            origin_row.td(str(count_row[0]))

        if long_requests:
            _add_request_table(html, "Requests Over 5 Seconds", long_requests)

            # too many slow requests escalates to ERROR; any at all is WARNING
            if len(long_requests) > monitor_config['monitoring_configurations'][config_level]['long_threshold']:
                report['status'] = 'ERROR'
            else:
                report['status'] = "WARNING"

        # if we have failed requests, include them in report
        if failed_requests:
            _add_request_table(html, 'Failed Requests', failed_requests)

            # if there are enough failed requests, indicate an error
            # if there is a failed request, indicate a warning
            if len(failed_requests) > monitor_config['monitoring_configurations'][config_level]['failed_threshold']:
                report['status'] = 'ERROR'

            if report['status'] != 'ERROR':
                report['status'] = 'WARNING'

    except ManagerConnectionException as e:
        logging.error("Could not connect to database server. Caching failed. {0}".format(e))
        html.p('COULD NOT CONNECT TO DATABASE')
        report['status'] = 'ERROR'
    except SQLAlchemyError as e:
        logging.error('Error: {0}'.format(e))
        logging.error('Unable to retrieve desired information from database')
        html.p('SQL ERROR: {0}'.format(e))
        report['status'] = 'ERROR'

    report['html'] = str(html)

    return report
コード例 #7
0
						tn_new[th_index].append(chain)
			line = input_file.readline()
		tp_samples.append(tp_new)
		fp_samples.append(fp_new)
		tn_samples.append(tn_new)
		fn_samples.append(fn_new)

	return (tp_samples, fp_samples, tn_samples, fn_samples)

# Load the parsed experiment artefacts -- thresholds, per-iteration feature
# importances, feature names, per-class sample lists and summary stats --
# via the reader helpers defined earlier in this file.
th_list = read_th_nums(th_list)
feature_importances = read_feature_importance(num_of_iters, feature_importances)
feature_names = read_feature_names(feature_names)
tp_samples, fp_samples, tn_samples, fn_samples = read_samples(num_of_iters, tp_samples, fp_samples, tn_samples, fn_samples, th_list)
recal, precision, f1_score = read_stats(num_of_iters, recal, precision, f1_score)

h.h1('While training on the problems 0-200', color = ('rgb(205, 12, 24)'))
for i in range(0, num_of_iters, 2):
	h.h2('considering iteration number ' + str(i))
	h.h3("Feature importances are:")
	table_data = []
	table_line = []
	table_line.append("feature_name")
	table_line.append("importance")
	table_data.append(table_line)
	h.p("feature_name \t\t\t importance")
	for j in range(0, len(feature_names)):
		table_line = []
		table_line.append(feature_names[j])
		table_line.append(feature_importances[i][j])
		table_data.append(table_line)
		h.p(feature_names[j] + '\t\t\t ' + feature_importances[i][j])