# These helpers assume the module-level imports the rest of this file
# provides, e.g.:
#   from functools import reduce
#   from random import shuffle
#   import numpy as np
#   from elasticsearch import Elasticsearch
def insert_errors_db(config, uuid, errors):
    # Record the error summary for one Browbeat run in the errors table.
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    name_table = config['table_errors'][0]
    cur.execute("INSERT INTO {} VALUES ('{}', {});".format(
        name_table, str(uuid), errors))
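
# A minimal sketch (not part of the original module) of the same insert done
# with psycopg2-style parameter binding, which avoids hand-quoting values;
# it assumes connect_crdb() returns a psycopg2 connection, which the
# set_session(autocommit=True) calls in this module suggest. The table name
# cannot be bound as a parameter, so it is still formatted into the string.
def insert_errors_db_parameterized(config, uuid, errors):
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    cur.execute("INSERT INTO {} VALUES (%s, %s);".format(
        config['table_errors'][0]), (str(uuid), errors))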
def update(config, days):
    # Fold the lists of one-entry dicts from the config into single lookup
    # tables mapping OSP versions and test names to numeric feature values.
    osp_version_dic = reduce(lambda r, d: r.update(d) or r,
                             config['osp_version_dic'], {})  # noqa
    test_name_dic = reduce(lambda r, d: r.update(d) or r,
                           config['test_name_dic'], {})  # noqa
    table_name = config['table_name'][0]
    data = []
    target = []
    conn = connect_crdb(config)
    cur = conn.cursor()
    cur.execute("select test, osp_version, avg_runtime, \
                grade, timestamp, \
                concurrency, times from {}".format(table_name))
    rows = cur.fetchall()
    for row in rows:
        if date_valid(str(row[4]), days) and test_ignore_check(row[0], config):
            # Feature vector: [osp_version, test, avg_runtime, concurrency,
            # times]; the grade column is the training target.
            data.append([osp_version_dic[str(row[1])],
                         test_name_dic[str(row[0])],
                         row[2], row[5], row[6]])
            target.append(int(row[3]))
    conn.close()
    # Retrain every configured classifier on the full data set.
    for name_clf in config['classifier_lists']:
        train_classifier(data, target, name_clf)
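
# Worked example of the reduce() idiom above: it folds a list of one-entry
# dicts (the shape config['osp_version_dic'] apparently has) into one lookup
# table. The keys and values here are illustrative, not from the real config:
#
#   >>> reduce(lambda r, d: r.update(d) or r, [{'11': 0}, {'12': 1}], {})
#   {'11': 0, '12': 1}
#
# The `or r` is needed because dict.update() returns None, and reduce() must
# carry the accumulator forward.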
def get_uuids_list(config, days):
    # Return the set of run UUIDs recorded within the last `days` days.
    conn = connect_crdb(config)
    cur = conn.cursor()
    table_name = config['table_name'][0]
    cur.execute("select uuid, timestamp from {}".format(table_name))  # noqa
    rows = cur.fetchall()
    uuid_list = set()
    for row in rows:
        # A set de-duplicates on its own, so no membership check is needed.
        if date_valid(row[1], days):
            uuid_list.add(row[0])
    return uuid_list
def insert_values_db(config, uuid, test, osp_name, avg_runtime, time_stamp,
                     puddle, dlrn, concurrency, times, perc95_score):
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    classify = False
    cur.execute("INSERT INTO {} (uuid, test, osp_version, avg_runtime, \
                timestamp, rhos_puddle, dlrn_hash, classify, concurrency, \
                times, percentile95) VALUES ('{}', '{}', '{}', {}, '{}', '{}', \
                '{}', '{}', {}, {}, {})".format(config['table_name'][0],
                str(uuid), str(test), str(osp_name), float(avg_runtime),
                str(time_stamp), str(puddle), str(dlrn), bool(classify),
                int(concurrency), int(times), float(perc95_score)))
def insert_grades_db(config, uuid, test, osp_name, avg_runtime, grade,
                     time_stamp, puddle, dlrn, concurrency, times,
                     perc95_score):
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    classify = True
    cur.execute("INSERT INTO {} VALUES ('{}', '{}', '{}', {}, '{}', '{}', \
                '{}', '{}', {}, {}, {}, {});".format(config['table_name'][0],
                str(uuid), str(test), str(osp_name), float(avg_runtime),
                str(time_stamp), str(puddle), str(dlrn), bool(classify),
                int(grade), int(concurrency), int(times),
                float(perc95_score)))
def insert_timeseriessummaries_db(config, uuid):
    # WIP: the backend object should be passed in rather than built here.
    elastic = Backend("elk.browbeatproject.org", "9200")
    brun = browbeat_run(elastic, uuid, timeseries=True)
    graphite_details = brun.get_graphite_details()
    graphite_url = graphite_details[0]
    start = graphite_details[1]
    end = graphite_details[2]
    metric_base = str(graphite_details[3]) + "."
    # Build a Graphite render URL with one '{}' placeholder left open for
    # the metric name.
    base_url = "{}/render?target={}".format(graphite_url, metric_base)
    time_url = "&format=json&from={}&until={}".format(start, end)
    final_url = base_url + "{}" + time_url
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    # metrics_list is assumed to be a module-level list of Graphite targets,
    # in this order: cpu system, cpu user, cpu softirq, cpu wait,
    # unreclaimable slab memory, used memory.
    cpu_system = summarize_metric(final_url, metrics_list[0])
    cpu_user = summarize_metric(final_url, metrics_list[1])
    cpu_softirq = summarize_metric(final_url, metrics_list[2])
    cpu_wait = summarize_metric(final_url, metrics_list[3])
    mem_slabunrecl = summarize_metric(final_url, metrics_list[4])
    mem_used = summarize_metric(final_url, metrics_list[5])
    cur.execute("INSERT INTO {} VALUES ('{}', {}, {}, {}, {}, {}, {}, \
                {}, {}, {}, {}, {}, {});".format(config['table_timeseries'][0],
                str(uuid), float(cpu_system[0]), float(cpu_system[1]),
                float(cpu_user[0]), float(cpu_user[1]),
                float(cpu_softirq[0]), float(cpu_softirq[1]),
                float(cpu_wait[0]), float(cpu_wait[1]),
                float(mem_used[0]), float(mem_used[1]),
                float(mem_slabunrecl[0]), float(mem_slabunrecl[1])))
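
# For orientation, a hypothetical shape for summarize_metric(), whose real
# implementation lives elsewhere in this module: each call above consumes a
# two-element result, so the sketch returns a pair of summary statistics.
# Which statistics the real function returns is an assumption here.
def summarize_metric_sketch(final_url, metric):
    import requests  # dependency assumed only for this sketch
    # Graphite's JSON render output is a list of series, each with a
    # 'datapoints' list of [value, timestamp] pairs.
    series = requests.get(final_url.format(metric)).json()
    values = [v for v, _ in series[0]['datapoints'] if v is not None]
    return (sum(values) / len(values), max(values))  # e.g. (mean, peak)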
def test(config, days):
    osp_version_dic = reduce(lambda r, d: r.update(d) or r,
                             config['osp_version_dic'], {})  # noqa
    test_name_dic = reduce(lambda r, d: r.update(d) or r,
                           config['test_name_dic'], {})  # noqa
    table_name = config['table_name'][0]
    data = []
    target = []
    conn = connect_crdb(config)
    cur = conn.cursor()
    cur.execute("select test, osp_version, avg_runtime, \
                grade, timestamp, \
                concurrency, times from {}".format(table_name))
    rows = cur.fetchall()
    # Shuffle so the 80/20 split below is not biased by insertion order.
    shuffle(rows)
    for row in rows:
        if date_valid(str(row[4]), days) and test_ignore_check(row[0], config):
            data.append([osp_version_dic[str(row[1])],
                         test_name_dic[str(row[0])],
                         row[2], row[5], row[6]])
            target.append(int(row[3]))
    conn.close()
    # Hold out the last 20% of the shuffled samples for evaluation.
    split = int(0.8 * len(data))
    data_train = np.array(data[:split])
    data_test = np.array(data[split:])
    target_train = np.array(target[:split])
    target_test = np.array(target[split:])
    for name_clf in config['classifier_lists']:
        trained_clf = train_classifier(data_train, target_train, name_clf)
        print("The results for the '{}' classifier are as follows:".format(
            str(name_clf)))  # noqa
        display_results(trained_clf, data_test, target_test)
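
# The same 80/20 split could be done with scikit-learn, sketched here on the
# assumption that sklearn is available alongside the classifiers this module
# already trains; shuffle(rows) above plays the role of the shuffling that
# train_test_split performs itself:
#
#   from sklearn.model_selection import train_test_split
#   data_train, data_test, target_train, target_test = train_test_split(
#       np.array(data), np.array(target), test_size=0.2)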
def time_summary(config, days):
    uuids = get_uuids_list(config, days)
    padding = longest_scenario_test_name(config)
    conn = connect_crdb(config)
    cur = conn.cursor()
    table_name = config['table_name'][0]
    for uuid in uuids:
        cur.execute("select test, osp_version, avg_runtime, \
                    grade, timestamp from {} \
                    where uuid = '{}'".format(table_name, uuid))
        rows = cur.fetchall()
        output_string = ""
        # Initialize so an empty result set cannot leave osp_version unbound.
        osp_version = ""
        for row in rows:
            output_string += row[0].ljust(padding) + " " + \
                "%06.2f" % float(row[2]) + " " + str(row[3]) + "\n"
            osp_version = row[1]
        header = ("Browbeat UUID: " + uuid +
                  " OSP_version: " + str(osp_version) + "\n")
        header += "".ljust(80, "-") + "\n"
        print(header + output_string)
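
# Illustrative output of time_summary() (UUID, version, test names and
# values are hypothetical; columns are test, avg_runtime, grade):
#
#   Browbeat UUID: 5a9c... OSP_version: 12
#   --------------------------------------------------------------------------------
#   authenticate-keystone          012.34 1
#   create-delete-network          098.76 0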
def insert_logsummary_db(config, uuid):
    es = Elasticsearch([{'host': 'elk.browbeatproject.org', 'port': 9200}])
    # Ask Elasticsearch for the run's time window via min/max aggregations
    # over the timestamps of all documents tagged with this Browbeat UUID.
    query_input = {
        "query": {
            "match": {
                "browbeat_uuid": uuid
            }
        },
        "aggs": {
            "max_time": {
                "max": {
                    "field": "timestamp"
                }
            },
            "min_time": {
                "min": {
                    "field": "timestamp"
                }
            }
        }
    }
    res = es.search(index="browbeat-rally-*", body=query_input)
    start = int(res['aggregations']['min_time']['value'])
    end = int(res['aggregations']['max_time']['value'])
    cloud_name = res['hits']['hits'][0]['_source']['cloud_name']
    # Count log messages at each severity level inside that window.
    num_errors = compute_hits(es, start, end, cloud_name, 'error')
    num_warn = compute_hits(es, start, end, cloud_name, 'warning')
    num_debug = compute_hits(es, start, end, cloud_name, 'debug')
    num_notice = compute_hits(es, start, end, cloud_name, 'notice')
    num_info = compute_hits(es, start, end, cloud_name, 'info')
    conn = connect_crdb(config)
    conn.set_session(autocommit=True)
    cur = conn.cursor()
    cur.execute("INSERT INTO {} VALUES ('{}', \
                {}, {}, {}, {}, {});".format(config['table_logsummary'][0],
                str(uuid), int(num_errors), int(num_warn), int(num_debug),
                int(num_notice), int(num_info)))
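
# Shape of the Elasticsearch response fields consumed above (field layout per
# the elasticsearch-py search API; the unit of the timestamp values is an
# assumption):
#
#   res['aggregations']['min_time']['value']         -> earliest timestamp
#   res['aggregations']['max_time']['value']         -> latest timestamp
#   res['hits']['hits'][0]['_source']['cloud_name']  -> cloud under test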