def index(self):
    """Home page with the latest test results"""
    # Migration (harstorage v1.0)
    migration_handler = MongoDB(collection="migration")
    if hasattr(c, "message"):
        return render("/error.html")

    status = migration_handler.collection.find_one({"status": "ok"})
    if status is None:
        redirect("/migration/status")

    # MongoDB handler
    mdb_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # Read aggregated data from database
    # Aggregation is based on unique labels, urls and latest timestamps
    latest_results = mdb_handler.collection.group(
        key=["label", "url"],
        condition=None,
        initial={"timestamp": "1970-01-01 01:00:00"},
        reduce="\
            function(doc, prev) { \
                if (doc.timestamp > prev.timestamp) { \
                    prev.timestamp = doc.timestamp; \
                } \
            }")

    key = lambda doc: doc["timestamp"]
    latest_results = sorted(latest_results, key=key, reverse=True)

    # Number of records
    c.rowcount = len(latest_results)

    # Populate data table with the latest test results
    c.metrics_table = [[], [], [], [], [], []]

    fields = ["timestamp", "label", "url", "total_size", "requests",
              "full_load_time"]

    for group in latest_results:
        condition = {"label": group["label"], "timestamp": group["timestamp"]}
        result = mdb_handler.collection.find_one(condition, fields=fields)

        c.metrics_table[0].append(result["timestamp"])
        c.metrics_table[1].append(result["label"])
        c.metrics_table[2].append(result["url"])
        c.metrics_table[3].append(result["total_size"])
        c.metrics_table[4].append(result["requests"])
        c.metrics_table[5].append(round(result["full_load_time"] / 1000.0, 1))

    return render("/home/core.html")
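# Note on the grouping above: both the JavaScript reduce function and the
# Python sort rely on the fact that "%Y-%m-%d %H:%M:%S" timestamps compare
# correctly as plain strings. A minimal check of that assumption (values are
# made up):
assert "2013-05-02 08:30:00" > "2013-05-01 12:00:00"
assert max(["1970-01-01 01:00:00", "2013-05-01 12:00:00"]) == "2013-05-01 12:00:00"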
def create(self):
    """Render form with list of labels and timestamps"""
    # MongoDB handler
    md_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # List of labels
    c.labels = list()

    for label in md_handler.collection.distinct("label"):
        c.labels.append(label)

    return render("/create/core.html")
def dashboard(self):
    """Dashboard page"""
    enabled = html.literal(config["app_conf"]["dashboard_enabled"])
    if enabled != "true":
        c.message = "Dashboard not enabled!"
        return render("/error.html")

    filename = os.path.join(config["app_conf"]["dashboard_config_dir"],
                            config["app_conf"]["dashboard_config_filename"])
    with open(filename) as json_file:
        configData = json.load(json_file)

    c.configData = html.literal(configData)

    return render("/dashboard/core.html")
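# The dashboard view above assumes three plain-string options in the Pylons
# app_conf (normally the [app:main] section of the deployment .ini). The
# option names come from the lookups above; the values here are hypothetical
# examples:
#
#   dashboard_enabled = true
#   dashboard_config_dir = /var/lib/harstorage
#   dashboard_config_filename = dashboard.json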
def document(self):
    """Render the error document"""
    resp = request.environ.get("pylons.original_response")
    c.message = literal(resp.status)

    return render("/error.html")
def harviewer(self):
    """HAR Viewer iframe"""
    # HAR Viewer customization via cookie
    response.set_cookie("phaseInterval", "-1", max_age=365 * 24 * 3600)

    return render("/harviewer.html")
def details(self):
    """Page with test results"""
    # Try to fetch data for selector box
    try:
        c.label = request.GET["url"]
        c.mode = "url"
    # Use label parameter instead of URL parameter
    except KeyError:
        c.label = request.GET["label"]
        c.mode = "label"

    # Generate context for selector
    self._set_options_in_selector(c.mode, c.label)

    # Define url for data aggregation
    if c.mode == "label":
        c.query = "/superposed/display?" + \
                  "step_1_label=" + c.label + \
                  "&step_1_start_ts=" + min(c.timestamp) + \
                  "&step_1_end_ts=" + max(c.timestamp)
        c.histo = "true"
    else:
        c.histo = "false"
        c.query = "None"

    return render("/details/core.html")
def dashboardLocation(self):
    """Dashboard location page"""
    filename = os.path.join(config["app_conf"]["dashboard_config_dir"],
                            config["app_conf"]["dashboard_config_filename"])
    with open(filename) as json_file:
        configData = json.load(json_file)

    c.configData = html.literal(configData)

    return render("/dashboard/location/core.html")
def migration(self):
    """Recompute metrics from stored HAR files (harstorage v1.0 migration)"""
    # MongoDB handler
    mdb_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    for document in mdb_handler.collection.find(fields=["_id", "har"]):
        id = document["_id"]

        har = HAR(document["har"], True)
        har.analyze()

        domains_req_ratio = dict()
        domains_weight_ratio = dict()

        for key, value in har.domains.items():
            domains_req_ratio[key] = value[0]
            domains_weight_ratio[key] = value[1]

        data = {"full_load_time": har.full_load_time,
                "onload_event": har.onload_event,
                "start_render_time": har.start_render_time,
                "time_to_first_byte": har.time_to_first_byte,
                "total_dns_time": har.total_dns_time,
                "total_transfer_time": har.total_transfer_time,
                "total_server_time": har.total_server_time,
                "avg_connecting_time": har.avg_connecting_time,
                "avg_blocking_time": har.avg_blocking_time,
                "total_size": har.total_size,
                "text_size": har.text_size,
                "media_size": har.media_size,
                "cache_size": har.cache_size,
                "requests": har.requests,
                "redirects": har.redirects,
                "bad_requests": har.bad_requests,
                "domains": len(har.domains),
                "weights_ratio": har.weight_ratio(),
                "requests_ratio": har.req_ratio(),
                "domains_ratio": har.domains}

        mdb_handler.collection.update({"_id": id}, {"$set": data})

    migration_handler = MongoDB(collection="migration")
    migration_handler.collection.insert({"status": "ok"})

    redirect("/")
def wrapper(*args):
    result, ext = function(*args)

    if result == True:
        try:
            if request.headers["automated"] == "true":
                return "Successful"
        except KeyError:
            redirect("/results/details?label=" + ext)
    else:
        try:
            if request.headers["automated"] == "true":
                return ext
        except KeyError:
            c.error = ext
            return render("/upload.html")
def wrapper(*args):
    result, ext = function(*args)

    if result:
        try:
            if request.headers["automated"] == "true":
                return "Successful"
        except KeyError:
            redirect("/results/details?label=" + ext)
    else:
        try:
            if request.headers["automated"] == "true":
                return ext
        except KeyError:
            c.error = ext
            return render("/upload.html")
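# The two wrapper() variants above are the inner function of a decorator:
# they close over "function", so they only make sense inside an outer shell
# that supplies it. A minimal sketch of that shell (the decorator name and
# the decorated action are hypothetical; only the wrapper body comes from
# the code above):
def report_upload_result(function):
    def wrapper(*args):
        result, ext = function(*args)
        # ... branch on result/ext exactly as in the wrapper above:
        # plain-text reply for clients sending the "automated: true" header,
        # redirect or error page for browsers ...
        return ext

    return wrapper

# Hypothetical usage on an upload action that returns (success, label):
# @report_upload_result
# def upload(self):
#     ...
#     return True, label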
def migration(self):
    """Recompute metrics from stored HAR files (harstorage v1.0 migration)"""
    # MongoDB handler
    mdb_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    for document in mdb_handler.collection.find(fields=["_id", "har"]):
        id = document["_id"]

        har = HAR(document["har"], True)
        har.analyze()

        domains_req_ratio = dict()
        domains_weight_ratio = dict()

        for key, value in har.domains.items():
            domains_req_ratio[key] = value[0]
            domains_weight_ratio[key] = value[1]

        data = {"full_load_time": har.full_load_time,
                "onload_event": har.onload_event,
                "start_render_time": har.start_render_time,
                "time_to_first_byte": har.time_to_first_byte,
                "total_dns_time": har.total_dns_time,
                "total_transfer_time": har.total_transfer_time,
                "total_server_time": har.total_server_time,
                "avg_connecting_time": har.avg_connecting_time,
                "avg_blocking_time": har.avg_blocking_time,
                "total_size": har.total_size,
                "text_size": har.text_size,
                "media_size": har.media_size,
                "cache_size": har.cache_size,
                "requests": har.requests,
                "redirects": har.redirects,
                "bad_requests": har.bad_requests,
                "domains": len(har.domains),
                "weights_ratio": har.weight_ratio(),
                "requests_ratio": har.req_ratio(),
                "domains_ratio": har.domains}

        mdb_handler.collection.update({"_id": id}, {"$set": data})

    migration_handler = MongoDB(collection="migration")
    migration_handler.collection.insert({"status": "ok"})

    redirect(h.url_for('/'))
def details(self):
    """Page with test results"""
    # Try to fetch data for selector box
    try:
        c.label = request.GET["url"]
        c.mode = "url"
    # Use label parameter instead of URL parameter
    except KeyError:
        c.label = request.GET["label"]
        c.mode = "label"

    # Try to fetch time filter
    try:
        timeFilter = request.GET["timeFilter"]
    except KeyError:
        timeFilter = "30"

    if timeFilter == "7":
        c.startTs = strftime("%Y-%m-%d %H:%M:%S",
                             gmtime(time.time() - 168 * 60 * 60))
    elif timeFilter == "30":
        c.startTs = strftime("%Y-%m-%d %H:%M:%S",
                             gmtime(time.time() - 720 * 60 * 60))
    elif timeFilter == "60":
        c.startTs = strftime("%Y-%m-%d %H:%M:%S",
                             gmtime(time.time() - 1440 * 60 * 60))
    else:
        c.startTs = "1970-01-01 01:00:00"

    # Generate context for selector
    self._set_options_in_selector(c.mode, c.label, c.startTs)

    # Define url for data aggregation
    if c.mode == "label":
        c.query = "/superposed/display?" + \
                  "step_1_label=" + c.label + \
                  "&step_1_label_hidden=" + c.label + \
                  "&step_1_start_ts=" + c.startTs + \
                  "&step_1_end_ts=" + max(c.timestamp)
        c.histo = "true"
    else:
        c.histo = "false"
        c.query = "None"

    return render("/details/core.html")
def index(self):
    """Home page with the latest test results"""
    # Migration (harstorage v1.0)
    migration_handler = MongoDB(collection="migration")
    if hasattr(c, "message"):
        return render("/error.html")

    status = migration_handler.collection.find_one({"status": "ok"})
    if status is None:
        redirect("/migration/status")

    # MongoDB handler
    mdb_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # Read aggregated data from database
    # Aggregation is based on unique labels, urls and latest timestamps
    #
    # The original group() call was replaced with an aggregation pipeline.
    # The pipeline already returns every field needed for the table, so no
    # follow-up queries to MongoDB are required to fill in the details.
    latest_results = mdb_handler.collection.aggregate([
        {"$group": {"_id": {"label": "$label", "url": "$url"},
                    "timestamp": {"$last": "$timestamp"},
                    "total_size": {"$last": "$total_size"},
                    "requests": {"$last": "$requests"},
                    "full_load_time": {"$last": "$full_load_time"}}},
        {"$sort": {"timestamp": -1}}
    ])

    # Number of records. Because the query changed, the documents are read
    # from the "result" key of the command response instead of a plain list.
    c.rowcount = len(latest_results["result"])

    # Populate data table with the latest test results
    c.metrics_table = [[], [], [], [], [], []]

    # Loop through the aggregated results and fill the table
    for result in latest_results["result"]:
        c.metrics_table[0].append(result["timestamp"])
        c.metrics_table[1].append(result["_id"]["label"])
        c.metrics_table[2].append(result["_id"]["url"])
        c.metrics_table[3].append(result["total_size"])
        c.metrics_table[4].append(result["requests"])
        c.metrics_table[5].append(round(result["full_load_time"] / 1000.0, 1))

    return render("/home/core.html")
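# Two notes on the pipeline above:
#
# * With pymongo 2.x, Collection.aggregate() returns the raw command
#   response, a dict of the form {"ok": 1.0, "result": [...]}, which is why
#   the documents are read from latest_results["result"]. pymongo 3.0+
#   returns a cursor instead, in which case the same data would be consumed
#   roughly like this (a sketch, not taken from the source):
#
#       for result in mdb_handler.collection.aggregate(pipeline):
#           label = result["_id"]["label"]
#           ...
#
# * $last picks the last document in the order documents reach the $group
#   stage; since the $sort stage runs after the grouping, "last" here relies
#   on the collection's natural (insertion) order to mean "most recent".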
def display(self):
    """Render page with column chart and data table"""
    # MongoDB handler
    md_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # Checkbox options
    c.chart_type = request.GET.get("chart", None)
    c.table = request.GET.get("table", "false")
    init = request.GET.get("metric", "true")

    c.chart = "true" if c.chart_type else "false"

    # Metric option
    c.metric = request.GET.get("metric", "Average")

    # Number of records
    if c.chart == "true" and c.table == "true" and init != "true":
        c.rowcount = len(request.GET) / 3 - 1
    else:
        c.rowcount = len(request.GET) / 3

    # Data containers
    METRICS = ("full_load_time", "requests", "total_size", "ps_scores",
               "onload_event", "start_render_time", "time_to_first_byte",
               "total_dns_time", "total_transfer_time", "total_server_time",
               "avg_connecting_time", "avg_blocking_time", "text_size",
               "media_size", "cache_size", "redirects", "bad_requests",
               "domains", "api_time")

    c.headers = ["Label", "Full Load Time (ms)", "Total Requests",
                 "Total Size (kB)", "Page Speed Score", "onLoad Event (ms)",
                 "Start Render Time (ms)", "Time to First Byte (ms)",
                 "Total DNS Time (ms)", "Total Transfer Time (ms)",
                 "Total Server Time (ms)", "Avg. Connecting Time (ms)",
                 "Avg. Blocking Time (ms)", "Text Size (kB)",
                 "Media Size (kB)", "Cache Size (kB)", "Redirects",
                 "Bad Requests", "Domains", "API Time (ms)"]

    TITLES = ["Full Load Time", "Total Requests", "Total Size",
              "Page Speed Score", "onLoad Event", "Start Render Time",
              "Time to First Byte", "Total DNS Time", "Total Transfer Time",
              "Total Server Time", "Avg. Connecting Time",
              "Avg. Blocking Time", "Text Size", "Media Size", "Cache Size",
              "Redirects", "Bad Requests", "Domains", "API Time"]

    # Set of metrics to exclude (due to missing data)
    exclude = set()

    data = dict()
    for metric in METRICS:
        data[metric] = list()
    data["label"] = list()

    # Data table
    c.metrics_table = list()
    c.metrics_table.append(list())

    # Test results from database
    for row in range(c.rowcount):
        # Parameters from GET request
        label = request.GET["step_" + str(row + 1) + "_label"]
        start_ts = request.GET["step_" + str(row + 1) + "_start_ts"]
        end_ts = request.GET["step_" + str(row + 1) + "_end_ts"]

        # Label
        c.metrics_table[0].append(label)
        data["label"].append(row)
        data["label"][row] = label

        # Fetch test results
        condition = {"label": label,
                     "timestamp": {"$gte": start_ts, "$lte": end_ts}}
        documents = md_handler.collection.find(condition, fields=METRICS)

        for metric in METRICS:
            data[metric].append(row)
            data[metric][row] = list()

        for document in documents:
            for metric in METRICS:
                if metric != "ps_scores":
                    data[metric][row].append(document[metric])
                else:
                    data[metric][row].append(document[metric]["Total Score"])

    # Aggregation
    c.points = str()

    for row in range(c.rowcount):
        c.points += data["label"][row] + "#"

    column = 1
    agg_handler = Aggregator()

    for metric in METRICS:
        c.metrics_table.append(list())
        c.points = c.points[:-1] + ";"

        for row in range(c.rowcount):
            if c.metric == "Average":
                value = agg_handler.average(data[metric][row])
            elif c.metric == "Minimum":
                value = agg_handler.minimum(data[metric][row])
            elif c.metric == "Maximum":
                value = agg_handler.maximum(data[metric][row])
            elif c.metric == "90th Percentile":
                value = agg_handler.percentile(data[metric][row], 0.9)
            elif c.metric == "Median":
                value = agg_handler.percentile(data[metric][row], 0.5)

            if value == "n/a":
                exclude.add(metric)
            else:
                c.points += str(value) + "#"
                c.metrics_table[column].append(value)

        column += 1

    # Update list of titles
    if "onload_event" in exclude:
        TITLES.pop(TITLES.index("onLoad Event"))
    if "start_render_time" in exclude:
        TITLES.pop(TITLES.index("Start Render Time"))

    header = str()
    for title in TITLES:
        header += title + "#"

    c.points = header[:-1] + ";" + c.points[:-1]

    return render("/display/core.html")
def status(self):
    """Migration status page"""
    return render("/migration.html")
def histogram(self):
    """Render chart with histograms"""
    # MongoDB handler
    md_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # Options
    c.label = request.GET["label"]
    c.metric = request.GET["metric"]

    # Metrics
    METRICS = [("full_load_time", "Full Load Time"),
               ("onload_event", "onLoad Event"),
               ("start_render_time", "Start Render Time"),
               ("time_to_first_byte", "Time to First Byte"),
               ("total_dns_time", "Total DNS Time"),
               ("total_transfer_time", "Total Transfer Time"),
               ("total_server_time", "Total Server Time"),
               ("avg_connecting_time", "Avg. Connecting Time"),
               ("avg_blocking_time", "Avg. Blocking Time")]

    time_metrics = ["full_load_time", "onload_event", "start_render_time",
                    "time_to_first_byte"]

    c.metrics = list()

    # Read data from database
    condition = {"label": c.label}
    fields = (metric for metric, title in METRICS)

    documents = md_handler.collection.find(condition, fields=fields)
    full_data = list(document for document in documents)

    for metric, title in METRICS:
        try:
            data = (result[metric] for result in full_data)

            histogram = Histogram(data)

            if metric in time_metrics:
                ranges = histogram.ranges(True)
            else:
                ranges = histogram.ranges()

            frequencies = histogram.frequencies()

            if metric == c.metric:
                c.data = ""

                for occ_range in ranges:
                    c.data += occ_range + "#"
                c.data = c.data[:-1] + ";"

                for frequency in frequencies:
                    c.data += str(frequency) + "#"
                c.data = c.data[:-1] + ";"

                c.title = title

            c.metrics.append((metric, title))
        except (IndexError, TypeError, ValueError):
            pass

    if len(c.metrics):
        return render("/histogram/core.html")
    else:
        c.message = "Sorry! You don't have enough data."
        return render("/error.html")
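# Histogram.ranges() and Histogram.frequencies() are assumed to return,
# respectively, printable bucket labels and the occurrence count per bucket,
# so that c.data above ends up as "range#range#...;count#count#...;" for the
# chart. An illustrative sketch of that contract with made-up fixed-width
# buckets (not the project's implementation):
def fake_histogram(samples, bucket_size=500):
    """Group samples into fixed-width buckets and return (ranges, counts)."""
    buckets = {}
    for value in samples:
        low = (value // bucket_size) * bucket_size
        buckets[low] = buckets.get(low, 0) + 1
    ranges = ["%d-%d" % (low, low + bucket_size) for low in sorted(buckets)]
    counts = [buckets[low] for low in sorted(buckets)]
    return ranges, counts

# e.g. fake_histogram([120, 480, 900]) -> (["0-500", "500-1000"], [2, 1])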
def display(self):
    """Render page with column chart and data table"""
    # MongoDB handler
    md_handler = MongoDB()
    if hasattr(c, "message"):
        return render("/error.html")

    # Checkbox options
    c.chart_type = request.GET.get("chart", None)
    c.table = request.GET.get("table", "false")
    init = request.GET.get("metric", "true")

    c.chart = "true" if c.chart_type else "false"

    # Aggregation option
    c.agg_type = request.GET.get("metric", "Average")

    # Number of records
    if c.chart == "true" and c.table == "true" and init != "true":
        c.rowcount = len(request.GET) / 3 - 1
    else:
        c.rowcount = len(request.GET) / 3

    # Data table
    c.headers = ["Label", "Full Load Time (ms)", "Total Requests",
                 "Total Size (kB)", "Page Speed Score", "onLoad Event (ms)",
                 "Start Render Time (ms)", "Time to First Byte (ms)",
                 "Total DNS Time (ms)", "Total Transfer Time (ms)",
                 "Total Server Time (ms)", "Avg. Connecting Time (ms)",
                 "Avg. Blocking Time (ms)", "Text Size (kB)",
                 "Media Size (kB)", "Cache Size (kB)", "Redirects",
                 "Bad Requests", "Domains"]

    c.metrics_table = list()
    c.metrics_table.append(list())

    # Chart points
    c.points = str()

    # Aggregator
    aggregator = Aggregator()

    # Test results from database
    for row_index in range(c.rowcount):
        # Parameters from GET request
        label = request.GET["step_" + str(row_index + 1) + "_label"]
        start_ts = request.GET["step_" + str(row_index + 1) + "_start_ts"]
        end_ts = request.GET["step_" + str(row_index + 1) + "_end_ts"]

        # Add label
        c.metrics_table[0].append(label)
        c.points += label + "#"

        # Fetch test results
        condition = {"label": label,
                     "timestamp": {"$gte": start_ts, "$lte": end_ts}}
        documents = md_handler.collection.find(condition,
                                               fields=aggregator.METRICS)

        # Add data row to aggregator
        aggregator.add_row(label, row_index, documents)

    # Aggregated data per column
    column = 1
    for metric in aggregator.METRICS:
        c.metrics_table.append(list())
        c.points = c.points[:-1] + ";"

        for row_index in range(c.rowcount):
            data_list = aggregator.data[metric][row_index]
            value = aggregator.get_aggregated_value(data_list, c.agg_type,
                                                    metric)

            c.points += str(value) + "#"
            c.metrics_table[column].append(value)

        column += 1

    # Names of series
    titles = str()
    for title in aggregator.TITLES:
        titles += title + "#"

    # Final chart points
    c.points = titles[:-1] + ";" + c.points[:-1]
    c.points = aggregator.exclude_missing(c.points)

    return render("/display/core.html")
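# The refactored display() above delegates the per-metric work to an
# Aggregator object. The interface it relies on, as read from the calls
# above (a summary of what the controller expects, not the implementation):
#
#   aggregator.METRICS      - ordered metric field names, one column each
#   aggregator.TITLES       - matching human-readable series names
#   aggregator.add_row(label, row_index, documents)
#                           - store the raw per-document values for one label
#   aggregator.data[metric][row_index]
#                           - the stored list of values for one table cell
#   aggregator.get_aggregated_value(data_list, agg_type, metric)
#                           - reduce a list according to the requested
#                             aggregation (Average, Minimum, Maximum,
#                             90th Percentile, Median)
#   aggregator.exclude_missing(points)
#                           - drop series without data from the
#                             "title#...;value#...;" chart string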
def dailyReport(self):
    """Daily report page"""
    return render("/dailyReport.html")