def plot_volume_or_prods(start_response, feedtype, host, col):
    """WSGI handler: stacked bar chart of per-path hourly traffic for a host.

    Fetches the last 36 hours of hourly stats for ``host``/``feedtype`` from
    the rtstats JSON service and renders one stacked bar per hour, one stack
    segment per origin->relay path.

    Parameters
    ----------
    start_response : callable
        WSGI start_response callable.
    feedtype : str
        LDM feedtype to chart.
    host : str
        Hostname to query.
    col : str
        Column to plot: "nbytes" (shown as GiB) or a product count column.

    Returns
    -------
    list of bytes
        PNG payload, or a plain-text error body on API failure.
    """
    utcnow = datetime.datetime.utcnow() - datetime.timedelta(hours=36)
    since = utcnow.strftime("%Y-%m-%dT%H:%M:%SZ")
    req = requests.get(("http://rtstatstest/services/host/%s/hourly.json"
                        "?feedtype=%s&since=%s") % (host, feedtype, since))
    if req.status_code != 200:
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"API Service Failure..."]
    j = req.json()
    df = pd.DataFrame(j["data"], columns=j["columns"])
    df["valid"] = pd.to_datetime(df["valid"])
    # composite key so each origin->relay pair pivots to its own column
    df["path"] = df["origin"] + "_v_" + df["relay"]
    df["nbytes"] /= 1024.0 * 1024.0 * 1024.0  # convert to GiB
    fig = plt.figure(figsize=(11, 7))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    # keyword args: positional pivot arguments were removed in pandas 2.0
    pdf = df[["valid", "path", col]].pivot(
        index="valid", columns="path", values=col)
    pdf = pdf.fillna(0)
    floor = np.zeros(len(pdf.index))
    colors = plt.get_cmap("rainbow")(np.linspace(0, 1, len(pdf.columns)))
    for i, path in enumerate(pdf.columns):
        tokens = path.split("_v_")
        lbl = "%s\n-> %s" % (tokens[0], tokens[1])
        if tokens[0] == tokens[1]:
            # origin == relay means this host is the source of the feed
            lbl = "%s [SRC]" % (tokens[0], )
        ax.bar(
            pdf.index.values,
            pdf[path].values,
            width=1 / 24.0,
            bottom=floor,
            fc=colors[i],
            label=lbl,
            align="center",
        )
        # raise the floor so the next path stacks on top
        floor += pdf[path].values
    ax.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0.0, fontsize=12)
    ax.set_ylabel("GiB" if col == "nbytes" else "Number of Products")
    fancy_labels(ax)
    ax.set_title(("%s [%s]\n%s through %s UTC") % (
        host,
        feedtype,
        df["valid"].min().strftime("%Y%m%d/%H%M"),
        df["valid"].max().strftime("%Y%m%d/%H%M"),
    ))
    ax.grid(True)
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    bio = BytesIO()
    plt.savefig(bio)
    # BUG FIX: close the figure; leaving it open leaks memory on every
    # request in a long-running WSGI process
    plt.close(fig)
    bio.seek(0)
    return [bio.read()]
def handle_volume_stats_plot(start_response, hostname, period):
    """WSGI handler: stacked bar chart of data volume by feedtype for a host.

    Parameters
    ----------
    start_response : callable
        WSGI start_response callable.
    hostname : str
        Hostname to query.
    period : str
        Service aggregation period, e.g. "hourly" or "daily".

    Returns
    -------
    list of bytes
        PNG payload, or a plain-text error body on API failure.
    """
    req = requests.get(("http://rtstatstest/services/host/%s/"
                        "%s.json") % (hostname, period))
    if req.status_code != 200:
        # BUG FIX: start_response used to be called up-front with an
        # image/png content-type (and then a second time later, which is
        # illegal per PEP 3333 without exc_info).  Answer the failure as
        # text/plain and call start_response exactly once per path,
        # consistent with the other handlers in this file.
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"API Service Failure..."]
    j = req.json()
    df = pd.DataFrame(j["data"], columns=j["columns"])
    df["nbytes"] /= 1024 * 1024  # convert to MiB
    df["valid"] = pd.to_datetime(df["valid"])
    fig = plt.figure(figsize=(11, 7))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    # collapse the per-path rows down to one total per (hour, feedtype)
    gdf = (df[["valid", "feedtype", "nbytes"]]
           .groupby(["valid", "feedtype"]).sum())
    gdf.reset_index(inplace=True)
    # keyword args: positional pivot arguments were removed in pandas 2.0
    pdf = gdf.pivot(index="valid", columns="feedtype", values="nbytes")
    pdf = pdf.fillna(0)
    floor = np.zeros(len(pdf.index))
    colors = plt.get_cmap("rainbow")(np.linspace(0, 1, len(pdf.columns)))
    for i, feedtype in enumerate(pdf.columns):
        # hourly bars are thin, so match edge to face; otherwise outline
        # each bar in black for legibility
        ec = colors[i] if period == "hourly" else "k"
        ax.bar(
            pdf.index.values,
            pdf[feedtype].values,
            width=1 / 24.0,
            bottom=floor,
            fc=colors[i],
            ec=ec,
            label=feedtype,
            align="center",
        )
        # raise the floor so the next feedtype stacks on top
        floor += pdf[feedtype].values
    ax.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0.0, fontsize=12)
    ax.set_title(("%s\n%s to %s UTC") % (
        hostname,
        df["valid"].min().strftime("%Y%m%d/%H%M"),
        df["valid"].max().strftime("%Y%m%d/%H%M"),
    ))
    ax.grid(True)
    fancy_labels(ax)
    ax.set_ylabel("Data Volume [MiB]")
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    bio = BytesIO()
    plt.savefig(bio)
    # BUG FIX: close the figure to avoid leaking memory per request
    plt.close(fig)
    bio.seek(0)
    return [bio.read()]
def plot_latency_histogram(start_response, feedtype, host):
    """WSGI handler: percentage histogram of average latency for a feed.

    Draws a 50-bin histogram of ``avg_latency`` weighted so the y-axis reads
    in percent, with vertical lines marking the 75/90/95/99th percentiles.

    Parameters
    ----------
    start_response : callable
        WSGI start_response callable.
    feedtype : str
        LDM feedtype to chart.
    host : str
        Hostname to query.

    Returns
    -------
    list of bytes
        PNG payload, or a plain-text error body on API failure.
    """
    req = requests.get(("http://rtstatstest/services/host/%s/rtstats.json"
                        "?feedtype=%s") % (host, feedtype))
    if req.status_code != 200:
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"API Service Failure..."]
    j = req.json()
    df = pd.DataFrame(j["data"], columns=j["columns"])
    df["entry_added"] = pd.to_datetime(df["entry_added"])
    (fig, ax) = plt.subplots(1, 1, figsize=(11, 7))
    data = df["avg_latency"].values
    desc = df["avg_latency"].describe(percentiles=[0.75, 0.90, 0.95, 0.99])
    for v, c in zip([75, 90, 95, 99], ["r", "b", "g", "k"]):
        value = desc["%s%%" % (v, )]
        ax.axvline(value, label="%s%% %.2fs" % (v, value), color=c, lw=2)
    # BUG FIX: the `normed` kwarg was removed in matplotlib 3.x; `density`
    # is the supported equivalent.  The weights scale each count so the
    # bar heights sum to 100 (percent).
    ax.hist(data, 50, density=False,
            weights=np.zeros_like(data) + 100.0 / data.size)
    ax.set_title(("%s [%s]\n%s to %s UTC") % (
        host,
        feedtype,
        df["entry_added"].min().strftime("%Y%m%d/%H%M"),
        df["entry_added"].max().strftime("%Y%m%d/%H%M"),
    ))
    ax.grid(True)
    ax.legend(loc="best")
    ax.set_ylabel("Percent [%]")
    ax.set_xlabel("Latency [s]")
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    bio = BytesIO()
    plt.savefig(bio)
    # BUG FIX: close the figure to avoid leaking memory per request
    plt.close(fig)
    bio.seek(0)
    return [bio.read()]
def plot_latency(start_response, feedtype, host, logopt):
    """WSGI handler: time series of average latency per path for one feed.

    Parameters
    ----------
    start_response : callable
        WSGI start_response callable.
    feedtype : str
        LDM feedtype to filter the host's rtstats down to.
    host : str
        Hostname to query.
    logopt : str
        "LOG" (case-insensitive) for a log-scaled y-axis; anything else
        keeps a linear axis.

    Returns
    -------
    list of bytes
        PNG payload, or a plain-text error body on API failure.
    """
    req = requests.get(
        ("http://rtstatstest/services/host/%s/rtstats.json") % (host, ))
    if req.status_code != 200:
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"API Service Failure..."]
    j = req.json()
    df = pd.DataFrame(j["data"], columns=j["columns"])
    # BUG FIX: take an explicit copy of the filtered frame; assigning a
    # column into a boolean-indexed view trips pandas' chained-assignment
    # warning and may not write through.
    df = df[df["feedtype"] == feedtype].copy()
    df["entry_added"] = pd.to_datetime(df["entry_added"])
    fig = plt.figure(figsize=(11, 7))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    # one line per origin->relay path
    for _, grp in df.groupby("feedtype_path_id"):
        row = grp.iloc[0]
        path = "%s\n-> %s" % (row["origin"], row["relay"])
        ax.plot(grp["entry_added"], grp["avg_latency"], label=path)
    ax.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0.0, fontsize=12)
    ax.set_title(("%s [%s]\n%s to %s UTC") % (
        host,
        feedtype,
        df["entry_added"].min().strftime("%Y%m%d/%H%M"),
        df["entry_added"].max().strftime("%Y%m%d/%H%M"),
    ))
    ax.grid(True)
    if logopt.upper() == "LOG":
        ax.set_yscale("log")
    fancy_labels(ax)
    ax.set_ylabel("Average Latency [s]")
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    bio = BytesIO()
    plt.savefig(bio)
    # BUG FIX: close the figure to avoid leaking memory per request
    plt.close(fig)
    bio.seek(0)
    return [bio.read()]
def plot_volume_long(start_response, feedtype, host, period, col="nbytes"):
    """WSGI handler: stacked per-path volume chart over longer periods.

    Like :func:`plot_volume_or_prods` but selects the hourly/daily/weekly
    aggregation service (and bar width) from the legacy ``period`` request
    argument.

    Parameters
    ----------
    start_response : callable
        WSGI start_response callable.
    feedtype : str
        LDM feedtype to chart.
    host : str
        Hostname to query.
    period : str
        Legacy URL-encoded bin argument: "-b%2086400" selects daily,
        "-b%20604800" selects weekly; anything else selects hourly.
    col : str, optional
        Column to plot: "nbytes" (shown as GiB) or a product count column.

    Returns
    -------
    list of bytes
        PNG payload, or a plain-text error body on API failure.
    """
    service = "hourly"
    barwidth = 1 / 24.0  # bar widths are in days
    if period == "-b%2086400":
        service = "daily"
        barwidth = 1.0
    elif period == "-b%20604800":
        service = "weekly"
        barwidth = 7.0
    # NOTE(review): debug leftover; kept so server logs are unchanged,
    # but consider removing or routing through real logging
    sys.stderr.write(repr(period))
    req = requests.get(("http://rtstatstest/services/host/%s/%s.json"
                        "?feedtype=%s") % (host, service, feedtype))
    if req.status_code != 200:
        headers = [("Content-type", "text/plain")]
        start_response("200 OK", headers)
        return [b"API Service Failure..."]
    j = req.json()
    df = pd.DataFrame(j["data"], columns=j["columns"])
    df["valid"] = pd.to_datetime(df["valid"])
    # composite key so each origin->relay pair pivots to its own column
    df["path"] = df["origin"] + "_v_" + df["relay"]
    df["nbytes"] /= 1024.0 * 1024.0 * 1024.0  # convert to GiB
    fig = plt.figure(figsize=(11, 7))
    ax = plt.axes([0.1, 0.1, 0.6, 0.8])
    # keyword args: positional pivot arguments were removed in pandas 2.0
    pdf = df[["valid", "path", col]].pivot(
        index="valid", columns="path", values=col)
    pdf = pdf.fillna(0)
    floor = np.zeros(len(pdf.index))
    colors = plt.get_cmap("rainbow")(np.linspace(0, 1, len(pdf.columns)))
    for i, path in enumerate(pdf.columns):
        tokens = path.split("_v_")
        lbl = "%s\n-> %s" % (tokens[0], tokens[1])
        if tokens[0] == tokens[1]:
            # origin == relay means this host is the source of the feed
            lbl = "%s [SRC]" % (tokens[0], )
        ax.bar(
            pdf.index.values,
            pdf[path].values,
            width=barwidth,
            bottom=floor,
            fc=colors[i],
            ec=colors[i],
            label=lbl,
            align="center",
        )
        # raise the floor so the next path stacks on top
        floor += pdf[path].values
    ax.legend(bbox_to_anchor=(1.01, 1), loc=2, borderaxespad=0.0, fontsize=12)
    ax.set_ylabel("GiB" if col == "nbytes" else "Number of Products")
    fancy_labels(ax)
    ax.set_title(("%s [%s]\n%s through %s UTC") % (
        host,
        feedtype,
        df["valid"].min().strftime("%Y%m%d/%H%M"),
        df["valid"].max().strftime("%Y%m%d/%H%M"),
    ))
    ax.grid(True)
    fig.text(0.01, 0.01,
             "Backend JSON timing: %.2fs" % (j["query_time[secs]"], ))
    headers = [("Content-type", "image/png")]
    start_response("200 OK", headers)
    bio = BytesIO()
    plt.savefig(bio)
    # BUG FIX: close the figure to avoid leaking memory per request
    plt.close(fig)
    bio.seek(0)
    return [bio.read()]