import calendar
import json
import os
import subprocess
import sys
import tempfile
from datetime import datetime
from subprocess import getoutput

import requests

import prom

# METRICS_START_SKIP_DURATION, METRICS_END_SKIP_DURATION,
# METRICS_SUMMARY_DURATION and NAMESPACE are module-level constants
# defined elsewhere in this file.


def syncFortio(url, table, selector=None):
    dataurl = url + "/fortio/data/"
    data = requests.get(dataurl)
    fd, datafile = tempfile.mkstemp()
    out = os.fdopen(fd, "wt")
    cnt = 0

    for fl in converDataToList(data.text):
        gd = fetch(dataurl + fl)
        st = gd['StartTime']
        # An optional selector filters runs: "^prefix" matches against
        # StartTime, anything else matches against the run Labels.
        if selector is not None:
            if selector.startswith("^"):
                if not st.startswith(selector[1:]):
                    continue
            elif selector not in gd["Labels"]:
                continue

        sd = datetime.strptime(st[:19], "%Y-%m-%dT%H:%M:%S")
        print("Fetching prometheus metrics for", sd, end=' ')
        if gd['errorPercent'] > 10:
            print("... Run resulted in", gd['errorPercent'], "% errors")
            continue
        # give 30s after start of test
        prom_start = calendar.timegm(sd.utctimetuple()) + 30
        p = prom.Prom("http://prometheus.local", 120, start=prom_start)
        prom_metrics = p.fetch_cpu_and_mem()
        if not prom_metrics:
            print("... Not found")
            continue
        else:
            print("")

        gd.update(prom_metrics)
        out.write(json.dumps(gd) + "\n")
        cnt += 1

    out.close()
    print("Wrote {} records to {}".format(cnt, datafile))

    # Load the records into BigQuery. Popen's stdout/stderr are None
    # unless pipes are requested, so capture them explicitly.
    bq = subprocess.Popen(
        "bq insert {table} {datafile}".format(
            table=table, datafile=datafile).split(),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = bq.communicate()
    print(stdout)
    print(stderr)
    return bq.returncode
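
# converDataToList() and fetch() are helpers defined elsewhere in this
# file. As a rough sketch of the contract syncFortio assumes, fetch()
# retrieves one result file and returns the parsed JSON. This is a
# hypothetical reconstruction, not the file's actual code; the later
# variants below treat a None return as "skip this file", so this
# sketch mirrors that behavior:


def fetch(url):
    """Fetch a fortio result file and return it as a dict (sketch)."""
    try:
        res = requests.get(url)
        res.raise_for_status()
        return res.json()
    except (requests.exceptions.RequestException, ValueError):
        return None
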
def sync_fortio(url, table, selector=None, promUrl="", csv=None,
                csv_output=""):
    listurl = url + "/fortio/data/"
    try:
        listdata = requests.get(listurl)
    except requests.exceptions.RequestException as e:
        # TODO handling connection refused issue after logging available
        print(e)
        sys.exit(1)

    fd, datafile = tempfile.mkstemp(suffix=".json")
    out = os.fdopen(fd, "wt")
    stats = []
    cnt = 0
    dataurl = url + "/data/"
    data = []

    for fl in convert_data_to_list(listdata.text):
        gd = fetch(dataurl + fl)
        if gd is None:
            continue
        st = gd['StartTime']
        if selector is not None:
            if selector.startswith("^"):
                if not st.startswith(selector[1:]):
                    continue
            elif selector not in gd["Labels"]:
                continue

        if promUrl:
            sd = datetime.strptime(st[:19], "%Y-%m-%dT%H:%M:%S")
            print("Fetching prometheus metrics for", sd, gd["Labels"])
            if gd['errorPercent'] > 10:
                print("... Run resulted in", gd['errorPercent'], "% errors")
                continue
            # Runs shorter than the skipped head plus tail cannot yield
            # a meaningful metrics window.
            min_duration = METRICS_START_SKIP_DURATION + METRICS_END_SKIP_DURATION
            if min_duration > gd['ActualDuration']:
                print("... {} duration={}s is less than minimum {}s".format(
                    gd["Labels"], gd['ActualDuration'], min_duration))
                continue
            prom_start = calendar.timegm(
                sd.utctimetuple()) + METRICS_START_SKIP_DURATION
            duration = min(gd['ActualDuration'] - min_duration,
                           METRICS_SUMMARY_DURATION)
            p = prom.Prom(promUrl, duration, start=prom_start)
            prom_metrics = p.fetch_cpu_and_mem()
            if not prom_metrics:
                print("... Not found")
                continue
            else:
                print("")
            gd.update(prom_metrics)

        data.append(gd)
        out.write(json.dumps(gd) + "\n")
        stats.append(gd)
        cnt += 1

    out.close()
    print("Wrote {} json records to {}".format(cnt, datafile))

    if csv is not None:
        write_csv(csv, data, csv_output)

    if table:
        return write_table(table, datafile)
    return 0
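
# write_table() is defined elsewhere in this file. Judging from the
# first version above, which shelled out to "bq insert", a minimal
# sketch under that assumption could look like the following
# (hypothetical reconstruction, not the file's actual code):


def write_table(table, datafile):
    """Load newline-delimited JSON records into a BigQuery table (sketch)."""
    cmd = "bq insert {table} {datafile}".format(
        table=table, datafile=datafile)
    completed = subprocess.run(cmd.split(), capture_output=True, text=True)
    print(completed.stdout)
    print(completed.stderr)
    return completed.returncode
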
def sync_fortio(url, table, selector=None, promUrl="", csv=None,
                csv_output="", namespace=NAMESPACE):
    # Copy the raw fortio result files out of the fortioclient pod
    # rather than fetching them over HTTP.
    get_fortioclient_pod_cmd = "kubectl -n {namespace} get pods | grep fortioclient".format(
        namespace=namespace)
    fortioclient_pod_name = getoutput(get_fortioclient_pod_cmd).split(" ")[0]
    temp_dir_path = tempfile.gettempdir() + "/fortio_json_data"
    get_fortio_json_cmd = "kubectl cp -c shell {namespace}/{fortioclient}:/var/lib/fortio {tempdir}"\
        .format(namespace=namespace, fortioclient=fortioclient_pod_name,
                tempdir=temp_dir_path)
    run_command(get_fortio_json_cmd)

    fd, datafile = tempfile.mkstemp(suffix=".json")
    out = os.fdopen(fd, "wt")
    stats = []
    cnt = 0
    data = []

    for filename in os.listdir(temp_dir_path):
        print(filename)
        with open(os.path.join(temp_dir_path, filename), 'r') as f:
            try:
                data_dict = json.load(f, strict=False)
                if f.read(1):
                    # json.load consumes the whole document, so any
                    # byte left over means trailing garbage.
                    print("json file has trailing data")
            except json.JSONDecodeError as e:
                # Skip unparseable files; continuing without data_dict
                # would raise a NameError below.
                print("skipping {}: {}".format(filename, e))
                continue

        gd = convert_data(data_dict)
        if gd is None:
            continue
        st = gd['StartTime']
        if selector is not None:
            if selector.startswith("^"):
                if not st.startswith(selector[1:]):
                    continue
            elif selector not in gd["Labels"]:
                continue

        if promUrl:
            sd = datetime.strptime(st[:19], "%Y-%m-%dT%H:%M:%S")
            print("Fetching prometheus metrics for", sd, gd["Labels"])
            if gd['errorPercent'] > 10:
                print("... Run resulted in", gd['errorPercent'], "% errors")
                continue
            min_duration = METRICS_START_SKIP_DURATION + METRICS_END_SKIP_DURATION
            if min_duration > gd['ActualDuration']:
                print("... {} duration={}s is less than minimum {}s".format(
                    gd["Labels"], gd['ActualDuration'], min_duration))
                continue
            prom_start = calendar.timegm(
                sd.utctimetuple()) + METRICS_START_SKIP_DURATION
            duration = min(gd['ActualDuration'] - min_duration,
                           METRICS_SUMMARY_DURATION)
            p = prom.Prom(promUrl, duration, start=prom_start)
            prom_metrics = p.fetch_istio_proxy_cpu_and_mem()
            if not prom_metrics:
                print("... Not found")
                continue
            else:
                print("")
            gd.update(prom_metrics)

        data.append(gd)
        out.write(json.dumps(gd) + "\n")
        stats.append(gd)
        cnt += 1

    out.close()
    print("Wrote {} json records to {}".format(cnt, datafile))

    if csv is not None:
        write_csv(csv, data, csv_output)

    if table:
        return write_table(table, datafile)
    return 0
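
# run_command() and convert_data() are helpers defined elsewhere in the
# file. Assuming run_command simply shells out and fails loudly on a
# non-zero exit, a hypothetical minimal version might be:


def run_command(command):
    """Run a shell command, raising CalledProcessError on failure (sketch)."""
    # shell=True so that callers may pass shell constructs; the pod
    # lookup above relies on a "| grep" pipeline via getoutput().
    subprocess.check_call(command, shell=True)
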
def syncFortio(url, table, selector=None, promUrl="prometheus.local",
               csv=None):
    listurl = url + "/fortio/data/"
    listdata = requests.get(listurl)
    fd, datafile = tempfile.mkstemp()
    out = os.fdopen(fd, "wt")
    stats = []
    cnt = 0
    dataurl = url + "/data/"
    data = []

    for fl in converDataToList(listdata.text):
        gd = fetch(dataurl + fl)
        if gd is None:
            continue
        st = gd['StartTime']
        if selector is not None:
            if selector.startswith("^"):
                if not st.startswith(selector[1:]):
                    continue
            elif selector not in gd["Labels"]:
                continue

        sd = datetime.strptime(st[:19], "%Y-%m-%dT%H:%M:%S")
        print("Fetching prometheus metrics for", sd, gd["Labels"])
        if gd['errorPercent'] > 10:
            print("... Run resulted in", gd['errorPercent'], "% errors")
            continue
        if METRICS_START_SKIP_DURATION > gd['ActualDuration']:
            print("... {} duration={}s is less than minimum {}s".format(
                gd["Labels"], gd['ActualDuration'],
                METRICS_START_SKIP_DURATION))
            continue
        prom_start = calendar.timegm(
            sd.utctimetuple()) + METRICS_START_SKIP_DURATION
        duration = min(gd['ActualDuration'] - METRICS_START_SKIP_DURATION,
                       METRICS_SUMMARY_DURATION)
        p = prom.Prom(promUrl, duration, start=prom_start)
        prom_metrics = p.fetch_cpu_and_mem()
        if not prom_metrics:
            print("... Not found")
            continue
        else:
            print("")

        gd.update(prom_metrics)
        data.append(gd)
        out.write(json.dumps(gd) + "\n")
        stats.append(gd)
        cnt += 1

    out.close()
    print("Wrote {} records to {}".format(cnt, datafile))

    if csv is not None:
        write_csv(csv, data)

    if table:
        return write_table(table, datafile)
    return 0
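
# write_csv() is also defined elsewhere; its signature grew a
# csv_output argument in the later variants above. A minimal sketch of
# the three-argument form, assuming csv names the comma-separated
# record fields to extract (a hypothetical reconstruction):


def write_csv(csv_fields, data, csv_output):
    """Write selected fields of each run record to a CSV file (sketch)."""
    fields = csv_fields.split(",")
    with open(csv_output, "wt") as fl:
        fl.write(",".join(fields) + "\n")
        for gd in data:
            fl.write(",".join(str(gd.get(f, "")) for f in fields) + "\n")
    print("Wrote {} csv records to {}".format(len(data), csv_output))
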