def streams():
    """Return a dict with the sorted stream names in the configured Kronos namespace."""
    namespace = current_app.config['KRONOS_NAMESPACE']
    client = KronosClient(current_app.config['KRONOS_URL'], namespace=namespace)
    # Namespace is passed explicitly to get_streams even though the client was
    # constructed with it — preserved from the original call.
    return {'streams': sorted(client.get_streams(namespace=namespace))}
def streams():
    """List every Kronos stream for the app's namespace, alphabetically sorted."""
    ns = app.config['KRONOS_NAMESPACE']
    kc = KronosClient(app.config['KRONOS_URL'], namespace=ns)
    stream_names = kc.get_streams(namespace=ns)
    stream_names = sorted(stream_names)
    return {'streams': stream_names}
def main(args):
    """Report per-stream event statistics for a Kronos deployment.

    For every stream visible to the client, counts the events in the
    [args.start, args.end) window, measures serialized payload sizes, and
    infers the stream schema.  Output goes either to a CSV file (args.csv)
    or to stdout via the module-level ``output`` template and ``indent``
    helper.

    Fix over the original: the CSV file handle was opened but never closed;
    it is now closed in a ``finally`` block so rows are flushed even if a
    stream read raises.
    """
    client = KronosClient(args.kronos_url)
    headers = [
        'stream',
        'total_events',
        'events_per_day',
        'events_per_sec',
        'payload_total_bytes',
        'payload_avg_bytes',
        'payload_med_bytes',
        'payload_95_bytes',
        'payload_99_bytes',
        'schema',
    ]
    csv_file = None
    if args.csv:
        csv_file = open(args.csv, 'w')
        writer = csv.DictWriter(csv_file, headers)
        writer.writeheader()
    else:
        print('-' * 79)
    try:
        for stream in client.get_streams():
            total_events = 0
            payloads = []
            # Payload size is measured as the length of the JSON-serialized
            # event, matching what would go over the wire.
            for event in client.get(stream, args.start, args.end):
                payloads.append(len(ujson.dumps(event)))
                total_events += 1
            if total_events == 0:
                # np.mean/percentile would fail on an empty list; skip early.
                indent('%s has no events' % stream, 2)
                print('-' * 79)
                continue
            # NOTE(review): assumes args.end > args.start (datetimes) —
            # a zero-length window would divide by zero.
            timeframe_sec = (args.end - args.start).total_seconds()
            schema = client.infer_schema(stream)['schema']
            context = dict(
                zip(headers, [
                    stream,
                    total_events,
                    (float(total_events) / timeframe_sec) * 60 * 60 * 24,
                    float(total_events) / timeframe_sec,
                    np.sum(payloads),
                    np.mean(payloads),
                    np.median(payloads),
                    np.percentile(payloads, 95),
                    np.percentile(payloads, 99),
                    schema,
                ]))
            if args.csv:
                writer.writerow(context)
            else:
                # ``output`` is presumably a module-level format template —
                # not visible in this chunk.
                indent(output % context, 2)
                print('-' * 79)
    finally:
        if csv_file is not None:
            csv_file.close()
def main(args):
    """Print or export per-stream statistics (counts, rates, payload sizes, schema).

    Iterates every stream known to the Kronos server at ``args.kronos_url``,
    tallies events between ``args.start`` and ``args.end``, and emits one
    row per stream either as CSV (when ``args.csv`` names an output file)
    or as indented text using the module-level ``output`` template.

    Fix over the original: ``csv_file`` leaked — it was opened and never
    closed.  The write loop is now wrapped in ``try/finally`` so the handle
    is always released.
    """
    client = KronosClient(args.kronos_url)
    headers = [
        'stream',
        'total_events',
        'events_per_day',
        'events_per_sec',
        'payload_total_bytes',
        'payload_avg_bytes',
        'payload_med_bytes',
        'payload_95_bytes',
        'payload_99_bytes',
        'schema',
    ]
    csv_file = None
    if args.csv:
        csv_file = open(args.csv, 'w')
        writer = csv.DictWriter(csv_file, headers)
        writer.writeheader()
    else:
        print('-' * 79)
    try:
        for stream in client.get_streams():
            total_events = 0
            payloads = []
            # Each event's size is its serialized JSON byte length.
            for event in client.get(stream, args.start, args.end):
                payloads.append(len(ujson.dumps(event)))
                total_events += 1
            if total_events == 0:
                # Avoid numpy statistics on an empty payload list.
                indent('%s has no events' % stream, 2)
                print('-' * 79)
                continue
            # NOTE(review): assumes a non-zero time window — confirm callers
            # always pass args.start < args.end.
            timeframe_sec = (args.end - args.start).total_seconds()
            schema = client.infer_schema(stream)['schema']
            context = dict(zip(headers, [
                stream,
                total_events,
                (float(total_events) / timeframe_sec) * 60 * 60 * 24,
                float(total_events) / timeframe_sec,
                np.sum(payloads),
                np.mean(payloads),
                np.median(payloads),
                np.percentile(payloads, 95),
                np.percentile(payloads, 99),
                schema,
            ]))
            if args.csv:
                writer.writerow(context)
            else:
                # ``output`` is a format template defined elsewhere in the file.
                indent(output % context, 2)
                print('-' * 79)
    finally:
        if csv_file is not None:
            csv_file.close()
def main(args):
    """Print the streams in a namespace, optionally filtered by a health check.

    With no check flags set, every stream name is printed.  When
    ``args.read_latency`` or ``args.fetch_timeout`` is given, a stream is
    printed only if ``check_stream`` passes for it.
    """
    client = KronosClient(args.kronos_url)
    # With a fetch timeout we scan the caller-supplied window unbounded;
    # otherwise probe from epoch 0 to "now" but cap at 1000 events.
    if args.fetch_timeout:
        start, end, limit = args.start, args.end, None
    else:
        start, end, limit = 0, datetime.utcnow(), 1000
    needs_check = bool(args.read_latency or args.fetch_timeout)
    for stream in client.get_streams(namespace=args.namespace):
        if not needs_check:
            print(stream)
        elif check_stream(client, args.namespace, stream, start, end,
                          limit, args.fetch_timeout, args.read_latency):
            print(stream)