Example #1
def stream_group_subscribe(rconnect,
                           stream_type,
                           metric_names,
                           group_name,
                           instance_start=None,
                           instance_end=None,
                           instance_list=None):
    args = stream_args(request.args)

    if instance_list is not None:
        instances = instance_list
    else:
        instances = [str(x) for x in range(instance_start, instance_end)]

    stream_names = []
    for metric in metric_names:
        for inst in instances:
            this_stream_name = "%s:%s:%s:%s" % (group_name, inst, metric,
                                                stream_type)
            stream_names.append(this_stream_name)

    def event_stream():
        for data in redis_api.subscribe_streams(rconnect, stream_names,
                                                **args):
            min_end_time = get_min_end_time(data)
            values = front_end_key_api(data)
            ret = {}
            ret["values"] = values
            ret["min_end_time"] = min_end_time
            ret = "data: %s\n\n" % json.dumps(ret)
            yield ret

    # Sometimes this fails if the client disconnects from the stream ungracefully.
    # TODO: figure out how to detect the disconnect.
    return Response(event_stream(), mimetype="text/event-stream")
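
For context, a minimal sketch of how a handler like this might be wired up as a Flask route. The URL layout, the `app` object, and the Redis connection `r` are assumptions for illustration, not part of the original code.

from flask import Flask, request

app = Flask(__name__)
r = None  # assumed Redis connection, created elsewhere in the real application

@app.route("/stream_group_subscribe/<stream_type>/<group_name>")
def stream_group_subscribe_route(stream_type, group_name):
    # e.g. GET /stream_group_subscribe/live/crate01?metric=rate&metric=baseline
    metrics = request.args.getlist("metric")
    return stream_group_subscribe(r, stream_type, metrics, group_name,
                                  instance_start=0, instance_end=4)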
Example #2
def ps_series(connection, ID):
    config = connection[1]

    # Parse the requested time range
    args = stream_args(request.args)
    start_t = args['start']  # Start time
    if start_t is None:
        # Default to 100 days ago if no start time is given
        start_t = datetime.now(timezone('UTC')) - timedelta(days=100)
        start_t = calendar.timegm(start_t.timetuple()) * 1e3 + start_t.microsecond / 1e3  # convert to unix ms

    stop_t = args['stop']  # Stop time
    if stop_t is None:
        # Default to now (UTC) if no stop time is given
        now = datetime.now(timezone('UTC'))
        stop_t = calendar.timegm(now.timetuple()) * 1e3 + now.microsecond / 1e3  # convert to unix ms

    # Remove the timing keys from the query arguments
    table_args = request.args.to_dict()
    table_args.pop('start', None)
    table_args.pop('stop', None)
    table_args.pop('n_data', None)
    table_args.pop('now', None)

    data = postgres_query([ID], start_t, stop_t, *connection, **table_args)

    # Format the data from the database query
    data_list = []
    for row in data:
        # Take the first non-null value column for this row
        value = None
        for i in range(len(config["value_names"])):
            accessor = "val%i" % i
            if row[accessor] is not None:
                value = row[accessor]
                break
        else:  # no good data for this timestamp, skip it
            continue

        # Throw out values > 1e30, which appear to be errors
        if value > 1e30:
            continue

        # Add the data to the list
        data_list.append([row['sample_time'], value])

    # Set up the return dictionary
    ret = {
        ID: data_list
    }

    return jsonify(values=ret)
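
All of these examples lean on a `stream_args` helper that is not shown here. Judging from the keys it is read with ('start', 'stop') and the keys popped from `table_args` above, it plausibly parses the timing parameters out of the query string. The sketch below is a guess at its shape, not the project's actual implementation.

# Hypothetical reconstruction of stream_args; the real helper may differ.
def stream_args(args):
    def to_int(key):
        v = args.get(key)
        return int(v) if v is not None else None
    return {
        "start": to_int("start"),    # unix ms, or None
        "stop": to_int("stop"),      # unix ms, or None
        "n_data": to_int("n_data"),  # max number of points, or None
    }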
Example #3
def stream_subscribe(rconnect, name):
    args = stream_args(request.args)

    def event_stream():
        for data in redis_api.subscribe_streams(rconnect, [name], **args):
            min_end_time = get_min_end_time(data)
            ret = {}
            ret["values"] = data
            ret["min_end_time"] = min_end_time
            ret = "data: %s\n\n" % json.dumps(ret)
            yield ret

    # Sometimes this fails if the client disconnects from the stream ungracefully.
    # TODO: figure out how to detect the disconnect.
    return Response(event_stream(), mimetype="text/event-stream")
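
A quick way to exercise either subscribe endpoint is to read the Server-Sent Events stream directly. This example uses the `requests` library and assumes the route is mounted at /stream_subscribe/<name> on a local dev server; adjust the URL to wherever it actually lives.

import json
import requests

# Assumed URL; the stream name "my_metric" is made up for illustration.
with requests.get("http://localhost:5000/stream_subscribe/my_metric", stream=True) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        # Each SSE event arrives as a "data: <json>" line followed by a blank line
        if line and line.startswith("data: "):
            payload = json.loads(line[len("data: "):])
            print(payload["min_end_time"], payload["values"])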
Example #4
def stream_group_online(rconnect, stream_type, metric_names, group_name,
                        instances, args):

    stream_names = []
    for metric in metric_names:
        for inst in instances:
            this_stream_name = "%s:%s:%s:%s" % (group_name, inst, metric,
                                                stream_type)
            stream_names.append(this_stream_name)

    data = redis_api.get_streams(rconnect, stream_names, **args)

    # get the end time of the least up-to-date stream for the front end
    min_end_time = get_min_end_time(data)

    values = front_end_key_api(data)

    return jsonify(values=values, min_end_time=min_end_time)
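
`get_min_end_time` is another project-internal helper used throughout these examples. Assuming `data` maps stream names to time-ordered (timestamp, value) lists, a plausible reading is that it returns the end time of the least up-to-date stream, along these lines:

# Hypothetical sketch; the real data layout may differ.
def get_min_end_time(data):
    end_times = [series[-1][0] for series in data.values() if series]
    return min(end_times) if end_times else 0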
Example #5
def stream_group(connect,
                 stream_type,
                 metric_names,
                 group_name,
                 instance_start=None,
                 instance_end=None,
                 instance_list=None):
    args = stream_args(request.args)

    if instance_list is not None:
        instances = instance_list
    else:
        instances = [str(x) for x in range(instance_start, instance_end)]

    if stream_type == "archived":
        return stream_group_archived(connect, stream_type, metric_names,
                                     group_name, instances, args)
    else:
        return stream_group_online(connect, stream_type, metric_names,
                                   group_name, instances, args)
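
A hedged illustration of calling this dispatcher; the connection objects and names here are made up. Note that per Example #6 the archived path expects `connect` to be a (connection, config) pair, while the online path expects a bare Redis connection, and that range() excludes instance_end, so 0..16 covers instances 0 through 15.

# Hypothetical call sites; redis_conn, pg_conn, and config are assumed to exist.
stream_group(redis_conn, "online", ["rate"], "crate01",
             instance_start=0, instance_end=16)
stream_group((pg_conn, config), "archived", ["rate"], "crate01",
             instance_list=["3", "7"])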
Example #6
def stream_group_archived(connection, stream_type, metric_names, group_name,
                          instances, args):
    connection, config = connection

    # first check if the table exists
    cursor = connection.cursor(cursor_factory=RealDictCursor)

    existing_metrics = []
    for metric in metric_names:
        query = "SELECT POSTGRES_TABLE from RUNCON_PRD.MONITOR_MAP where CHANNEL_ID = {INSTANCE} AND GROUP_NAME = '{GROUP_NAME}' "\
                "AND METRIC = '{METRIC_NAME}'"
        query_builder = {
            "INSTANCE": instances[0],
            "METRIC_NAME": metric,
            "GROUP_NAME": group_name
        }
        q = query.format(**query_builder)
        try:
            cursor.execute(q)
        except Exception:
            cursor.execute("ROLLBACK")
            connection.commit()
            raise
        table_data = cursor.fetchall()
        if len(table_data) > 0:
            existing_metrics.append(metric)
    metric_names = existing_metrics

    start = args["start"]
    stop = args["stop"]
    if start is None:
        start = datetime.now(timezone('UTC')) - timedelta(days=100)  # Start time
        start = calendar.timegm(start.timetuple()) * 1e3 + start.microsecond / 1e3  # convert to unix ms
    if stop is None:
        stop = datetime.now(timezone('UTC'))
        stop = calendar.timegm(stop.timetuple()) * 1e3 + stop.microsecond / 1e3  # convert to unix ms

    ret = {}
    for metric in metric_names:
        ret[metric] = {}
        for inst in instances:
            ret[metric][inst] = []

    # build the query
    query = ";".join([
        postgres_api.postgres_querymaker(instances,
                                         start,
                                         stop,
                                         config,
                                         name=group_name,
                                         metric=metric,
                                         avg="mean") for metric in metric_names
    ])
    if len(query) > 0:
        try:
            cursor.execute(query)
        except Exception:
            cursor.execute("ROLLBACK")
            connection.commit()
            raise

        data = cursor.fetchall()
    else:
        data = []

    # NOTE: every fetched row is filed under the first metric name; the
    # per-metric queries are concatenated with ";" above, and psycopg2
    # appears to expose only the last statement's result set here.
    for line in data:
        ID = str(line["id"])
        val = line["val0"]
        time = line["sample_time"]
        ret[metric_names[0]][ID].append((time, val))

    return jsonify(values=ret)
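
One caveat worth flagging: the metadata lookup above interpolates `instances[0]`, `group_name`, and `metric` straight into the SQL text, which breaks on quoting and is injection-prone if any of them derive from the request. A safer variant of just that lookup using psycopg2 parameter binding (a suggested alternative, not what the original does):

# Suggested parameterized form of the MONITOR_MAP lookup.
query = ("SELECT POSTGRES_TABLE FROM RUNCON_PRD.MONITOR_MAP "
         "WHERE CHANNEL_ID = %s AND GROUP_NAME = %s AND METRIC = %s")
cursor.execute(query, (instances[0], group_name, metric))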
Example #7
def stream(rconnect, name):
    args = stream_args(request.args)
    data = redis_api.get_streams(rconnect, [name], **args)
    min_end_time = get_min_end_time(data)

    return jsonify(values=data, min_end_time=min_end_time)
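
Finally, `front_end_key_api` (used in Examples #1 and #4) is not shown either. Given the "group:instance:metric:type" stream-name format used above and the metric/instance nesting seen in Example #6, it plausibly re-keys the flat stream data for the front end. A hypothetical sketch:

# Hypothetical sketch of front_end_key_api; the real key layout may differ.
def front_end_key_api(data):
    ret = {}
    for stream_name, series in data.items():
        _group, inst, metric, _stream_type = stream_name.split(":")
        ret.setdefault(metric, {})[inst] = series
    return ret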