def async_usage_data(device_id, unit, channel, start_seconds, end_seconds):
    """
    Return energy-usage data between start_seconds and end_seconds from
    influxdb.

    Used for asynchronous energy usage display of many points (up to
    millions). If more than 700 points fall inside the requested window,
    the points are averaged into at most 700 groups to bound the payload.

    :param device_id: unique ID of the measuring device
    :param unit: unit of the measurement stored in influxdb
    :param channel: measurement channel
    :param start_seconds: window start as unix-epoch string; '0' = unset
    :param end_seconds: window end as unix-epoch string; '0' = unset
    :return: JSON array of [timestamp, value] pairs, or ('', 204) when the
        query could not be generated or raised an error
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST, INFLUXDB_PORT, INFLUXDB_USER,
        INFLUXDB_PASSWORD, INFLUXDB_DATABASE)

    def count_and_first_point(start_str=None, end_str=None):
        """Return (point count, timestamp of first point) for the window,
        or None when query_string() could not build a query (returns 1).
        When start_str/end_str are None they are omitted entirely so
        query_string() applies its own default time frame."""
        kwargs = {}
        if start_str is not None:
            kwargs['start_str'] = start_str
            kwargs['end_str'] = end_str
        query_str = query_string(
            unit, device_id, channel=channel, value='COUNT', **kwargs)
        if query_str == 1:
            return None
        raw_data = dbcon.query(query_str).raw
        count = raw_data['series'][0]['values'][0][1]
        query_str = query_string(
            unit, device_id, channel=channel, limit=1, **kwargs)
        if query_str == 1:
            return None
        raw_data = dbcon.query(query_str).raw
        first = raw_data['series'][0]['values'][0][0]
        return count, first

    # Determine the time window and how many points it contains
    if start_seconds == '0' and end_seconds == '0':
        # No window specified: let query_string() use its default frame
        result = count_and_first_point()
        if result is None:
            return '', 204
        count_points, first_point = result
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        # Explicit start; end is either 'now' or the given epoch
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        result = count_and_first_point(start_str, end_str)
        if result is None:
            return '', 204
        count_points, first_point = result

    # Clamp the start of the window to the first actual data point
    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, group data
    # points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in. Guard against 0
        # (window shorter than 700 s): GROUP BY time(0s) is invalid.
        group_seconds = max(1, int(time_difference_seconds / 700))
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id, channel=channel, value='MEAN',
                start_str=start_str, end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_usage' raised an error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id, channel=channel,
                start_str=start_str, end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_usage' raised an error: "
                         "{err}".format(err=e))
            return '', 204
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
    """
    Return data between start_seconds and end_seconds from influxdb.

    Used for asynchronous graph display of many points (up to millions).
    For device_type 'tag', returns note annotations from the SQL database
    instead of influxdb data. If more than 700 points fall inside the
    requested window, the points are averaged into at most 700 groups to
    bound the payload.

    :param device_id: unique ID of the device (or note tag)
    :param device_type: one of 'tag', 'input', 'math', 'output', 'pid'
    :param measurement_id: unique ID of the device measurement
    :param start_seconds: window start as unix-epoch string; '0' = unset
    :param end_seconds: window end as unix-epoch string; '0' = unset
    :return: JSON array of data points/notes, ('', 204) on no data/error,
        or an error string when the measurement cannot be found
    """
    if device_type == 'tag':
        # Tags: return matching notes from SQL, not influxdb series
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        notes = Notes.query.filter(
            and_(Notes.date_time >= start, Notes.date_time <= end)).all()
        for each_note in notes:
            # Notes store a comma-separated list of tag IDs
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append([
                    each_note.date_time.strftime(
                        "%Y-%m-%dT%H:%M:%S.000000000Z"),
                    each_note.name, each_note.note
                ])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    dbcon = InfluxDBClient(
        INFLUXDB_HOST, INFLUXDB_PORT, INFLUXDB_USER,
        INFLUXDB_PASSWORD, INFLUXDB_DATABASE)

    if device_type in ['input', 'math', 'output', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    else:
        measure = None

    if not measure:
        return "Could not find measurement"

    # measure is guaranteed truthy here (early return above), so the
    # conversion lookup is unconditional
    conversion = Conversion.query.filter(
        Conversion.unique_id == measure.conversion_id).first()

    channel, unit, measurement = return_measurement_info(measure, conversion)

    def count_and_first_point(start_str=None, end_str=None):
        """Return (point count, timestamp of first point) for the window,
        or None when query_string() could not build a query (returns 1).
        When start_str/end_str are None they are omitted entirely so
        query_string() applies its own default time frame."""
        kwargs = {}
        if start_str is not None:
            kwargs['start_str'] = start_str
            kwargs['end_str'] = end_str
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            value='COUNT', **kwargs)
        if query_str == 1:
            return None
        raw_data = dbcon.query(query_str).raw
        count = raw_data['series'][0]['values'][0][1]
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            limit=1, **kwargs)
        if query_str == 1:
            return None
        raw_data = dbcon.query(query_str).raw
        first = raw_data['series'][0]['values'][0][0]
        return count, first

    # Determine the time window and how many points it contains
    if start_seconds == '0' and end_seconds == '0':
        # No window specified: let query_string() use its default frame
        result = count_and_first_point()
        if result is None:
            return '', 204
        count_points, first_point = result
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    else:
        # Explicit start; end is either 'now' or the given epoch
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        result = count_and_first_point(start_str, end_str)
        if result is None:
            return '', 204
        count_points, first_point = result

    # Clamp the start of the window to the first actual data point
    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, group data
    # points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in. Guard against 0
        # (window shorter than 700 s): GROUP BY time(0s) is invalid.
        group_seconds = max(1, int(time_difference_seconds / 700))
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id, measure=measurement, channel=channel,
                value='MEAN', start_str=start_str, end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id, measure=measurement, channel=channel,
                start_str=start_str, end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204