def return_point_timestamp(dev_id, unit, period, measurement=None, channel=None):
    """Return [epoch_ms, value] for the most recent point, or [None, None].

    Queries influxdb directly via InfluxDBClient for the LAST value within
    the past `period` seconds.
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    query_str = query_string(
        unit, dev_id,
        measure=measurement,
        channel=channel,
        value='LAST',
        past_sec=period)
    if query_str == 1:  # query_string() signals an invalid query with 1
        return [None, None]

    try:
        points = dbcon.query(query_str).raw['series'][0]['values']
        time_raw = points[-1][0]
        value = '{:.3f}'.format(float(points[-1][1]))
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        return [timestamp, value]
    except KeyError:
        return [None, None]
    except Exception:
        return [None, None]
def return_point_timestamp(dev_id, unit, period, measurement=None, channel=None):
    """Return [epoch_ms, value] for the most recent point, or [None, None].

    Uses the Flask-InfluxDB extension connection rather than a direct client.
    """
    # Configure the Flask-InfluxDB extension before grabbing a connection
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    query_str = query_string(
        unit, dev_id,
        measure=measurement,
        channel=channel,
        value='LAST',
        past_sec=period)
    if query_str == 1:  # invalid query
        return [None, None]

    try:
        points = dbcon.query(query_str).raw['series'][0]['values']
        time_raw = points[-1][0]
        value = '{:.3f}'.format(float(points[-1][1]))
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        return [timestamp, value]
    except KeyError:
        return [None, None]
    except Exception:
        return [None, None]
def return_point_timestamp(measure, dev_id, period):
    """Return [epoch_ms, value] for the most recent point of a measurement.

    :param measure: influxdb measurement name
    :param dev_id: unique ID of the device
    :param period: number of past seconds to search
    :return: [timestamp_ms, value_str] or [None, None] if unavailable
    """
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection

    query_str = query_string(
        measure, dev_id, value='LAST', past_sec=period)
    if query_str == 1:  # query_string() signals an invalid query with 1
        return [None, None]

    try:
        raw_data = dbcon.query(query_str).raw
        number = len(raw_data['series'][0]['values'])
        time_raw = raw_data['series'][0]['values'][number - 1][0]
        value = raw_data['series'][0]['values'][number - 1][1]
        value = '{:.3f}'.format(float(value))
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        return [timestamp, value]
    except KeyError:
        return [None, None]
    except Exception:  # fixed: exception was previously bound to an unused name
        return [None, None]
def return_point_timestamp(dev_id, unit, period, measurement=None, channel=None):
    """Return [epoch_ms, value] of the last point returned by query_string(),
    or [None, None] when no data is available."""
    data = query_string(
        unit, dev_id,
        measure=measurement,
        channel=channel,
        value='LAST',
        past_sec=period)
    if not data:
        return [None, None]

    try:
        last_point = data[-1]
        time_raw, raw_value = last_point[0], last_point[1]
        value = f'{float(raw_value):.3f}'
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        return [timestamp, value]
    except KeyError:
        return [None, None]
    except Exception:
        return [None, None]
def last_data(input_measure, input_id, input_period):
    """Return the most recent time and value from influxdb"""
    if not str_is_float(input_period):
        return '', 204

    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection

    try:
        query_str = query_string(
            input_measure, input_id, value='LAST', past_sec=input_period)
        if query_str == 1:  # invalid query
            return '', 204

        points = dbcon.query(query_str).raw['series'][0]['values']
        time_raw = points[-1][0]
        value = '{:.3f}'.format(float(points[-1][1]))
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        live_data = '[{},{}]'.format(timestamp, value)
        return Response(live_data, mimetype='text/json')
    except KeyError:
        logger.debug("No Data returned form influxdb")
        return '', 204
    except Exception as e:
        logger.exception("URL for 'last_data' raised and error: "
                         "{err}".format(err=e))
        return '', 204
def export_data(measurement, unique_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data.
    """
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection

    # Find the device this ID belongs to so the CSV header can carry its name.
    # Renamed local from 'input' to 'input_dev': don't shadow the builtin input().
    output = Output.query.filter(Output.unique_id == unique_id).first()
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    math = Math.query.filter(Math.unique_id == unique_id).first()
    if output:
        name = output.name
    elif input_dev:
        name = input_dev.name
    elif math:
        name = math.name
    else:
        name = None

    # Convert local epoch seconds to UTC ISO-8601 strings for the query
    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(
        measurement, unique_id, start_str=start_str, end_str=end_str)
    if query_str == 1:
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    raw_data = dbcon.query(query_str).raw
    if not raw_data or 'series' not in raw_data:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # Generate column names
    col_1 = 'timestamp (UTC)'
    col_2 = '{name} {meas} ({id})'.format(
        name=name, meas=measurement, id=unique_id)
    csv_filename = '{id}_{meas}.csv'.format(id=unique_id, meas=measurement)

    # Populate list of dictionary entries for each column to convert to CSV
    # and send to the user to download
    csv_data = []
    for each_data in raw_data['series'][0]['values']:
        csv_data.append({col_1: str(each_data[0][:-4]).replace('T', ' '),
                         col_2: each_data[1]})

    return send_csv(csv_data, csv_filename, [col_1, col_2])
def export_data(measurement, unique_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data (streamed CSV response).
    """
    # Python 3 fix: the Python-2 StringIO.StringIO module no longer exists
    from io import StringIO

    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection

    if measurement == 'duration_sec':
        name = Output.query.filter(Output.unique_id == unique_id).first().name
    else:
        name = Input.query.filter(Input.unique_id == unique_id).first().name

    # Convert local epoch seconds to UTC ISO-8601 strings for the query
    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(measurement, unique_id,
                             start_str=start_str, end_str=end_str)
    if query_str == 1:
        return '', 204
    raw_data = dbcon.query(query_str).raw
    if not raw_data:
        return '', 204

    def iter_csv(data_in):
        """Yield CSV text a row at a time for a streamed response."""
        line = StringIO()
        writer = csv.writer(line)
        # Fixed: formatting name.encode('utf8') under Python 3 rendered a
        # literal "b'...'" in the header; format the str directly.
        write_header = ('timestamp (UTC)',
                        '{name} {meas} ({id})'.format(name=name,
                                                      meas=measurement,
                                                      id=unique_id))
        writer.writerow(write_header)
        for csv_line in data_in:
            writer.writerow((csv_line[0][:-4], csv_line[1]))
            line.seek(0)
            yield line.read()
            line.truncate(0)
            # io.StringIO.truncate() does not move the position; without
            # this seek the next write would NUL-pad from the old offset
            line.seek(0)

    response = Response(iter_csv(raw_data['series'][0]['values']),
                        mimetype='text/csv')
    response.headers[
        'Content-Disposition'] = 'attachment; filename={id}_{meas}.csv'.format(
            id=unique_id, meas=measurement)
    return response
def generate_thermal_image_from_timestamp(unique_id, timestamp):
    """Return a file from the note attachment directory"""
    ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
    filename = 'Still-{uid}-{ts}.jpg'.format(
        uid=unique_id, ts=ts_now).replace(" ", "_")
    save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
    assure_path_exists(save_path)
    path_file = os.path.join(save_path, filename)

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    input_dev = Input.query.filter(Input.unique_id == unique_id).first()

    pixels = []
    success = True

    # One-second window around the requested epoch-millisecond timestamp
    start = int(int(timestamp) / 1000.0)  # Round down
    end = start + 1  # Round up
    start_timestamp = time.strftime(
        '%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
    end_timestamp = time.strftime(
        '%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))

    # Collect one pixel value per sensor channel
    for each_channel in range(input_dev.channels):
        measurement = 'channel_{chan}'.format(chan=each_channel)
        query_str = query_string(measurement, unique_id,
                                 start_str=start_timestamp,
                                 end_str=end_timestamp)
        if query_str == 1:
            logger.error('Invalid query string')
            success = False
        else:
            raw_data = dbcon.query(query_str).raw
            if not raw_data or 'series' not in raw_data or not raw_data['series']:
                logger.error('No measurements to export in this time period')
                success = False
            else:
                pixels.append(raw_data['series'][0]['values'][0][1])

    # logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))

    if success:
        generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
        return send_file(path_file, mimetype='image/jpeg')
    else:
        return "Could not generate image"
def generate_thermal_image_from_timestamp(unique_id, timestamp):
    """Return a file from the note attachment directory"""
    ts_now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=unique_id)))
    filename = 'Still-{uid}-{ts}.jpg'.format(
        uid=unique_id, ts=ts_now).replace(" ", "_")
    save_path = assure_path_exists(os.path.join(camera_path, 'thermal'))
    assure_path_exists(save_path)
    path_file = os.path.join(save_path, filename)

    # Configure the Flask-InfluxDB extension before grabbing a connection
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    input_dev = Input.query.filter(Input.unique_id == unique_id).first()

    pixels = []
    success = True

    # One-second window around the requested epoch-millisecond timestamp
    start = int(int(timestamp) / 1000.0)  # Round down
    end = start + 1  # Round up
    start_timestamp = time.strftime(
        '%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(start))
    end_timestamp = time.strftime(
        '%Y-%m-%dT%H:%M:%S.000000000Z', time.gmtime(end))

    # Collect one pixel value per sensor channel
    for each_channel in range(input_dev.channels):
        measurement = 'channel_{chan}'.format(chan=each_channel)
        query_str = query_string(measurement, unique_id,
                                 start_str=start_timestamp,
                                 end_str=end_timestamp)
        if query_str == 1:
            logger.error('Invalid query string')
            success = False
        else:
            raw_data = dbcon.query(query_str).raw
            if not raw_data or 'series' not in raw_data:
                logger.error('No measurements to export in this time period')
                success = False
            else:
                pixels.append(raw_data['series'][0]['values'][0][1])

    # logger.error("generate_thermal_image_from_timestamp: success: {}, pixels: {}".format(success, pixels))

    if success:
        generate_thermal_image_from_pixels(pixels, 8, 8, path_file)
        return send_file(path_file, mimetype='image/jpeg')
    else:
        return "Could not generate image"
def past_data(input_measure, input_id, past_seconds):
    """Return data from past_seconds until present from influxdb"""
    if not str_is_float(past_seconds):
        return '', 204

    if input_measure == 'tag':
        # Tags are stored as notes, not influxdb measurements
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == input_id).first()
        cutoff = (datetime.datetime.utcnow() -
                  datetime.timedelta(seconds=int(past_seconds)))
        notes = Notes.query.filter(Notes.date_time >= cutoff).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append([
                    each_note.date_time.strftime(
                        "%Y-%m-%dT%H:%M:%S.000000000Z"),
                    each_note.name,
                    each_note.note
                ])
        if notes_list:
            return jsonify(notes_list)
        return '', 204
    else:
        current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
        current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
        current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
        current_app.config['INFLUXDB_TIMEOUT'] = 5
        dbcon = influx_db.connection
        try:
            query_str = query_string(input_measure, input_id,
                                     past_sec=past_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            if raw_data:
                return jsonify(raw_data['series'][0]['values'])
            return '', 204
        except Exception as e:
            logger.debug("URL for 'past_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
def past_data(sensor_measure, sensor_id, past_seconds):
    """Return data from past_seconds until present from influxdb"""
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection
    try:
        query_str = query_string(sensor_measure, sensor_id,
                                 past_sec=past_seconds)
        if query_str == 1:  # invalid query
            return '', 204
        raw_data = dbcon.query(query_str).raw
        if not raw_data:
            return '', 204
        return jsonify(raw_data['series'][0]['values'])
    except Exception as e:
        logger.debug("URL for 'past_data' raised and error: "
                     "{err}".format(err=e))
        return '', 204
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).
    """
    if device_type == 'tag':
        # Tags come from notes, not influxdb
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        notes = Notes.query.filter(
            and_(Notes.date_time >= start, Notes.date_time <= end)).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime(
                        "%Y-%m-%dT%H:%M:%S.000000000Z"),
                     each_note.name, each_note.note])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    if device_type in ['input', 'math', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    elif device_type == 'output':
        measure = Output.query.filter(Output.unique_id == device_id).first()
    else:
        measure = None

    if not measure:
        return "Could not find measurement"

    if measure:
        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()
    else:
        conversion = None

    channel, unit, measurement = return_measurement_info(measure, conversion)

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            value='COUNT')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame to the past start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            value='COUNT', start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            start_str=start_str, end_str=end_str, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
    else:
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            value='COUNT', start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, measure=measurement, channel=channel,
            start_str=start_str, end_str=end_str, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]

    # Recompute the start from the first actual data point
    start = datetime.datetime.strptime(first_point[:26],
                                       '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id, measure=measurement, channel=channel,
                value='MEAN', start_str=start_str, end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(
                unit, device_id, measure=measurement, channel=channel,
                start_str=start_str, end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
def async_data(device_id, device_type, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).
    """
    if device_type == 'tag':
        # Tags come from notes, not influxdb
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == device_id).first()
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        if end_seconds == '0':
            end = datetime.datetime.utcnow()
        else:
            end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        notes = Notes.query.filter(
            and_(Notes.date_time >= start, Notes.date_time <= end)).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append([
                    each_note.date_time.strftime(
                        "%Y-%m-%dT%H:%M:%S.000000000Z"),
                    each_note.name,
                    each_note.note
                ])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE)

    if device_type in ['input', 'math', 'output', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    else:
        measure = None

    if not measure:
        return "Could not find measurement"

    if measure:
        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()
    else:
        conversion = None

    channel, unit, measurement = return_measurement_info(measure, conversion)

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 value='COUNT')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame to the past start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 value='COUNT',
                                 start_str=start_str,
                                 end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 start_str=start_str,
                                 end_str=end_str,
                                 limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
    else:
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 value='COUNT',
                                 start_str=start_str,
                                 end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(unit, device_id,
                                 measure=measurement,
                                 channel=channel,
                                 start_str=start_str,
                                 end_str=end_str,
                                 limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]

    # Recompute the start from the first actual data point
    start = datetime.datetime.strptime(
        influx_time_str_to_milliseconds(first_point),
        '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(unit, device_id,
                                     measure=measurement,
                                     channel=channel,
                                     value='MEAN',
                                     start_str=start_str,
                                     end_str=end_str,
                                     group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        try:
            query_str = query_string(unit, device_id,
                                     measure=measurement,
                                     channel=channel,
                                     start_str=start_str,
                                     end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
def export_data(unique_id, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data.
    """
    dbcon = InfluxDBClient(
        INFLUXDB_HOST,
        INFLUXDB_PORT,
        INFLUXDB_USER,
        INFLUXDB_PASSWORD,
        INFLUXDB_DATABASE,
        timeout=100)

    # Find the device this ID belongs to so the CSV header can carry its name
    output = Output.query.filter(Output.unique_id == unique_id).first()
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    math = Math.query.filter(Math.unique_id == unique_id).first()
    if output:
        name = output.name
    elif input_dev:
        name = input_dev.name
    elif math:
        name = math.name
    else:
        name = None

    device_measurement = DeviceMeasurements.query.filter(
        DeviceMeasurements.unique_id == measurement_id).first()
    if device_measurement:
        conversion = Conversion.query.filter(
            Conversion.unique_id == device_measurement.conversion_id).first()
    else:
        conversion = None
    channel, unit, measurement = return_measurement_info(
        device_measurement, conversion)

    # Convert local epoch seconds to UTC ISO-8601 strings for the query
    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(unit, unique_id,
                             measure=measurement,
                             channel=channel,
                             start_str=start_str,
                             end_str=end_str)
    if query_str == 1:
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    raw_data = dbcon.query(query_str).raw
    if not raw_data or 'series' not in raw_data or not raw_data['series']:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # Generate column names
    col_1 = 'timestamp (UTC)'
    col_2 = '{name} {meas} ({id})'.format(
        name=name, meas=measurement, id=unique_id)
    csv_filename = '{id}_{name}_{meas}.csv'.format(
        id=unique_id, name=name, meas=measurement)

    from flask import Response
    import csv
    from io import StringIO

    def iter_csv(data):
        """ Stream CSV file to user for download """
        line = StringIO()
        writer = csv.writer(line)
        writer.writerow([col_1, col_2])
        for csv_line in data:
            writer.writerow(
                [str(csv_line[0][:-4]).replace('T', ' '), csv_line[1]])
            line.seek(0)
            yield line.read()
            line.truncate(0)
            line.seek(0)

    response = Response(iter_csv(raw_data['series'][0]['values']),
                        mimetype='text/csv')
    response.headers[
        'Content-Disposition'] = 'attachment; filename="{}"'.format(
            csv_filename)
    return response
def past_data(unique_id, measure_type, measurement_id, past_seconds):
    """Return data from past_seconds until present from influxdb.

    :param unique_id: unique ID of the device (or note tag)
    :param measure_type: 'tag', 'input', 'math', 'output', or 'pid'
    :param measurement_id: unique ID of the DeviceMeasurements entry
    :param past_seconds: number of past seconds of data to return
    :return: JSON list of [time, value] points, or ('', 204) when empty
    """
    if not str_is_float(past_seconds):
        return '', 204

    if measure_type == 'tag':
        # Tags are stored as notes, not influxdb measurements
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == unique_id).first()
        notes = Notes.query.filter(Notes.date_time >= (
            datetime.datetime.utcnow() -
            datetime.timedelta(seconds=int(past_seconds)))).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append([
                    each_note.date_time.strftime(
                        "%Y-%m-%dT%H:%M:%S.000000000Z"),
                    each_note.name,
                    each_note.note
                ])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    elif measure_type in ['input', 'math', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)

        # Fixed: removed the redundant inner membership test (its
        # 'measure = None' branch was unreachable, since the elif above
        # already guarantees measure_type is one of the handled values)
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
        if not measure:
            return "Could not find measurement"

        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()

        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        # A PID setpoint uses the unit/measurement of the measurement the
        # PID controls
        if hasattr(measure, 'measurement_type') and \
                measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(
                PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(
                        setpoint_measurement, conversion)

        try:
            query_str = query_string(unit, unique_id,
                                     measure=measurement,
                                     channel=channel,
                                     past_sec=past_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            if 'series' in raw_data and raw_data['series']:
                return jsonify(raw_data['series'][0]['values'])
            else:
                return '', 204
        except Exception as e:
            logger.debug("URL for 'past_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
def last_data(unique_id, measure_type, measurement_id, period):
    """Return the most recent time and value from influxdb.

    :param unique_id: unique ID of the device
    :param measure_type: 'input', 'math', 'output', or 'pid'
    :param measurement_id: unique ID of the DeviceMeasurements entry
    :param period: number of past seconds to search ('0' for no limit)
    :return: Response '[timestamp_ms,value]' or ('', 204) when no data
    """
    if not str_is_float(period):
        return '', 204

    if measure_type in ['input', 'math', 'output', 'pid']:
        dbcon = InfluxDBClient(
            INFLUXDB_HOST,
            INFLUXDB_PORT,
            INFLUXDB_USER,
            INFLUXDB_PASSWORD,
            INFLUXDB_DATABASE)

        # Fixed: removed the redundant inner membership test (its else
        # branch returning '', 204 was unreachable, since the if above
        # already guarantees measure_type is one of the handled values)
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()

        if measure:
            conversion = Conversion.query.filter(
                Conversion.unique_id == measure.conversion_id).first()
        else:
            conversion = None

        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        # A PID setpoint uses the unit/measurement of the measurement the
        # PID controls
        if hasattr(measure, 'measurement_type') and \
                measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(
                PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(
                        setpoint_measurement, conversion)

        try:
            if period != '0':
                query_str = query_string(unit, unique_id,
                                         measure=measurement,
                                         channel=channel,
                                         value='LAST',
                                         past_sec=period)
            else:
                query_str = query_string(unit, unique_id,
                                         measure=measurement,
                                         channel=channel,
                                         value='LAST')
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            number = len(raw_data['series'][0]['values'])
            time_raw = raw_data['series'][0]['values'][number - 1][0]
            value = raw_data['series'][0]['values'][number - 1][1]
            value = float(value)
            # Convert date-time to epoch (potential bottleneck for data)
            dt = date_parse(time_raw)
            timestamp = calendar.timegm(dt.timetuple()) * 1000
            live_data = '[{},{}]'.format(timestamp, value)
            return Response(live_data, mimetype='text/json')
        except KeyError:
            logger.debug("No Data returned form influxdb")
            return '', 204
        except IndexError:
            logger.debug("No Data returned form influxdb")
            return '', 204
        except Exception as e:
            logger.exception("URL for 'last_data' raised and error: "
                             "{err}".format(err=e))
            return '', 204
def last_data(unique_id, measure_type, measurement_id, period):
    """Return the most recent time and value from influxdb.

    :param unique_id: unique ID of the device the measurement belongs to
    :param measure_type: one of 'input', 'math', 'output', 'pid'
    :param measurement_id: unique ID of the DeviceMeasurements entry
        (ignored for 'output', which is looked up by unique_id)
    :param period: seconds into the past to search ('0' = no time limit)
    :return: Response containing JSON '[timestamp_ms, value]',
        or ('', 204) when the request is invalid or no data exists
    """
    if not str_is_float(period):
        return '', 204

    # Guard clause: previously an unsupported measure_type fell off the end
    # of the function and implicitly returned None, which Flask cannot serve
    if measure_type not in ['input', 'math', 'output', 'pid']:
        return '', 204

    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    if measure_type in ['input', 'math', 'pid']:
        measure = DeviceMeasurements.query.filter(
            DeviceMeasurements.unique_id == measurement_id).first()
    elif measure_type == 'output':
        measure = Output.query.filter(
            Output.unique_id == unique_id).first()
    else:
        return '', 204

    if measure:
        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()
    else:
        conversion = None

    channel, unit, measurement = return_measurement_info(
        measure, conversion)

    # A PID setpoint measurement reports in the unit/measurement of the
    # measurement the PID is regulating, so resolve those instead
    if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
        setpoint_pid = PID.query.filter(
            PID.unique_id == measure.device_id).first()
        if setpoint_pid and ',' in setpoint_pid.measurement:
            pid_measurement = setpoint_pid.measurement.split(',')[1]
            setpoint_measurement = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == pid_measurement).first()
            if setpoint_measurement:
                conversion = Conversion.query.filter(
                    Conversion.unique_id == setpoint_measurement.conversion_id).first()
                _, unit, measurement = return_measurement_info(
                    setpoint_measurement, conversion)

    try:
        if period != '0':
            query_str = query_string(
                unit, unique_id,
                measure=measurement,
                channel=channel,
                value='LAST',
                past_sec=period)
        else:
            query_str = query_string(
                unit, unique_id,
                measure=measurement,
                channel=channel,
                value='LAST')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        number = len(raw_data['series'][0]['values'])
        time_raw = raw_data['series'][0]['values'][number - 1][0]
        value = raw_data['series'][0]['values'][number - 1][1]
        value = float(value)
        # Convert date-time to epoch (potential bottleneck for data)
        dt = date_parse(time_raw)
        timestamp = calendar.timegm(dt.timetuple()) * 1000
        live_data = '[{},{}]'.format(timestamp, value)
        return Response(live_data, mimetype='text/json')
    except KeyError:
        logger.debug("No Data returned from influxdb")
        return '', 204
    except IndexError:
        # An empty 'values' list previously fell through to the generic
        # handler and logged a full traceback; treat it as "no data" like
        # the KeyError case
        logger.debug("No Data returned from influxdb")
        return '', 204
    except Exception as e:
        logger.exception("URL for 'last_data' raised an error: "
                         "{err}".format(err=e))
        return '', 204
def past_data(unique_id, measure_type, measurement_id, past_seconds):
    """Return data from past_seconds until present from influxdb.

    :param unique_id: unique ID of the device (or note tag) to query
    :param measure_type: 'tag', 'input', 'math', 'output', or 'pid'
    :param measurement_id: unique ID of the DeviceMeasurements entry
        (ignored for 'output' and 'tag')
    :param past_seconds: how many seconds into the past to query
    :return: JSON list of [time, ...] rows, or ('', 204) when the request
        is invalid or no data exists
    """
    if not str_is_float(past_seconds):
        return '', 204

    if measure_type == 'tag':
        # Tags are stored as Notes in the relational DB, not influxdb
        notes_list = []
        tag = NoteTags.query.filter(NoteTags.unique_id == unique_id).first()
        notes = Notes.query.filter(
            Notes.date_time >= (
                datetime.datetime.utcnow() -
                datetime.timedelta(seconds=int(past_seconds)))).all()
        for each_note in notes:
            if tag.unique_id in each_note.tags.split(','):
                notes_list.append(
                    [each_note.date_time.strftime("%Y-%m-%dT%H:%M:%S.000000000Z"),
                     each_note.name, each_note.note])
        if notes_list:
            return jsonify(notes_list)
        else:
            return '', 204

    elif measure_type in ['input', 'math', 'output', 'pid']:
        current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
        current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
        current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
        current_app.config['INFLUXDB_TIMEOUT'] = 5
        dbcon = influx_db.connection

        if measure_type in ['input', 'math', 'pid']:
            measure = DeviceMeasurements.query.filter(
                DeviceMeasurements.unique_id == measurement_id).first()
        elif measure_type == 'output':
            measure = Output.query.filter(
                Output.unique_id == unique_id).first()
        else:
            measure = None

        if not measure:
            return "Could not find measurement"

        # measure is guaranteed truthy here; the previous
        # "if measure / else conversion = None" branch was dead code
        conversion = Conversion.query.filter(
            Conversion.unique_id == measure.conversion_id).first()

        channel, unit, measurement = return_measurement_info(
            measure, conversion)

        # A PID setpoint measurement reports in the unit/measurement of the
        # measurement the PID is regulating, so resolve those instead
        if hasattr(measure, 'measurement_type') and measure.measurement_type == 'setpoint':
            setpoint_pid = PID.query.filter(
                PID.unique_id == measure.device_id).first()
            if setpoint_pid and ',' in setpoint_pid.measurement:
                pid_measurement = setpoint_pid.measurement.split(',')[1]
                setpoint_measurement = DeviceMeasurements.query.filter(
                    DeviceMeasurements.unique_id == pid_measurement).first()
                if setpoint_measurement:
                    conversion = Conversion.query.filter(
                        Conversion.unique_id == setpoint_measurement.conversion_id).first()
                    _, unit, measurement = return_measurement_info(
                        setpoint_measurement, conversion)

        try:
            query_str = query_string(
                unit, unique_id,
                measure=measurement,
                channel=channel,
                past_sec=past_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            if 'series' in raw_data:
                return jsonify(raw_data['series'][0]['values'])
            else:
                return '', 204
        except Exception as e:
            logger.debug("URL for 'past_data' raised an error: "
                         "{err}".format(err=e))
            return '', 204

    # Guard clause: previously an unsupported measure_type fell off the end
    # of the function and implicitly returned None, which Flask cannot serve
    return '', 204
def export_data(unique_id, measurement_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for exporting data.
    """
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    # The device can be an Output, Input, or Math; try each in turn to
    # find a display name for the CSV column header
    output = Output.query.filter(Output.unique_id == unique_id).first()
    input_dev = Input.query.filter(Input.unique_id == unique_id).first()
    math = Math.query.filter(Math.unique_id == unique_id).first()

    if output:
        name = output.name
    elif input_dev:
        name = input_dev.name
    elif math:
        name = math.name
    else:
        name = None

    device_measurement = DeviceMeasurements.query.filter(
        DeviceMeasurements.unique_id == measurement_id).first()
    if device_measurement:
        conversion = Conversion.query.filter(
            Conversion.unique_id == device_measurement.conversion_id).first()
    else:
        conversion = None
    channel, unit, measurement = return_measurement_info(
        device_measurement, conversion)

    # start_seconds/end_seconds are presumably local-time epochs from the
    # browser; shift them by the host's UTC offset so the influxdb query
    # (which stores UTC) covers the intended window — TODO confirm caller
    utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
    start = datetime.datetime.fromtimestamp(float(start_seconds))
    start += utc_offset_timedelta
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    end = datetime.datetime.fromtimestamp(float(end_seconds))
    end += utc_offset_timedelta
    end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    query_str = query_string(
        unit, unique_id,
        measure=measurement,
        channel=channel,
        start_str=start_str,
        end_str=end_str)
    if query_str == 1:
        # query_string() signals a malformed request by returning 1
        flash('Invalid query string', 'error')
        return redirect(url_for('routes_page.page_export'))
    raw_data = dbcon.query(query_str).raw

    if not raw_data or 'series' not in raw_data:
        flash('No measurements to export in this time period', 'error')
        return redirect(url_for('routes_page.page_export'))

    # Generate column names
    col_1 = 'timestamp (UTC)'
    col_2 = '{name} {meas} ({id})'.format(
        name=name, meas=measurement, id=unique_id)
    csv_filename = '{id}_{meas}.csv'.format(id=unique_id, meas=measurement)

    # Populate list of dictionary entries for each column to convert to CSV
    # and send to the user to download
    csv_data = []
    for each_data in raw_data['series'][0]['values']:
        # each_data[0] is an ISO timestamp string; strip the trailing
        # characters and the 'T' separator for readability in the CSV
        csv_data.append({col_1: str(each_data[0][:-4]).replace('T', ' '),
                         col_2: each_data[1]})

    return send_csv(csv_data, csv_filename, [col_1, col_2])
def async_data(measurement, unique_id, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).
    """
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    dbcon = influx_db.connection

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(measurement, unique_id, value='COUNT')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(measurement, unique_id, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame to the past start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(measurement, unique_id, value='COUNT',
                                 start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(measurement, unique_id,
                                 start_str=start_str, end_str=end_str,
                                 limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
    else:
        # Both start and end epochs supplied
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(measurement, unique_id, value='COUNT',
                                 start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(measurement, unique_id,
                                 start_str=start_str, end_str=end_str,
                                 limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]

    # Re-anchor the start of the window to the first actual data point so
    # the grouping interval below reflects the real data span
    start = datetime.datetime.strptime(first_point[:26],
                                       '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(measurement, unique_id, value='MEAN',
                                     start_str=start_str, end_str=end_str,
                                     group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        # 700 or fewer points: return the raw values without grouping
        try:
            query_str = query_string(measurement, unique_id,
                                     start_str=start_str, end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
def async_usage_data(device_id, unit, channel, start_seconds, end_seconds):
    """
    Return data from start_seconds to end_seconds from influxdb.
    Used for asynchronous graph display of many points (up to millions).
    """
    current_app.config['INFLUXDB_USER'] = INFLUXDB_USER
    current_app.config['INFLUXDB_PASSWORD'] = INFLUXDB_PASSWORD
    current_app.config['INFLUXDB_DATABASE'] = INFLUXDB_DATABASE
    current_app.config['INFLUXDB_TIMEOUT'] = 5
    dbcon = influx_db.connection

    # Set the time frame to the past year if start/end not specified
    if start_seconds == '0' and end_seconds == '0':
        # Get how many points there are in the past year
        query_str = query_string(
            unit, device_id, channel=channel, value='COUNT')
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, channel=channel, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # Set the time frame to the past start epoch to now
    elif start_seconds != '0' and end_seconds == '0':
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcnow()
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(
            unit, device_id, channel=channel, value='COUNT',
            start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, channel=channel,
            start_str=start_str, end_str=end_str, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]
    else:
        # Both start and end epochs supplied
        start = datetime.datetime.utcfromtimestamp(float(start_seconds))
        start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        end = datetime.datetime.utcfromtimestamp(float(end_seconds))
        end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        query_str = query_string(
            unit, device_id, channel=channel, value='COUNT',
            start_str=start_str, end_str=end_str)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        count_points = raw_data['series'][0]['values'][0][1]
        # Get the timestamp of the first point in the past year
        query_str = query_string(
            unit, device_id, channel=channel,
            start_str=start_str, end_str=end_str, limit=1)
        if query_str == 1:
            return '', 204
        raw_data = dbcon.query(query_str).raw
        first_point = raw_data['series'][0]['values'][0][0]

    # Re-anchor the start of the window to the first actual data point so
    # the grouping interval below reflects the real data span
    start = datetime.datetime.strptime(first_point[:26],
                                       '%Y-%m-%dT%H:%M:%S.%f')
    start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ')

    logger.debug('Count = {}'.format(count_points))
    logger.debug('Start = {}'.format(start))
    logger.debug('End = {}'.format(end))

    # How many seconds between the start and end period
    time_difference_seconds = (end - start).total_seconds()
    logger.debug('Difference seconds = {}'.format(time_difference_seconds))

    # If there are more than 700 points in the time frame, we need to group
    # data points into 700 groups with points averaged in each group.
    if count_points > 700:
        # Average period between input reads
        seconds_per_point = time_difference_seconds / count_points
        logger.debug('Seconds per point = {}'.format(seconds_per_point))

        # How many seconds to group data points in
        group_seconds = int(time_difference_seconds / 700)
        logger.debug('Group seconds = {}'.format(group_seconds))

        try:
            query_str = query_string(
                unit, device_id, channel=channel, value='MEAN',
                start_str=start_str, end_str=end_str,
                group_sec=group_seconds)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204
    else:
        # 700 or fewer points: return the raw values without grouping
        try:
            query_str = query_string(
                unit, device_id, channel=channel,
                start_str=start_str, end_str=end_str)
            if query_str == 1:
                return '', 204
            raw_data = dbcon.query(query_str).raw
            return jsonify(raw_data['series'][0]['values'])
        except Exception as e:
            logger.error("URL for 'async_data' raised and error: "
                         "{err}".format(err=e))
            return '', 204