def parse(measurement: Measurement) -> Point:
    """Convert a Measurement into an InfluxDB Point.

    The measurement's name becomes the point name, each entry of
    ``groups`` (when present) becomes a tag, and the value/time pair
    becomes the single ``value`` field with its timestamp.
    """
    point = Point(measurement.name)
    if measurement.groups is not None:
        for key, value in measurement.groups.items():
            point.tag(key, value)
    # field()/time() return the point itself, so the chain is the result.
    return point.field("value", measurement.value).time(measurement.time)
def test_write_points_unicode(self):
    """Round-trip a point whose measurement, field name, tag and value are UTF-8."""
    bucket = self.create_test_bucket()
    measurement = "h2o_feet_ěščřĚŠČŘ"
    field_name = "field_ěščř"
    utf8_val = "Přerov 🍺"
    tag = "tag_ěščř"
    tag_value = "tag_value_ěščř"

    point = Point(measurement)
    point.field(field_name, utf8_val)
    point.tag(tag, tag_value)

    self.write_client.write(bucket.name, self.org, [point])

    query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
    tables = self.client.query_api().query(query)
    self.assertEqual(1, len(tables))

    record = tables[0].records[0]
    # Default tags configured on the client must be present on the record.
    self.assertEqual(self.id_tag, record["id"])
    self.assertEqual(self.customer_tag, record["customer"])
    self.assertEqual("LA", record[self.data_center_key])
    # The unicode payload must survive the write/query round trip intact.
    self.assertEqual(measurement, record.get_measurement())
    self.assertEqual(utf8_val, record.get_value())
    self.assertEqual(field_name, record.get_field())
def notify(self, notifyevent, control, settings, pelletdb, in_data, grill_platform):
    """Queue one telemetry point; rate-limited to at most one per second."""
    if time.time() - self.last_updated < 1:
        return

    from influxdb_client import Point

    grill_name = settings['globals']['grill_name']
    if len(grill_name) == 0:
        grill_name = 'Smoker'

    def pick(data, key, default):
        # Tolerant lookup: the source dict may be None or missing the key.
        if data is not None and key in data:
            return data[key]
        return default

    point = (
        Point(grill_name)
        .time(time=datetime.utcnow())
        .field("GrillTemp", float(pick(in_data, 'GrillTemp', 0.0)))
        .field('GrillSetPoint', float(pick(in_data, 'GrillSetPoint', 0.0)))
        .field('Probe1Temp', float(pick(in_data, 'Probe1Temp', 0.0)))
        .field('Probe1SetPoint', float(pick(in_data, 'Probe1SetPoint', 0.0)))
        .field('Probe2Temp', float(pick(in_data, 'Probe2Temp', 0)))
        .field('Probe2SetPoint', float(pick(in_data, 'Probe2SetPoint', 0.0)))
        .field("Mode", str(pick(control, "mode", 'unknown')))
        .field('PelletLevel', int(pick(pick(pelletdb, 'current', {}), 'hopper_level', 100)))
    )

    if grill_platform is not None:
        for output_name, output_state in grill_platform.GetOutputStatus().items():
            point = point.field(output_name, int(output_state))

    # Only record real events; the periodic GRILL_STATE heartbeat is not one.
    if notifyevent and 'GRILL_STATE' != notifyevent:
        point = point.field('Event', str(notifyevent))

    self.queue.append(point)
    self.last_updated = time.time()
def collect_influx(self, influx: InfluxDB) -> None:
    '''Pushes data to InfluxDB.'''
    if not self.name:
        return
    self.battery_manager.collect_influx(influx)

    timestamp = datetime.now(timezone.utc)
    point = Point('temperature').tag('inverter', self.name).time(
        timestamp, write_precision=InfluxWritePrecision.S)

    # Candidate (label, reading) pairs; only non-None readings become fields.
    candidates = (
        ('heatsink', self.readings.temperature_heatsink),
        ('heatsink_battery_actuator', self.readings.temperature_heatsink_batt),
        ('core', self.readings.temperature_core),
        ('battery', self.battery_manager.readings.temperature),
    )
    wrote_any = False
    for label, reading in candidates:
        if reading is not None:
            point = point.field(label, reading)
            wrote_any = True

    # Skip the write entirely when no temperature sensor reported a value.
    if wrote_any:
        influx.add_points(point)
def influxdb2_publish(event, data):
    """Publish one event's fields to InfluxDB 2.x (or 1.8's v2 API layer).

    :param event: measurement name for the point
    :param data: dict of field name -> value; must contain 'timestamp' (epoch seconds)
    """
    # influxdb_client supports InfluxDB backends 1.8/2.0+ - v1.8 includes a v2 API layer.
    from influxdb_client import InfluxDBClient, Point, WritePrecision
    from influxdb_client.client.write_api import SYNCHRONOUS

    client = None
    try:
        client = InfluxDBClient(url=args.influxdb2_url, token=args.influxdb2_token,
                                org=args.influxdb2_org, debug=args.influxdb2_debug)

        # WritePrecision.S necessary since we are using the report's timestamp,
        # which is epoch in seconds.
        point = Point(event).tag("source", "weatherflow-udp-listener").time(
            data['timestamp'], WritePrecision.S)

        # add all keys / values to data point
        for key in data.keys():
            point.field(key, data[key])
            if args.influxdb2_debug:
                print("added field %s : %s" % (key, data[key]))

        if args.influxdb2_debug or args.verbose:
            print("publishing event %s to influxdb" % (event))

        # write to API
        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket=args.influxdb2_bucket, record=point)
    except Exception as e:
        print("Failed to connect to InfluxDB: %s" % e)
    finally:
        # BUG FIX: the client (and its underlying HTTP session) was never
        # closed, leaking one connection per published event.
        if client is not None:
            client.close()
def write(self, bucket, measurement: str, time, field_list: list, tag_list: list = None, **kwargs):
    '''
    Write a single point to InfluxDB.

    :param bucket: the bucket on which write the data
    :param measurement: name of measurement
    :param time: timestamp (written with millisecond precision)
    :param field_list: field list : containing tuple (key,value); must be non-empty
    :param tag_list: tag list : containing tuple (key,value), optional parameter
    :raises Exception: when field_list is empty (a point needs at least one field)
    '''
    # Validate before building anything.
    if not field_list:
        # TODO : Create an exception NoDataException
        raise Exception("Not point to write in database.")

    # BUG FIX: the default was a shared mutable list (tag_list=[]); a None
    # sentinel avoids the classic mutable-default-argument trap.
    if tag_list is None:
        tag_list = []

    point = Point(measurement)
    point.time(time, WritePrecision.MS)
    for field_key, field_value in field_list:
        point.field(field_key, field_value)
    for tag_key, tag_value in tag_list:
        point.tag(tag_key, tag_value)

    self.write_api.write(bucket=bucket, record=point, org=self.org, **kwargs)
def _write_point(self):
    """Write one tagged point and assert the client's default tags appear on it."""
    self.write_client = self.client.write_api(write_options=SYNCHRONOUS)
    bucket = self.create_test_bucket()

    point = Point("h2o_feet")
    point.field("water_level", "1.0")
    point.tag("location", "creek level")

    self.write_client.write(bucket.name, self.org, [point])

    query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
    tables = self.client.query_api().query(query)
    self.assertEqual(1, len(tables))

    record = tables[0].records[0]
    # The default tags configured on the client must be present.
    self.assertEqual(self.id_tag, record["id"])
    self.assertEqual(self.customer_tag, record["customer"])
    self.assertEqual("LA", record[self.data_center_key])

    self.delete_test_bucket(bucket)
def _format_line(self, measurement, data):
    """Build a Point for *measurement*, tagged location=lt and stamped now (UTC)."""
    point = Point(measurement).tag('location', 'lt').time(time=datetime.utcnow())
    for field_name, field_value in data.items():
        point.field(field_name, field_value)
    return point
def _ruuvi_data_to_influx(data: RuuviTagData) -> Point:
    """Map a RuuviTag sample onto a Point: mac -> tag, other set attrs -> fields."""
    point = Point("ruuvi_measurements").tag("mac", data.mac).time(data.time)
    for name, value in data._asdict().items():
        # 'mac' and 'time' are identity/timestamp, not measurements;
        # None means the sensor did not report that quantity.
        if name not in ("mac", "time") and value is not None:
            point.field(name, value)
    return point
def add_data_point(self, field_name, field_value, tags=None):
    """Append a new point (one field, optional tags) to this object's records."""
    point = Point(self.name).field(field_name, field_value)
    if tags is not None:
        for key, value in tags.items():
            point = point.tag(key, value)
    self.records.append(point)
def influxdb_write(measurement, points):
    """Write (timestamp, values-dict) pairs as one batch and block until done."""
    records = []
    for timestamp, values in points:
        record = Point(measurement).time(timestamp)
        for field_name, field_value in values.items():
            record.field(field_name, field_value)
        records.append(record)
    # write() returns an async result; .get() blocks until the write completes.
    write_api.write(bucket=BUCKET, record=records).get()
def map_data(self, fields: dict, tags: dict):
    """Build a 'ups_status' Point from field and tag dictionaries.

    :param fields: field name -> value
    :param tags: tag name -> value
    :return: the populated Point
    """
    point = Point('ups_status')
    # Idiom fix: iterate items() directly instead of re-indexing by key.
    for name, value in fields.items():
        point.field(name, value)
    for name, value in tags.items():
        point.tag(name, value)
    return point
def _post_telemetry(self) -> None:
    """Post telemetry points"""
    # Guard clause: telemetry is optional.
    if self.telem_logger is None:
        return
    point = (
        Point('sensor_fusion')
        # Complex bias is logged as magnitude plus angle (in degrees).
        .field('blind_target_bias_mag', np.abs(self.blind_target_bias))
        .field('blind_target_bias_angle', np.degrees(np.angle(self.blind_target_bias)))
        .tag('units', 'degrees')
        .tag('class', type(self).__name__)
        .time(datetime.utcnow())
    )
    self.telem_logger.post_points(point)
def process_camera_frame(self) -> Tuple[Time, Angle, Angle]: """Get frame from camera and find target using computer vision Args: telem: Dict into which telemetry channels will be added Returns: Tuple containing: - The approximate time that the camera frame was captured. - The position of the target within the camera frame where the first element is the X position and the second is the Y position and the origin is the center of the camera frame. """ # This time isn't going to be exceptionally accurate, but unfortunately most cameras do not # provide a means of determining the exact time when the frame was captured by the sensor. # There are probably ways to estimate the frame time more accurately but this is likely # good enough. target_time = Time.now() frame = self.camera.get_frame(timeout=self.camera_timeout) if frame is None: raise self.IndeterminatePosition( 'Timeout waiting for frame from camera') keypoints = find_features(frame) if not keypoints: self.preview_window.show_annotated_frame(frame) raise self.IndeterminatePosition( 'No target detected in most recent frame') # select the keypoint that is most likely to be the target of interest target_keypoint = self._select_one_keypoint(keypoints) self.preview_window.show_annotated_frame(frame, keypoints, target_keypoint) # convert target position units from pixels to degrees target_x_px, target_y_px = self._get_keypoint_xy(target_keypoint) target_x = Angle(target_x_px * self.camera.pixel_scale * self.camera.binning * u.deg) target_y = Angle(target_y_px * self.camera.pixel_scale * self.camera.binning * u.deg) if self.telem_logger is not None: p = Point('camera_target') p.field('x', target_x.deg) p.field('y', target_y.deg) p.tag('units', 'degrees') p.tag('class', type(self).__name__) p.time(target_time.to_datetime()) self.telem_logger.post_points(p) return target_time, target_x, target_y
def report_data_list(self, category, host, data):
    """Report a list of (field, value) pairs for *host* under measurement *category*."""
    try:
        point = Point(category).tag("host", host)
        for field_key, field_value in data:
            point.field(field_key, field_value)
        point.time(datetime.datetime.utcnow(), WritePrecision.NS)
        self._write_client.write(self._settings.bucket, self._settings.tenant, point)
    except Exception as e:
        # Reporting is best-effort: log and carry on rather than crash the caller.
        logger.error(f"Failed to report data to InfluxDB: {e}")
def _process_data_row(rowIn, tblFlds, tblName):
    """Translate one data row into a Point, routing each column to a field or a tag.

    tblFlds maps column name -> 'field' | 'tag'; rowIn must carry a 'timestamp'.
    """
    point = Point(tblName).time(rowIn['timestamp'], WritePrecision.NS)
    for column, role in tblFlds.items():
        if role == 'field':
            point.field(column, rowIn[column])
        elif role == 'tag':
            point.tag(column, rowIn[column])
    return point
def save(self, measurement: str, fields: dict, tags: dict):
    """Write one point with the given fields and tags, timestamped now (UTC, ns).

    :param measurement: measurement name
    :param fields: field name -> value
    :param tags: tag name -> value
    """
    point = Point(measurement)
    # Idiom fix: iterate items() directly instead of re-indexing by key.
    for key, value in fields.items():
        point.field(key, value)
    for key, value in tags.items():
        point.tag(key, value)
    point.time(datetime.utcnow(), WritePrecision.NS)
    self.__write_api__.write(INFLUX_BUCKET, INFLUX_ORG, point)
def publishData(measurement, tags, time, fields):
    """Publish a single point built from tag and field dicts at *time*."""
    point = Point(measurement).time(time)
    for tag_key, tag_value in tags.items():
        point.tag(tag_key, tag_value)
    for field_key, field_value in fields.items():
        point.field(field_key, field_value)
    write_api.write(bucket=INFLUX_DB_NAME, record=point)
    log.info("Published data point in influxdb.")
def update_influx(self):
    """Push the per-day Italy dataframe into InfluxDB as 'italia' points.

    String columns become tags; numeric, non-NaN columns become fields.
    """
    points = []
    for date, columns in DATA.italy.to_dict('index').items():
        point = Point("italia")
        point.time(date.value)
        for key, value in columns.items():
            if type(value) is str:
                point.tag(key, value)
            # BUG FIX: this check previously ran for string values too, and
            # np.isnan(str) raises TypeError; only numeric values reach it now.
            elif not np.isnan(value):
                point.field(key, value)
        points.append(point)
    self.write.write("coviddi", "coviddi", points)
def point(reg):
    """Decode a 36-byte register blob (uint32 timestamp + 32 bytes) into a Point."""
    timestamp, *words = struct.unpack('I32B', reg)
    p = Point('frame').time(timestamp)
    # Every 5th word (indices 0, 5, 10, ...) is excluded from the fields.
    # NOTE(review): range(31) also leaves out index 31 entirely — confirm intentional.
    for idx in (w for w in range(31) if w % 5 != 0):
        p.field(f'word_{idx:02d}', words[idx])
    frame_number = (words[2] >> 2) & 0x3
    p.tag('frame_number', frame_number)
    # presumably 0b11110100 is a sync marker for the first frame of a group — verify
    if frame_number == 0 and words[7] == 0b11110100:
        p.tag('start_of_group', True)
    return p
def work(self):
    """Poll the inverter once and submit the sample to InfluxDB."""
    self.log.info('Polling and submitting')
    timestamp = now()
    sample = self.sma.read()
    point = Point('sma').time(timestamp)
    # 'unit' is part of the field tuple but not stored on the point.
    for field, value, unit in objects.fields(sample):
        if value is not None:
            point.field(field, value)
    self.log.debug(f'Result: {point.to_line_protocol()}')
    self.influx_write_api.write(bucket=self.config.influxdb_bucket, record=point)
def write(self, results):
    """Convert check results into Points and write them as one batch."""
    points = []
    for result in results:
        point = (
            Point(result[Check.Result.NAME])
            .tag("host", result[Check.Result.HOST])
            .time(result[Check.Result.TIME].isoformat())
        )
        if Check.Result.DEVICE in result:
            point = point.tag("device", result[Check.Result.DEVICE])
        for field in result[Check.Result.FIELDS]:
            point = point.field(field[Check.Field.NAME], field[Check.Field.VALUE])
            # A field's unit, when present, is stored as a companion field.
            if Check.Field.UNIT in field:
                point = point.field(f"{field[Check.Field.NAME]}_unit", field[Check.Field.UNIT])
        points.append(point)
    self.write_api.write(self.bucket, self.org, points)
def get_telem_points(self) -> List[Point]:
    """Called by telemetry logger. See `TelemSource` abstract base class."""
    # Scalar gamepad state: one field per captured attribute.
    point = Point('gamepad')
    for attr in ('left_x', 'left_y', 'right_x', 'right_y', 'int_x', 'int_y', 'integrator_mode'):
        point.field(attr, self.__dict__[attr])
    point.time(datetime.utcnow())
    # Raw event state goes out as its own measurement.
    point_raw = Point.from_dict({
        'measurement': 'gamepad_events',
        'fields': self.state,
        'time': datetime.utcnow(),
    })
    return [point, point_raw]
def writeData(self, database, name, data, tag=None):
    """Write *data* (dict of fields) as one point named *name* to *database*.

    :param database: target bucket name
    :param name: measurement name
    :param data: field name -> value
    :param tag: optional (key, value) pair applied as a tag
    """
    self.connect()
    write_api = self.client.write_api(write_options=SYNCHRONOUS)
    # BUG FIX: 'p' must exist before the try body so the except-handler's log
    # call cannot raise NameError when Point construction itself fails.
    p = None
    try:
        p = Point(name)
        if tag is not None:
            p.tag(*tag)
        for key, value in data.items():
            p.field(key, value)
        logger.debug("Writing Data to DB: %s", p)
        write_api.write(bucket=database, record=p)
    except Exception:
        logger.exception("Exception writing data: %s", p)
        # Drop the client so the next call reconnects from scratch.
        self.client = None
def write_point(measurement):
    """Write one point described by a dict with 'name', 'tags' and 'fields' keys."""
    point = Point(measurement["name"])
    for key, value in measurement["tags"].items():
        point = point.tag(key, value)
    for key, value in measurement["fields"].items():
        point = point.field(key, value)
    point = point.time(datetime.utcnow(), WritePrecision.NS)
    write_api.write(bucket, org, point)
def on_mqtt_message(client, userdata, msg):
    """Forward a JSON MQTT payload to InfluxDB; 'timestamp' becomes the point time."""
    print("Received '%s' - '%s'" % (msg.topic, str(msg.payload)))
    try:
        payload = json.loads(msg.payload)
    except json.decoder.JSONDecodeError:
        print("### Message payload is invalid JSON! ###")
        return
    point = Point(msg.topic)
    for key, value in payload.items():
        if key != "timestamp":
            point.field(key, value)
        elif isinstance(value, numbers.Number):
            # Numeric timestamps are used as-is.
            point.time(value)
        else:
            # Otherwise the timestamp is expected as 'YYYY-mm-dd HH:MM:SS'.
            point.time(datetime.strptime(value, "%Y-%m-%d %H:%M:%S"))
    influx_write.write(bucket=config.bucket, record=point)
def send(self, vals, batteryAPI):
    """Send inverter readings (minus 'Serial') to InfluxDB 1.x via the v2 API layer.

    :param vals: dict of readings; must include 'name' (used as the inverter tag)
    :param batteryAPI: unused here; kept for interface compatibility
    """
    client = InfluxDBClient(url=self.influx_url,
                            token=f'{self.influx_user}:{self.influx_pass}', org='-')
    bucket = f'{self.influx_database}/{self.influx_retention_policy}'
    try:
        write_api = client.write_api()
        inverterDetails = vals.copy()
        inverterDetails.pop('Serial', None)
        point = Point("solax").tag("inverter", vals['name'])
        for field_name, field_value in inverterDetails.items():
            point.field(field_name, field_value)
        write_api.write(bucket=bucket, record=point)
        # BUG FIX: calling write_api.__del__() directly is undefined behaviour
        # (the finalizer may run again at GC); close() is the supported
        # flush-and-shutdown entry point.
        write_api.close()
    finally:
        # Ensure the client is released even when the write fails.
        client.close()
def send_dict_influxdb(user, pointName, mainObject, field=False):
    """
    format dict to send data at influxDb
    :param user: name on the user
    :param pointName: name on the point
    :param mainObject: object to iterate
    :param field: optional key selecting a sub-dict of mainObject; also stored as a 'data' tag
    """
    toIterate = mainObject.get(field) if field else mainObject
    # One record per key: every key/value pair becomes its own point.
    for key in toIterate:
        point = Point(pointName).tag("host", user)
        if field:
            point.tag("data", field)
        point.field(key, toIterate.get(key)).time(datetime.utcnow(), WritePrecision.NS)
        write_api.write(bucket, org, point)
def saveToInfluxDB(payload):
    """Persist a device status payload as one point named '<label> (<room>)'."""
    dbPayload = Point(payload["label"] + " (" + payload["room"] + ")")

    # payload.lastStatusUpdate is in LOCAL time ("Europe/Berlin");
    # point.time() expects time in UTC, so convert before stamping.
    localDatetime = payload["lastStatusUpdate"]
    utcDatetime = TimeUtil.convertDatetimeToUtc(localDatetime)
    logging.info("Converted lastStatusUpdate ('" + payload["label"] + "') local time (" +
                 str(localDatetime) + ") to UTC (" + str(utcDatetime) + ")")
    dbPayload.time(utcDatetime)

    # Everything except the identity/time keys becomes a field.
    skip = ("label", "room", "lastStatusUpdate")
    for fieldName, value in payload.items():
        if fieldName not in skip:
            dbPayload.field(fieldName, value)

    write_api.write(bucket=bucket, record=dbPayload)
def test_write_using_default_tags(self):
    """Write two points in one batch and verify default tags land on every record."""
    bucket = self.create_test_bucket()
    measurement = "h2o_feet"
    field_name = "water_level"
    tag = "location"
    tag_value = "creek level"

    p = Point(measurement).field(field_name, "1.0").tag(tag, tag_value).time(1)
    p2 = Point(measurement).field(field_name, "2.0").tag(tag, tag_value).time(2)

    self.write_client.write(bucket.name, self.org, [p, p2])

    query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
    tables = self.client.query_api().query(query)
    self.assertEqual(1, len(tables))

    # Both records must carry the client's default tags.
    for record in (tables[0].records[0], tables[0].records[1]):
        self.assertEqual(self.id_tag, record["id"])
        self.assertEqual(self.customer_tag, record["customer"])
        self.assertEqual("LA", record[self.data_center_key])

    self.delete_test_bucket(bucket)