def getSensePoints(self, imperial_or_metric, bucket):
    """Read all Sense HAT sensors and package them as one InfluxDB point.

    :param imperial_or_metric: "imperial" converts temperatures to Fahrenheit
        and pressure to PSI; any other value keeps the sensor's native units.
    :param bucket: bucket name attached to the returned record.
    :return: single-element list of {"bucket": ..., "point": ...} dicts.
    """
    # Timestamp as an ISO-8601 string in US/Pacific local time.
    dt = datetime.now(tz=pytz.timezone('US/Pacific')).isoformat()
    point = Point(measurement_name="sense")
    point.time(time=dt)
    # % relative
    point.field("humidity", self.sense.get_humidity())
    if imperial_or_metric == "imperial":
        point.field(
            "temperature_from_humidity",
            convertCToF(self.sense.get_temperature_from_humidity()))
        point.field(
            "temperature_from_pressure",
            convertCToF(self.sense.get_temperature_from_pressure()))
        point.field("pressure", convertmbToPSI(self.sense.get_pressure()))
    else:
        # Native units: degrees Celsius and millibars.
        point.field("temperature_from_humidity",
                    self.sense.get_temperature_from_humidity())
        point.field("temperature_from_pressure",
                    self.sense.get_temperature_from_pressure())
        point.field("pressure", self.sense.get_pressure())
    point.field("orientation_radians", self.sense.get_orientation_radians())
    # NOTE(review): "orientation_degress" looks like a typo for
    # "orientation_degrees", but renaming it would change the stored field
    # name in the database — confirm downstream queries before fixing.
    point.field("orientation_degress", self.sense.get_orientation_degrees())
    # magnetic intensity in microteslas
    point.field("compass_raw", self.sense.get_compass_raw())
    # rotational intensity in radians per second
    point.field("gyroscope_raw", self.sense.get_gyroscope_raw())
    # acceleration intensity in Gs
    point.field("accelerometer_raw", self.sense.get_accelerometer_raw())
    return [{"bucket": bucket, "point": point}]
def getGRCPoints(self, bucket):
    """Build InfluxDB records for Gridcoin magnitude and mining statistics.

    Returns a list of {"bucket": ..., "point": ...} dicts: one 'magnitude'
    point per project plus a single 'mining' summary point, all sharing the
    same US/Pacific timestamp.
    """
    timestamp = datetime.now(tz=pytz.timezone('US/Pacific')).isoformat()
    magnitude_report = self.explainMagnitude()
    mining = self.getMiningInfo()
    points = []

    # One 'magnitude' point per project.
    for project in magnitude_report.magnitude:
        proj_point = Point(measurement_name='magnitude')
        proj_point.time(time=timestamp)
        proj_point.tag('project_name', project.project)
        proj_point.field('rac', project.rac)
        proj_point.field('magnitude', project.magnitude)
        points.append({"bucket": bucket, "point": proj_point})

    # Single 'mining' point summarizing staking state.
    mining_point = Point(measurement_name='mining')
    mining_point.time(time=timestamp)
    mining_point.tag('CPID', mining.CPID)
    mining_point.field('blocks', mining.blocks)
    mining_point.field('magnitude', mining.current_magnitude)
    mining_point.field('current_difficulty', mining.difficulty.current)
    mining_point.field('pending_reward', mining.BoincRewardPending)
    mining_point.field('stake_weight', mining.stakeweight.valuesum)
    mining_point.field('time_to_stake', mining.time_to_stake_days)
    mining_point.field('staking_efficiency', mining.staking_efficiency)
    points.append({"bucket": bucket, "point": mining_point})

    return points
def write(self, bucket, measurement: str, time, field_list: list, tag_list: list = None, **kwargs):
    '''
    Write a single point to InfluxDB.

    :param bucket: the bucket on which to write the data
    :param measurement: name of measurement
    :param time: timestamp (written with millisecond precision)
    :param field_list: field list containing tuples (key, value); must be non-empty
    :param tag_list: optional tag list containing tuples (key, value)
    :param kwargs: forwarded to the underlying write_api.write() call
    :raises Exception: if field_list is empty (a point needs at least one field)
    :return: None
    '''
    # FIX: the default was a mutable [] shared across all calls; use the
    # None-sentinel idiom instead so each call gets its own empty list.
    if tag_list is None:
        tag_list = []
    # Validate before building the point: nothing to write without fields.
    if not field_list:
        # TODO : Create an exception NoDataException
        raise Exception("Not point to write in database.")
    point = Point(measurement)
    point.time(time, WritePrecision.MS)
    for field_key, field_value in field_list:
        point.field(field_key, field_value)
    for tag_key, tag_value in tag_list:
        point.tag(tag_key, tag_value)
    self.write_api.write(bucket=bucket, record=point, org=self.org, **kwargs)
def _post_telemetry(self) -> None:
    """Post a 'sensor_fusion' telemetry point describing the blind-target bias."""
    if self.telem_logger is None:
        return
    bias = self.blind_target_bias
    telem_point = Point('sensor_fusion')
    telem_point.field('blind_target_bias_mag', np.abs(bias))
    telem_point.field('blind_target_bias_angle', np.degrees(np.angle(bias)))
    telem_point.tag('units', 'degrees')
    telem_point.tag('class', type(self).__name__)
    telem_point.time(datetime.utcnow())
    self.telem_logger.post_points(telem_point)
def process_camera_frame(self) -> Tuple[Time, Angle, Angle]:
    """Get a frame from the camera and locate the target via computer vision.

    Returns:
        Tuple containing:
        - The approximate time the camera frame was captured.
        - The target's X position within the frame as an angle.
        - The target's Y position within the frame as an angle.
        The origin of the X/Y coordinates is the center of the camera frame.

    Raises:
        IndeterminatePosition: If no frame arrives before the timeout or no
            target is detected in the frame.
    """
    # Cameras generally don't report the exact sensor capture time, so the
    # current time is used as a good-enough approximation.
    capture_time = Time.now()

    image = self.camera.get_frame(timeout=self.camera_timeout)
    if image is None:
        raise self.IndeterminatePosition(
            'Timeout waiting for frame from camera')

    features = find_features(image)
    if not features:
        self.preview_window.show_annotated_frame(image)
        raise self.IndeterminatePosition(
            'No target detected in most recent frame')

    # Pick the keypoint most likely to be the target of interest.
    best_keypoint = self._select_one_keypoint(features)
    self.preview_window.show_annotated_frame(image, features, best_keypoint)

    # Convert the target position from pixels to degrees.
    x_px, y_px = self._get_keypoint_xy(best_keypoint)
    x_angle = Angle(x_px * self.camera.pixel_scale * self.camera.binning * u.deg)
    y_angle = Angle(y_px * self.camera.pixel_scale * self.camera.binning * u.deg)

    if self.telem_logger is not None:
        telem_point = Point('camera_target')
        telem_point.field('x', x_angle.deg)
        telem_point.field('y', y_angle.deg)
        telem_point.tag('units', 'degrees')
        telem_point.tag('class', type(self).__name__)
        telem_point.time(capture_time.to_datetime())
        self.telem_logger.post_points(telem_point)

    return capture_time, x_angle, y_angle
def save(self, measurement: str, fields: dict, tags: dict):
    """Write one point with the given fields and tags, timestamped now (UTC, ns)."""
    record = Point(measurement)
    for name, value in fields.items():
        record.field(name, value)
    for name, value in tags.items():
        record.tag(name, value)
    record.time(datetime.utcnow(), WritePrecision.NS)
    self.__write_api__.write(INFLUX_BUCKET, INFLUX_ORG, record)
def _process_data_row(rowIn, tblFlds, tblName):
    """Convert one data row into an InfluxDB point for measurement `tblName`.

    `tblFlds` maps each column name to its role ('field' or 'tag'); columns
    with any other role are ignored. The row's 'timestamp' column becomes the
    point time at nanosecond precision.
    """
    point = Point(tblName)
    point.time(rowIn['timestamp'], WritePrecision.NS)
    for column, role in tblFlds.items():
        if role == 'field':
            point.field(column, rowIn[column])
        elif role == 'tag':
            point.tag(column, rowIn[column])
    return point
def report_data_list(self, category, host, data):
    """Write a list of (key, value) pairs as one point tagged with the host.

    Failures are logged rather than raised so reporting stays best-effort.
    """
    try:
        point = Point(category).tag("host", host)
        for field_key, field_value in data:
            point.field(field_key, field_value)
        point.time(datetime.datetime.utcnow(), WritePrecision.NS)
        self._write_client.write(self._settings.bucket, self._settings.tenant, point)
    except Exception as exc:
        logger.error(f"Failed to report data to InfluxDB: {exc}")
def update_influx(self):
    """Write the per-date Italy COVID data to InfluxDB.

    String columns become tags; numeric columns become fields, skipping NaNs.
    """
    points = []
    for date, fields in DATA.italy.to_dict('index').items():
        point = Point("italia")
        point.time(date.value)
        for key, value in fields.items():
            # FIX: the original used two independent `if`s, so string values
            # fell through into np.isnan(), which raises TypeError on str.
            # The tag branch must exclude the field branch.
            if isinstance(value, str):
                point.tag(key, value)
            elif not np.isnan(value):  # skip missing numeric values
                point.field(key, value)
        points.append(point)
    self.write.write("coviddi", "coviddi", points)
def point(reg):
    """Decode a packed frame register into an InfluxDB 'frame' point.

    `reg` is unpacked as one uint32 timestamp followed by 32 bytes. Bytes at
    indices that are multiples of 5 are skipped; the rest become word_NN
    fields. The frame number is extracted from bits 2-3 of word 2, and a
    start-of-group tag is added when frame 0 carries the 0b11110100 marker
    in word 7.
    """
    unpacked = struct.unpack('I32B', reg)
    timestamp, words = unpacked[0], unpacked[1:]

    frame_point = Point('frame')
    frame_point.time(timestamp)
    for idx in [i for i in range(31) if i % 5 != 0]:
        frame_point.field(f'word_{idx:02d}', words[idx])

    frame_number = (words[2] >> 2) & 0x3
    frame_point.tag('frame_number', frame_number)
    if frame_number == 0 and words[7] == 0b11110100:
        frame_point.tag('start_of_group', True)
    return frame_point
def panel_info_to_influx_points(panel_info):
    """Convert a parsed cellar-panel snapshot into one InfluxDB point per control."""
    read_time = panel_info['read_at']
    result = []
    for control in panel_info['controls']:
        batch = control['batch_info']
        pt = Point("cellar_panel_read")
        pt.tag("slot", control["slot"])
        pt.tag("vessel", control["label"])
        pt.tag("batch_number", batch['Batch #'])
        pt.field("temp", control['temp'])
        pt.field("set_point", control['set_point'])
        pt.field("valve_open", control['valve_open'])
        pt.field("days_in_vessel", batch['Days in Vessel'])
        pt.time(read_time, WritePrecision.NS)
        result.append(pt)
    return result
def get_telem_points(self) -> List[Point]:
    """Called by telemetry logger. See `TelemSource` abstract base class."""
    # Analog stick / integrator state: one field per instance attribute.
    analog = Point('gamepad')
    for attr in ('left_x', 'left_y', 'right_x', 'right_y',
                 'int_x', 'int_y', 'integrator_mode'):
        analog.field(attr, self.__dict__[attr])
    analog.time(datetime.utcnow())

    # Raw event state captured as a separate measurement.
    raw = Point.from_dict({
        'measurement': 'gamepad_events',
        'fields': self.state,
        'time': datetime.utcnow(),
    })
    return [analog, raw]
def on_mqtt_message(client, userdata, msg):
    """Write an incoming MQTT JSON payload to InfluxDB, topic as measurement.

    A 'timestamp' key becomes the point time (numeric epoch or
    'YYYY-mm-dd HH:MM:SS' string); every other key becomes a field.
    Invalid JSON is reported and dropped.
    """
    print("Received '%s' - '%s'" % (msg.topic, str(msg.payload)))
    try:
        payload = json.loads(msg.payload)
        point = Point(msg.topic)
        for key in payload:
            value = payload[key]
            if key != "timestamp":
                point.field(key, value)
            elif isinstance(value, numbers.Number):
                point.time(value)
            else:
                point.time(datetime.strptime(value, "%Y-%m-%d %H:%M:%S"))
        influx_write.write(bucket=config.bucket, record=point)
    except json.decoder.JSONDecodeError:
        print("### Message payload is invalid JSON! ###")
def write_point(measurement):
    """Write one measurement dict ({'name', 'tags', 'fields'}) to InfluxDB,
    timestamped with the current UTC time at nanosecond precision."""
    point = Point(measurement["name"])
    for tag_key, tag_value in measurement["tags"].items():
        point = point.tag(tag_key, tag_value)
    for field_key, field_value in measurement["fields"].items():
        point = point.field(field_key, field_value)
    point = point.time(datetime.utcnow(), WritePrecision.NS)
    write_api.write(bucket, org, point)
def saveToInfluxDB(payload):
    """Persist one device status payload as an InfluxDB point.

    The measurement name combines the device label and room; the payload's
    lastStatusUpdate (local "Europe/Berlin" time) is converted to UTC because
    point.time() expects UTC. All remaining payload keys become fields.
    """
    metadata_keys = ("label", "room", "lastStatusUpdate")
    record = Point(f"{payload['label']} ({payload['room']})")

    local_dt = payload["lastStatusUpdate"]
    utc_dt = TimeUtil.convertDatetimeToUtc(local_dt)
    logging.info(
        f"Converted lastStatusUpdate ('{payload['label']}') local time "
        f"({local_dt}) to UTC ({utc_dt})")
    record.time(utc_dt)

    for name, value in payload.items():
        if name not in metadata_keys:
            record.field(name, value)
    write_api.write(bucket=bucket, record=record)
def test_write_using_default_tags(self):
    """Points written without explicit default tags must still carry the
    client's configured default tags (id, customer, data center)."""
    bucket = self.create_test_bucket()
    measurement = "h2o_feet"
    field_name = "water_level"
    val = "1.0"
    val2 = "2.0"
    tag = "location"
    tag_value = "creek level"

    first = Point(measurement).field(field_name, val).tag(tag, tag_value).time(1)
    second = Point(measurement).field(field_name, val2).tag(tag, tag_value).time(2)
    self.write_client.write(bucket.name, self.org, [first, second])

    query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
    flux_result = self.client.query_api().query(query)
    self.assertEqual(1, len(flux_result))

    # Both records in the single table should carry the default tags.
    for record in (flux_result[0].records[0], flux_result[0].records[1]):
        self.assertEqual(self.id_tag, record["id"])
        self.assertEqual(self.customer_tag, record["customer"])
        self.assertEqual("LA", record[self.data_center_key])

    self.delete_test_bucket(bucket)
def _finish_control_cycle(
    self,
    cycle_period: Optional[float],
    mount_state: MountState,
    rate_command: Optional[SlewRateCommand] = None,
    rate_command_time_error: Optional[float] = None,
    callback_override: bool = False,
) -> "Tracker.StopReason":
    """Final tasks to perform at the end of each control cycle.

    Evaluates the stopping conditions against the current target/mount
    separation (when the target position is determinate) and, if a telemetry
    logger is attached, posts a set of telemetry points describing this cycle.

    Args:
        cycle_period: Duration of this control cycle, or None if unknown.
        mount_state: Mount encoder positions, slew rates, and query time.
        rate_command: Slew rate command issued this cycle, if any.
        rate_command_time_error: Timing error of the rate command, if known.
        callback_override: True if a callback overrode the controller output.

    Returns:
        The stop reason determined by the stopping conditions.
    """
    # list of telemetry points to be populated
    points = []

    # timestamp to use for all telemetry points that don't correspond to sensor readings
    # or other events that occur at well-defined times
    cycle_timestamp = datetime.utcnow()

    # coordinate system transformations
    position_mount_topo = self.mount_model.encoders_to_topocentric(mount_state.position)

    try:
        # get target position for the same time as mount state was queried
        position_target = self.target.get_position(mount_state.time_queried)
    except Target.IndeterminatePosition:
        stop_reason = self._check_stopping_conditions()
    else:
        # on-sky separation between target and mount positions
        error_magnitude = separation(position_target.topo, position_mount_topo)
        stop_reason = self._check_stopping_conditions(error_magnitude)

        if self.telem_logger is not None:
            # per-axis pointing error in encoder space
            error_enc = {
                axis: float(
                    smallest_allowed_error(
                        mount_state.position[axis].deg,
                        position_target.enc[axis].deg,
                        self.mount.no_cross_encoder_positions()[axis].deg,
                    ))
                for axis in self.axes
            }

            # target position
            pt = Point('target_position')
            pt.field('azimuth', position_target.topo.az.deg)
            pt.field('altitude', position_target.topo.alt.deg)
            for axis in self.axes:
                pt.field(f'encoder_{axis}', position_target.enc[axis].deg)
            pt.tag('units', 'degrees')
            pt.tag('class', type(self).__name__)
            pt.time(position_target.time.to_datetime())
            points.append(pt)

            # mount position error
            pt = Point('mount_position_error')
            pt.field('magnitude', error_magnitude.deg)
            for axis in self.axes:
                # FIX: field name was misspelled 'enoder_*', inconsistent with
                # the 'encoder_*' fields used by every other point here.
                pt.field(f'encoder_{axis}', error_enc[axis])
            pt.tag('units', 'degrees')
            pt.tag('class', type(self).__name__)
            pt.time(cycle_timestamp)
            points.append(pt)

    if self.telem_logger is not None:
        # control cycle statistics
        pt = Point('control_cycle_stats')
        pt.field('period', cycle_period)
        pt.field('cycle_count', self.num_iterations)
        pt.field('callback_override', callback_override)
        pt.tag('class', type(self).__name__)
        pt.time(cycle_timestamp)
        points.append(pt)

        # mount positions
        pt = Point('mount_position')
        for axis in self.axes:
            pt.field(f'encoder_{axis}', mount_state.position[axis].deg)
        pt.field('azimuth', position_mount_topo.az.deg)
        pt.field('altitude', position_mount_topo.alt.deg)
        pt.tag('units', 'degrees')
        pt.tag('class', type(self).__name__)
        pt.time(mount_state.time_queried.to_datetime())
        points.append(pt)

        # mount slew rate
        pt = Point('mount_rate')
        for axis in self.axes:
            pt.field(f'axis_{axis}', mount_state.rates[axis])
        pt.tag('units', 'degrees/s')
        pt.tag('class', type(self).__name__)
        pt.time(mount_state.time_queried.to_datetime())
        points.append(pt)

        # controller commands
        if rate_command is not None:
            pt = Point('controller_commands')
            for axis in self.axes:
                pt.field(f'rate_axis_{axis}', rate_command.rates[axis])
            if rate_command_time_error is not None:
                pt.field('time_error', rate_command_time_error.sec)
            pt.tag('units', 'degrees/s')
            pt.tag('class', type(self).__name__)
            pt.time(cycle_timestamp)
            points.append(pt)

        self.telem_logger.post_points(points)

    self.num_iterations += 1
    return stop_reason
# point.time() expects time in UTC localDatetime = payload["lastStatusUpdate"] utcDatetime = TimeUtil.convertDatetimeToUtc(localDatetime) logging.info("Converted lastStatusUpdate ('" + payload["label"] + "') local time (" + str(localDatetime) + ") to UTC (" + str(utcDatetime) + ")") dbPayload.time(utcDatetime) for fieldName, value in payload.items(): if fieldName != "label" and fieldName != "room" and fieldName != "lastStatusUpdate": dbPayload.field(fieldName, value) write_api.write(bucket=bucket, record=dbPayload) #print("Saved payload in DB.") if __name__ == "__main__": logging.info("Saving single test value in DB...") testPayload = Point("testMeasurement") testPayload.field("testField", 0.5) # Point.time expects datetime in UTC #logging.info("Current UTC time: " + str(datetime.now(pytz.utc))) testPayload.time(datetime.now(pytz.utc)) write_api.write(bucket="test_database", record=testPayload)
# with pysnooper.snoop():
write_api = client.write_api(write_options=SYNCHRONOUS)

# Emit 5 random business-intel points every 5 seconds, 100 times.
for i in range(0, 100):
    points = []
    #local = time.localtime()
    for x in range(0, 5):
        # FIX: the original chained these calls with backslash continuations
        # and had a commented-out '.tag("host", host)' line *inside* the
        # continuation, which is a SyntaxError. Parenthesized chaining keeps
        # the host tag disabled without breaking the expression.
        points.append(
            Point("biz_intel")
            .tag("region", random.choice(regions))
            .tag("app", random.choice(apps))
            # .tag("host", host)
            .field("user_sessions", random.choice(user_sessions))
            .field("num_transactions", random.choice(num_xactions))
            .time(time.time_ns()))
    #print(local)
    write_api.write(bucket=bucket, org=org, record=points)
    # print(points[0].time())
    sleep(5)

# Equivalent record expressed as raw line protocol (unused; kept for reference).
p_alt = f"biz_intel,region={random.choice(regions)},app={random.choice(apps)},host={host} user_sessions={random.choice(user_sessions)},num_transactions={random.choice(num_xactions)} "
point = Point("biz_intel").tag("region", random.choice(regions)) \
    .tag("app", random.choice(apps)) \
    .tag("host", host) \
    .field("user_sessions", random.choice(user_sessions)) \
    .field("num_transactions", random.choice(num_xactions))
# FIX: the original ended the chain above with a trailing backslash
# (SyntaxError) and then called point.time() with no argument, which raises
# TypeError (Point.time requires a time value); print the assembled point's
# line protocol instead.
print(point.to_line_protocol())
# client.__del__()
def get_gateway_details(gateway):
    """Build and print an InfluxDB line-protocol record for one TTN gateway.

    Tags the point with the gateway id/name, the first antenna's location (if
    present), and any gateway attributes; then queries the gateway
    connection-stats endpoint and records status/traffic fields. The finished
    point is printed as line protocol on stdout (presumably consumed by a
    downstream collector — TODO confirm).

    :param gateway: gateway description dict as returned by the TTN API.
    """
    #print(gateway)
    gateway_id = gateway['ids']['gateway_id']
    point = Point("TTN_Gateways").tag("gateway_id", gateway_id).tag("name", gateway['name'])
    # Record the first antenna's position as tags, defaulting each missing
    # dimension to 0.
    if 'antennas' in gateway:
        for dimension in ['latitude', 'longitude', 'altitude']:
            if dimension in gateway['antennas'][0]['location']:
                value = gateway['antennas'][0]['location'][dimension]
            else:
                value = 0
            point = point.tag(dimension, value)
        #body['gateway']['antennas'][0]['location'][dimension] = antenna_locations[dimension]
        #point = point.tag('latitude',gateway['antennas'][0]['location']['latitude']).tag('longitude',gateway['antennas'][0]['location']['longitude']).tag('altitude',gateway['antennas'][0]['location']['altitude'])
        #for key,value in gateway['antennas'][0]['location']:
        #    point = point.tag(key,value)
    # Fetch current connection statistics for this gateway, e.g.
    # https://eu1.cloud.thethings.network/api/v3/gs/gateways/<id>/connection/stats
    gateway_stats = (requests.get(base_uri + "/api/v3/gs/gateways/" + gateway_id + "/connection/stats",
                                  params=gateway_stats_params,
                                  headers=http_headers)).json()
    if 'attributes' in gateway:
        for key, value in gateway['attributes'].items():
            point = point.tag(key, value)
    # Need to consider how to handle last_status_received_at not updating but
    # not getting a 'gateway not connected' message yet, to mark a site as
    # 'down'. Can probably handle this in the query?
    if "connected_at" in gateway_stats:
        #print(gateway_stats)
        point = point.field("status", 1)
        if 'last_status_received_at' in gateway_stats:
            point = point.time(gateway_stats['last_status_received_at'])
        if 'uplink_count' in gateway_stats:
            point = point.field("uplink_count", will_it_float(gateway_stats['uplink_count']))
        if 'downlink_count' in gateway_stats:
            point = point.field("downlink_count", will_it_float(gateway_stats['downlink_count']))
        if 'last_status' in gateway_stats:
            if 'metrics' in gateway_stats['last_status']:
                # Each reported metric becomes a numeric field.
                for key, value in gateway_stats['last_status']['metrics'].items():
                    point = point.field(key, will_it_float(value))
            # Could use the latest antenna location
            # (gateway_stats['last_status']['antenna_locations'][0]) to
            # automatically update the gateway location, as it's ignored from
            # UDP gateways.
            #print(gateway_stats)
            if 'antenna_locations' in gateway_stats['last_status']:
                # Update the registered location when it differs from the
                # latest reported antenna position (or none is registered).
                if 'antennas' not in gateway or gateway['antennas'][0][
                        'location']['latitude'] != gateway_stats[
                            'last_status']['antenna_locations'][0][
                                'latitude'] or gateway['antennas'][0][
                                    'location']['longitude'] != gateway_stats[
                                        'last_status']['antenna_locations'][0][
                                            'longitude']:
                    update_gateway(
                        gateway_id,
                        gateway_stats['last_status']['antenna_locations'][0])
    else:
        # Gateway not connected
        point = point.field("status", 0)
    print(point.to_line_protocol())
    # flush stdout
    sys.stdout.flush()
# Push one point per UPS to InfluxDB, then sleep for an interval that depends
# on whether every unit reported healthy line power at full charge.
power_ok = True
write_api = client.write_api(write_options=SYNCHRONOUS)
for host_results in ups_data:
    for unit in host_results:
        if unit == False:  # placeholder for an unreachable UPS; skip it
            continue
        # anything other than on-line ('OL') at 100% charge is a power problem
        if unit['ups.status'] != 'OL' or unit['battery.charge'] < 100:
            power_ok = False
        point = Point("ups")
        point.tag("ups_name", unit['name'])
        # only numeric readings become fields
        for key, value in unit.items():
            if isinstance(value, (float, int)):
                point.field(key, value)
        point.time(datetime.utcnow(), WritePrecision.NS)
        try:
            write_api.write(bucket=bucket, org=org, record=point)
            print(datetime.now().strftime("%d/%m/%Y@%H:%M:%S"), end=':')
            print('data sent for: ' + unit['name'])
        except Exception as e:
            print(datetime.now().strftime("%d/%m/%Y@%H:%M:%S"), end=':')
            print('Could not send data to influx for:' + unit['name'])
            print(e)
            print(influx_url, token, org, bucket)
interval_name = 'OK_INTERVAL' if power_ok else 'NOT_OK_INTERVAL'
time.sleep(general_config.getint('timing', interval_name))