Example No. 1
    def getGRCPoints(self, bucket):
        points = []
        dt = datetime.now(tz=pytz.timezone('US/Pacific')).isoformat()
        mag = self.explainMagnitude()
        mining = self.getMiningInfo()
        for prj in mag.magnitude:
            point = Point(measurement_name='magnitude')
            point.time(time=dt)
            point.tag('project_name', prj.project)
            point.field('rac', prj.rac)
            point.field('magnitude', prj.magnitude)
            points.append({"bucket": bucket, "point": point})

        mining_point = Point(measurement_name='mining')
        mining_point.time(time=dt)
        mining_point.tag('CPID', mining.CPID)
        mining_point.field('blocks', mining.blocks)
        mining_point.field('magnitude', mining.current_magnitude)
        mining_point.field('current_difficulty', mining.difficulty.current)
        mining_point.field('pending_reward', mining.BoincRewardPending)
        mining_point.field('stake_weight', mining.stakeweight.valuesum)
        mining_point.field('time_to_stake', mining.time_to_stake_days)
        mining_point.field('staking_efficiency', mining.staking_efficiency)
        points.append({"bucket": bucket, "point": mining_point})
        return points
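The method above only builds the points; a minimal sketch of a caller that flushes them follows. The `monitor` instance, URL, token, org, and bucket name are all assumptions, not part of the original code.

from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

# hypothetical driver: `monitor` is an instance of the class defined above
with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    write_api = client.write_api(write_options=SYNCHRONOUS)
    for item in monitor.getGRCPoints("gridcoin"):
        write_api.write(bucket=item["bucket"], record=item["point"])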
Example No. 2
    def test_write_point_different_precision(self):
        bucket = self.create_test_bucket()

        _point1 = Point("h2o_feet").tag("location", "coyote_creek").field("level water_level", 5.0).time(5,
                                                                                                         WritePrecision.S)
        _point2 = Point("h2o_feet").tag("location", "coyote_creek").field("level water_level", 6.0).time(6,
                                                                                                         WritePrecision.US)

        _point_list = [_point1, _point2]

        async_results = self.write_client.write(bucket.name, self.org, _point_list)
        self.assertEqual(2, len(async_results))
        for async_result in async_results:
            async_result.get()

        query = f'from(bucket:"{bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z) '\
                '|> sort(columns: [\"_time\"], desc: false)'

        flux_result = self.client.query_api().query(query)

        self.assertEqual(1, len(flux_result))

        records = flux_result[0].records

        self.assertEqual(2, len(records))
        self.assertEqual(records[0].get_time(),
                         datetime.datetime(1970, 1, 1, 0, 0, 0, 6, tzinfo=datetime.timezone.utc))
        self.assertEqual(records[1].get_time(),
                         datetime.datetime(1970, 1, 1, 0, 0, 5, tzinfo=datetime.timezone.utc))
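Why the microsecond point sorts first: the integer passed to .time() is interpreted in the given precision, so time(5, WritePrecision.S) is 5 seconds after the epoch while time(6, WritePrecision.US) is only 6 microseconds after it. A quick sketch to see this in line protocol:

from influxdb_client import Point, WritePrecision

p = Point("h2o_feet").tag("location", "coyote_creek") \
    .field("level water_level", 5.0).time(5, WritePrecision.S)
print(p.to_line_protocol())
# prints something like: h2o_feet,location=coyote_creek level\ water_level=5 5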
Example No. 3
async def main():
    """
    Configure Retries - for more info see https://github.com/inyutin/aiohttp_retry
    """
    retry_options = ExponentialRetry(attempts=3)
    async with InfluxDBClientAsync(
            url="http://localhost:8086",
            token="my-token",
            org="my-org",
            client_session_type=RetryClient,
            client_session_kwargs={"retry_options": retry_options}) as client:
        """
        Write data:
        """
        print(f"\n------- Written data: -------\n")
        write_api = client.write_api()
        _point1 = Point("async_m").tag("location",
                                       "Prague").field("temperature", 25.3)
        _point2 = Point("async_m").tag("location",
                                       "New York").field("temperature", 24.3)
        successfully = await write_api.write(bucket="my-bucket",
                                             record=[_point1, _point2])
        print(f" > successfully: {successfully}")
        """
        Query: Stream of FluxRecords
        """
        print(f"\n------- Query: Stream of FluxRecords -------\n")
        query_api = client.query_api()
        records = await query_api.query_stream(
            'from(bucket:"my-bucket") '
            '|> range(start: -10m) '
            '|> filter(fn: (r) => r["_measurement"] == "async_m")')
        async for record in records:
            print(record)
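A sketch of the imports and entry point this async example assumes; the modules are real, but exact versions may vary.

import asyncio

from aiohttp_retry import ExponentialRetry, RetryClient
from influxdb_client import Point
from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync

if __name__ == "__main__":
    asyncio.run(main())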
Example No. 4
 def waterMessage(self, time, reading, nodeSettings ):
     payload = emonSuite.PayloadWater()
     if emonSuite.EmonSerial.ParseWaterPayload(reading, payload):
         try:
             for sensor in range(payload.numFlowSensors):
                 p = Point("water").tag("sensor",f"water/flowCount/{payload.subnode}/{sensor}")\
                                 .tag("sensorGroup",nodeSettings[payload.subnode]["name"])\
                                 .tag("sensorName",nodeSettings[payload.subnode][f"f{sensor}"])\
                                 .field("value", payload.flowCount[sensor]*nodeSettings[payload.subnode][f"f{sensor}_litresPerPulse"]).time(time)
                 self.write_api.write(bucket=self.bucket, record=p)
             for sensor in range(payload.numHeightSensors):
                 p = Point("tank").tag("sensor", f"water/height/{payload.subnode}/{sensor}")\
                                 .tag("sensorGroup",nodeSettings[payload.subnode]["name"])\
                                 .tag("sensorName",nodeSettings[payload.subnode][f"h{sensor}"])\
                                 .field("value", payload.waterHeight[sensor]/1).time(time)
                 self.write_api.write(bucket=self.bucket, record=p)
             p = Point("supplyV").tag("sensor", f"supplyV/water/{payload.subnode}")\
                                 .tag("sensorGroup",nodeSettings[payload.subnode]["name"])\
                                 .tag("sensorName",nodeSettings[payload.subnode]["name"])\
                                 .field("value", payload.supplyV/1000).time(time)
             self.write_api.write(bucket=self.bucket, record=p)
         if ':' in reading:
                 self.publishRSSI( time, nodeSettings[payload.subnode]['name'], reading )
         except Exception as ex:
             self.printException("waterException", reading, ex)
Example No. 5
def data_convert_printer(data):
    converted_data = []
    state_data = data.get("state")
    temperature_data = data.get("temperature")

    printer_state_point = Point("printer_state").tag("X-API-KEY", X_API_KEY)\
        .field("flags_cancelling", state_data.get("flags").get("cancelling"))\
        .field("flags_closedOrError", state_data.get("flags").get("error"))\
        .field("flags_finishing", state_data.get("flags").get("finishing"))\
        .field("flags_operational", state_data.get("flags").get("operational"))\
        .field("flags_paused", state_data.get("flags").get("paused"))\
        .field("flags_pausing", state_data.get("flags").get("pausing"))\
        .field("flags_printing", state_data.get("flags").get("printing"))\
        .field("flags_ready", state_data.get("flags").get("ready"))\
        .field("flags_resuming", state_data.get("flags").get("resuming"))\
        .field("flags_sdReady", state_data.get("flags").get("sdReady"))\
        .field("text", state_data.get("text"))
    converted_data.append(printer_state_point)

    printer_temperature_point = Point("printer_temperature").tag("X-API-KEY", X_API_KEY)\
        .field("bed_actual", temperature_data.get("bed").get("actual") if temperature_data.get("bed").get("actual") is not None else 0.0)\
        .field("bed_offset", temperature_data.get("bed").get("offset") if temperature_data.get("bed").get("offset") is not None else 0.0)\
        .field("bed_target", temperature_data.get("bed").get("target") if temperature_data.get("bed").get("target") is not None else 0.0)\
        .field("tool0_actual", temperature_data.get("tool0").get("actual") if temperature_data.get("tool0").get("actual") is not None else 0.0)\
        .field("tool0_offset", temperature_data.get("tool0").get("offset") if temperature_data.get("tool0").get("offset") is not None else 0.0)\
        .field("tool0_target", temperature_data.get("tool0").get("target") if temperature_data.get("tool0").get("target") is not None else 0.0)
    converted_data.append(printer_temperature_point)

    return converted_data
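A hedged usage sketch: assuming `data` comes from OctoPrint's /api/printer endpoint and a write_api is already configured, the converted list writes in one call. The bucket name and `api_response` are assumptions.

points = data_convert_printer(api_response)  # `api_response`: hypothetical JSON fetched elsewhere
write_api.write(bucket="octoprint", record=points)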
Example No. 6
 async def _prepare_data(self, measurement: str):
     _point1 = Point(measurement).tag("location",
                                      "Prague").field("temperature", 25.3)
     _point2 = Point(measurement).tag("location", "New York").field(
         "temperature", 24.3)
     await self.client.write_api().write(bucket="my-bucket",
                                         record=[_point1, _point2])
Example No. 7
    def test_write_point_different_precision(self):
        bucket = self.create_test_bucket()

        point1 = Point('test_precision') \
            .field('power', 10) \
            .tag('powerFlow', 'low') \
            .time(datetime.datetime(2020, 4, 20, 6, 30, tzinfo=datetime.timezone.utc), WritePrecision.S)

        point2 = Point('test_precision') \
            .field('power', 20) \
            .tag('powerFlow', 'high') \
            .time(datetime.datetime(2020, 4, 20, 5, 30, tzinfo=datetime.timezone.utc), WritePrecision.MS)

        writer = self.client.write_api(write_options=SYNCHRONOUS)
        writer.write(bucket.name, self.org, [point1, point2])

        result = self.query_api.query(
            f"from(bucket:\"{bucket.name}\") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last() "
            "|> sort(columns: [\"_time\"], desc: false)", self.org)

        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0].records), 1)
        self.assertEqual(len(result[1].records), 1)
        self.assertEqual(
            result[0].records[0].get_time(),
            datetime.datetime(2020, 4, 20, 5, 30,
                              tzinfo=datetime.timezone.utc))
        self.assertEqual(
            result[1].records[0].get_time(),
            datetime.datetime(2020, 4, 20, 6, 30,
                              tzinfo=datetime.timezone.utc))
Example No. 8
def influxDB(influxdbip, token, measurement, cycle):
    print("influxDB write")
    a = 1  # stop flag: set to 0 to stop writing
    bucket = "test"
    # NOTE: this hard-coded token overrides the `token` argument
    token = "HTvG6oIApfABybjjYd_6Jehf8AEWkLStYw0qftanx9ijF05-UsLZ9pVqI604PwuRlhv8IkuIZshYaqVFTC0DXA=="
    client = InfluxDBClient(url=influxdbip, token=token, org="su")
    write_api = client.write_api(write_options=SYNCHRONOUS)
    # query_api = client.query_api()
    cycle = int(cycle) / 1000  # `cycle` is given in ms; convert to seconds
    flash("Starting to write to influxDB", "influx")
    while a:  # stop writing when the flag is cleared
        try:
            ss = 1
            xx = 2
            p = Point(measurement).tag("location", "108厂房").field("温度", ss)
            q = Point(measurement).tag("location", "beijing").field("2", xx)
            write_api.write(bucket=bucket, org="su", record=[p, q])
            time.sleep(cycle)
        except Exception as e:
            print(e)
            break
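As a side note, the hard-coded token above could be avoided entirely: the client can read its connection settings from environment variables, a pattern a later example (from_env_properties) also uses. A minimal sketch, assuming the variables are exported:

# assumes INFLUXDB_V2_URL, INFLUXDB_V2_ORG and INFLUXDB_V2_TOKEN are set
from influxdb_client import InfluxDBClient

client = InfluxDBClient.from_env_properties()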
Example No. 9
def send_data(data_table, lake_prefix, bucket="lakeinfo/autogen", lake_temp=None):
    """Writes data to influxdb client in env properties."""
    client = InfluxDBClient.from_env_properties()
    # client = InfluxDBClient(url=getenv("INFLUXDB_V2_URL"), org=getenv(
    #     "INFLUXDB_V2_ORG"), token=getenv("INFLUXDB_V2_TOKEN"))
    write_api = client.write_api(write_options=SYNCHRONOUS)

    last_point = data_table[-1]
    print(last_point)
    points = [

        Point("{}_level".format(lake_prefix)).tag("units", "ft").field("value", last_point['lake_level']).field(
            "valueNum", float(last_point['lake_level'])),  # .time(last_point['timestamp']),
        Point("{}_turbine_release".format(lake_prefix)).tag("units", "cfps").field(
            "valueNum", last_point['turbine_release_cfs']).field("value", float(last_point['turbine_release_cfs'])),  # .time(last_point['timestamp']),
        Point("{}_spillway_release".format(lake_prefix)).tag("units", "cfps").field(
            "valueNum", last_point['spillway_release_cfs']).field("value", float(last_point['spillway_release_cfs'])),  # .time(last_point['timestamp']),
        Point("{}_total_release".format(lake_prefix)).tag("units", "cfps").field(
            "valueNum", last_point['total_release_cfs']).field("value", float(last_point['total_release_cfs']))  # .time(last_point['timestamp']),
    ]

    if lake_temp:
        points.append(Point("{}_temperature".format(lake_prefix)).tag("units", "ºF").field(
            "valueNum", lake_temp).field("value", lake_temp))

    for i in points:
        write_api.write(bucket, 'patrickjmcd', i)
        print("Wrote {}".format(i._name))
Example No. 10
def write_data(alias, name, value):
    url, token, org, bucket = get_config("influx")
    # print(url, token, org)
    client = InfluxDBClient(url=url, token=token)
    write_api = client.write_api(write_options=SYNCHRONOUS)

    if alias == "temp":
        p = Point("event").tag("hostname", name).field("temperature", value)
    elif alias == "humidity":
        p = Point("event").tag("hostname", name).field("humidity", value)
    write_api.write(bucket=bucket, org=org, record=p)
    print(name, value, bucket)
Example No. 11
    def collect_influx(self, influx: InfluxDB) -> None:
        '''
        Pushes the current data to influx.
        '''
        ts = datetime.now(timezone.utc)
        wpres = InfluxWritePrecision.S

        overview_fields: Dict[str, float] = dict()
        for ov_name, ov_value in (
                ('voltage', self.readings.battery_voltage),
                ('power', self.readings.battery_power),
                ('state', self.readings.battery_state),
                ('soc_min', self.readings.soc_min),
                ('soc_target', self.readings.soc_target),
                ('soc', self.readings.soc),
                ('soh', self.readings.soh),
                ('temperature', self.readings.temperature),
                ('status', self.readings.bat_status),
                ('impedance_fine', self.readings.impedance_fine),
                ('discharged_amp_hours', self.readings.discharged_amp_hours),
                ('stored_energy', self.readings.stored_energy)):
            if ov_value is not None:
                overview_fields[ov_name] = ov_value
        if len(overview_fields) > 0:
            overview = Point('battery_overview').tag('inverter', self.parent.name).time(ts, write_precision=wpres)
            for ov_name, ov_value in overview_fields.items():
                overview = overview.field(ov_name, ov_value)
            influx.add_points(overview)

        if len(self.batteries) > 0:
            modules: Dict[int, Point] = dict()

            for battery in self.batteries.values():
                if battery:
                    if battery.cycle_count is not None and battery.num not in modules:  # add not none checks for all!
                        modules[battery.num] = Point('battery_module').tag('inverter', self.parent.name) \
                            .tag('module', battery.num)

                    if battery.cycle_count is not None:
                        modules[battery.num] = modules[battery.num].field('cycles', battery.cycle_count)

            influx.add_points(modules.values())
Example No. 12
def push_to_influx(measurement=None):
    assert measurement

    if not influx_config.enabled:
        return

    points = [
        Point("power").field("usage", measurement.power_usage).time(
            measurement.timestamp),
        Point("power").field("delivery", measurement.power_delivery).time(
            measurement.timestamp),
    ]

    influx_write_api.write(influx_config.BUCKET, influx_config.ORG, points)
Example No. 13
def refreshLatencyGraphs(secondsToRun):
	startTime = datetime.now()
	with open('statsByParentNode.json', 'r') as j:
		parentNodes = json.loads(j.read())
	
	with open('statsByDevice.json', 'r') as j:
		devices = json.loads(j.read())
	
	print("Retrieving device statistics")
	devices = getLatencies(devices, secondsToRun)
	
	print("Computing parent node statistics")
	parentNodes = getParentNodeStats(parentNodes, devices)
	
	print("Writing data to InfluxDB")
	bucket = influxDBBucket
	org = influxDBOrg
	token = influxDBtoken
	url="http://localhost:8086"
	client = InfluxDBClient(
		url=url,
		token=token,
		org=org
	)
	write_api = client.write_api(write_options=SYNCHRONOUS)
	
	queriesToSend = []
	for device in devices:
		if device['tcpLatency'] is not None:
			p = Point('Latency').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("TCP Latency", device['tcpLatency'])
			queriesToSend.append(p)

	for parentNode in parentNodes:
		if parentNode['tcpLatency'] is not None:
			p = Point('Latency').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("TCP Latency", parentNode['tcpLatency'])
			queriesToSend.append(p)
			
	write_api.write(bucket=bucket, record=queriesToSend)
	print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
	client.close()
	
	#with open('statsByParentNode.json', 'w') as infile:
	#	json.dump(parentNodes, infile)
	
	#with open('statsByDevice.json', 'w') as infile:
	#	json.dump(devices, infile)
	
	endTime = datetime.now()
	durationSeconds = round((endTime - startTime).total_seconds())
	print("Graphs updated within " + str(durationSeconds) + " seconds.")
Example No. 14
def read_and_write_data():
    client = TimeseriesClient.from_env_properties()

    # write single points
    _start_points = datetime.now(UTC)
    _point1 = (Point("test_single_point").tag("location", "Prague").field(
        "temperature", 25.3).time(_start_points))
    _point2 = (Point("test_single_point").tag("location", "New York").field(
        "temperature", 24.3).time(_start_points))
    client.write_points(project=PROJECT, points=[_point1, _point2])

    # write a dataframe
    _start_dataframe = datetime.now(UTC)
    _dataframe = pd.DataFrame(
        data=[["coyote_creek", 1.0], ["coyote_creek", 2.0]],
        index=[_start_dataframe, _start_dataframe + timedelta(hours=1)],
        columns=["location", "water_level"],
    )
    client.write_a_dataframe(
        project=PROJECT,
        measurement_name="test_dataframe",
        dataframe=_dataframe,
        tag_columns=["location"],
    )

    # read single points
    points = client.get_points(
        project=PROJECT,
        fields={"_measurement": "test_single_point"},
        start_time=_start_points,
    )
    logger.info("Request Points:")
    results = []
    for table in points:
        for record in table.records:
            results.append((record.get_value(), record.get_field()))

    print(results)

    # read a dataframe
    dataframe = client.get_dataframe(
        project=PROJECT,
        fields={
            "_measurement": "test_dataframe",
            "_field": "water_level"
        },
        start_time=_start_dataframe,
    )
    logger.info("Requested Dataframe:")
    logger.info(dataframe)
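TimeseriesClient here is a project-specific wrapper; for reference, a sketch of the same dataframe write through the plain influxdb_client API (the bucket name is an assumption):

from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

with InfluxDBClient.from_env_properties() as client:
    client.write_api(write_options=SYNCHRONOUS).write(
        bucket="my-bucket",  # assumed bucket
        record=_dataframe,
        data_frame_measurement_name="test_dataframe",
        data_frame_tag_columns=["location"],
    )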
Example No. 15
    def _write_point(self):
        self.write_client = self.client.write_api(write_options=SYNCHRONOUS)

        bucket = self.create_test_bucket()

        measurement = "h2o_feet"
        field_name = "water_level"
        val = "1.0"
        tag = "location"
        tag_value = "creek level"

        p = Point(measurement)
        p.field(field_name, val)
        p.tag(tag, tag_value)

        record_list = [p]

        self.write_client.write(bucket.name, self.org, record_list)

        query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
        flux_result = self.client.query_api().query(query)
        self.assertEqual(1, len(flux_result))
        rec = flux_result[0].records[0]

        self.assertEqual(self.id_tag, rec["id"])
        self.assertEqual(self.customer_tag, rec["customer"])
        self.assertEqual("LA", rec[self.data_center_key])

        self.delete_test_bucket(bucket)
Example No. 16
    def test_write_points_unicode(self):
        bucket = self.create_test_bucket()

        measurement = "h2o_feet_ěščřĚŠČŘ"
        field_name = "field_ěščř"
        utf8_val = "Přerov 🍺"
        tag = "tag_ěščř"
        tag_value = "tag_value_ěščř"

        p = Point(measurement)
        p.field(field_name, utf8_val)
        p.tag(tag, tag_value)
        record_list = [p]

        self.write_client.write(bucket.name, self.org, record_list)

        query = 'from(bucket:"' + bucket.name + '") |> range(start: 1970-01-01T00:00:00.000000001Z)'
        flux_result = self.client.query_api().query(query)
        self.assertEqual(1, len(flux_result))
        rec = flux_result[0].records[0]

        self.assertEqual(self.id_tag, rec["id"])
        self.assertEqual(self.customer_tag, rec["customer"])
        self.assertEqual("LA", rec[self.data_center_key])

        self.assertEqual(measurement, rec.get_measurement())
        self.assertEqual(utf8_val, rec.get_value())
        self.assertEqual(field_name, rec.get_field())
Example No. 17
def parse_row(row: OrderedDict):
    """Parse row of CSV file into Point with structure:

        taxi-trip-data,DOLocationID=152,PULocationID=79,dispatching_base_num=B02510 dropoff_datetime="2019-01-01 01:27:24" 1546304267000000000

    CSV format:
        dispatching_base_num,pickup_datetime,dropoff_datetime,PULocationID,DOLocationID,SR_Flag
        B00001,2019-01-01 00:30:00,2019-01-01 02:51:55,,,
        B00001,2019-01-01 00:45:00,2019-01-01 00:54:49,,,
        B00001,2019-01-01 00:15:00,2019-01-01 00:54:52,,,
        B00008,2019-01-01 00:19:00,2019-01-01 00:39:00,,,
        B00008,2019-01-01 00:27:00,2019-01-01 00:37:00,,,
        B00008,2019-01-01 00:48:00,2019-01-01 01:02:00,,,
        B00008,2019-01-01 00:50:00,2019-01-01 00:59:00,,,
        B00008,2019-01-01 00:51:00,2019-01-01 00:56:00,,,
        B00009,2019-01-01 00:44:00,2019-01-01 00:58:00,,,
        B00009,2019-01-01 00:19:00,2019-01-01 00:36:00,,,
        B00009,2019-01-01 00:36:00,2019-01-01 00:49:00,,,
        B00009,2019-01-01 00:26:00,2019-01-01 00:32:00,,,
        ...

    :param row: the row of CSV file
    :return: Parsed csv row to [Point]
    """

    return Point("taxi-trip-data") \
        .tag("dispatching_base_num", row['dispatching_base_num']) \
        .tag("PULocationID", row['PULocationID']) \
        .tag("DOLocationID", row['DOLocationID']) \
        .tag("SR_Flag", row['SR_Flag']) \
        .field("dropoff_datetime", row['dropoff_datetime']) \
        .time(row['pickup_datetime']) \
        .to_line_protocol()
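Since parse_row returns line protocol strings, a minimal driver could stream a CSV through it like this. The file name, connection details, and bucket are assumptions:

import csv
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client, \
        open("fhv_tripdata_2019-01.csv", newline="") as csv_file:
    write_api = client.write_api(write_options=SYNCHRONOUS)
    lines = [parse_row(row) for row in csv.DictReader(csv_file)]
    write_api.write(bucket="my-bucket", record=lines)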
Example No. 18
    def notify(self, notifyevent, control, settings, pelletdb, in_data,
               grill_platform):
        if time.time() - self.last_updated < 1:
            return

        from influxdb_client import Point
        name = settings['globals']['grill_name']
        if len(name) == 0:
            name = 'Smoker'

        def get_or_default(data, k, default):
            if data is not None and k in data:
                return data[k]
            return default

        p = Point(name).time(time=datetime.utcnow()) \
         .field("GrillTemp", float(get_or_default(in_data, 'GrillTemp', 0.0))) \
         .field('GrillSetPoint', float(get_or_default(in_data, 'GrillSetPoint', 0.0))) \
         .field('Probe1Temp', float(get_or_default(in_data, 'Probe1Temp', 0.0))) \
         .field('Probe1SetPoint', float(get_or_default(in_data, 'Probe1SetPoint', 0.0))) \
         .field('Probe2Temp', float(get_or_default(in_data, 'Probe2Temp', 0.0))) \
         .field('Probe2SetPoint', float(get_or_default(in_data, 'Probe2SetPoint', 0.0))) \
         .field("Mode", str(get_or_default(control, "mode", 'unknown'))) \
         .field('PelletLevel', int(get_or_default(get_or_default(pelletdb, 'current', {}), 'hopper_level', 100)))
        if grill_platform is not None:
            outputs = grill_platform.GetOutputStatus()
            for key in outputs:
                p = p.field(key, int(outputs[key]))

        if notifyevent and notifyevent != 'GRILL_STATE':
            p = p.field('Event', str(notifyevent))

        self.queue.append(p)

        self.last_updated = time.time()
Example No. 19
def ProcessLogin():

    with opentracing.tracer.start_span('Enter /users') as span:
        span.set_tag('step_1', '1')
    try:
        tracer.close()
    except Exception:
        pass  # the tracer may already be closed
    point = Point("endpoint_request").tag("endpoint", "/users").field(
        "value", 1).time(datetime.utcnow(), WritePrecision.NS)

    write_api.write(bucket, org, point)
    if request.method == 'GET':
        opt_name = request.args.get("name")
        opt_pass = request.args.get("pass")
        if opt_name is None or opt_pass is None:
            return "badz"
        else:
            if opt_name == "johnd" and opt_pass == "foo":
                return "good"
            else:
                return "bad"
    else:
        return "bad"
Example No. 20
def main():
    if len(sys.argv) != 4:
        usage()

    sp3_path, start_time, end_time = sys.argv[1:]

    print('Loading SP3 files...')
    for sp3 in pathlib.Path(sp3_path).glob('*.sp3'):
        load_sp3(str(sp3))

    client = InfluxDBClient.from_config_file('influxdb.ini')
    query_api = client.query_api()
    write_api = client.write_api(write_options=ASYNCHRONOUS)

    query = f"""from (bucket:"hesperides")
                |> range(start: {start_time}, stop: {end_time})
                |> aggregateWindow(every: 1m, fn: first)
                |> filter(fn: (r) => r._measurement == "position" and r.ship == "hesperides" and (r._field == "lat" or r._field == "lon"))
                |> pivot(rowKey: ["_time"], columnKey: ["_field"], valueColumn: "_value")"""

    print('Querying InfluxDB for positions...')
    records = query_api.query_stream(query)

    for record in records:
        print('Computing azels for', record.get_time())
        for satellite in satellites:
            azel = compute_azel(satellite, record)
            azel_record = Point('azel').time(record.get_time()).tag('svn', satellite).tag('ship', 'hesperides').field('az', azel[0]).field('el', azel[1])
            write_api.write(bucket='gnss', record=azel_record)
Example No. 21
def send_data(what, value):
    """
    Writes a single field as a point to InfluxDB Cloud and reports whether
    the write was successful.
    """
    client = InfluxDBClient(url=influx_cloud_url, token=influx_cloud_token)
    try:
        point = Point("measurement").field(what,
                                           value).time(time=datetime.utcnow())

        # if data is incorrect, don't send anything
        if point is None:
            return -1
        """
        Write data by Point structure
        """

        print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...')

        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket=bucket, org=org, record=point)

        print()
        print('success')
        print()
        print()

    except Exception as e:
        print(e)
    finally:
        client.close()
Example No. 22
def main(gpio, room, org, bucket):
    # create the client once; building a new client on every iteration leaks connections
    client = InfluxDBClient.from_env_properties()
    while True:
        write_api = client.write_api(write_options=SYNCHRONOUS)
        hum, temp = Adafruit_DHT.read_retry(SENSOR, gpio)
        if temp is not None:
            p = Point("temp").tag("room", room).field("degrees_c", temp).time(datetime.utcnow())
            logging.info("Writing %s", p.to_line_protocol())
            write_api.write(bucket, org, p)
        if hum is not None:
            p = Point("humid").tag("room", room).field("perc_rh", hum).time(datetime.utcnow())
            logging.info("Writing %s", p.to_line_protocol())
            write_api.write(bucket, org, p)
        write_api.close()

        time.sleep(INTERVAL)
Example No. 23
async def process_message(message: dict) -> None:
    """
    Parse user request and answer
    :param message: user message to be processed
    """

    # start with simple parser
    text = message.get("text", "").lower().strip()
    user_id = message.get("user", "")
    channel = message.get("channel", "")

    INFLUX_API_WRITE(
        Point("digestbot").field("overall_requests",
                                 1).time(datetime.utcnow()))

    if text.startswith("help"):
        await helper.process_message(channel, text)
    elif text == "top":
        await top.send_initial_message(user_id, channel)
    elif text == "timers":
        await timer.send_initial_message(user_id, channel)
    elif text == "presets":
        await preset.send_initial_message(user_id, channel)
    elif text == "ignore":
        await ignore.send_initial_message(user_id, channel)
    elif text == "qna" and QNA_PRESENTED:  # only if QnA provided
        await qna.send_initial_message(user_id, channel,
                                       message.get('trigger_id', ""))
    else:
        template = container.jinja_env.get_template("syntax_response.json")
        result = template.render(qna_presented=QNA_PRESENTED)
        await container.slacker.post_to_channel(channel_id=channel,
                                                blocks=result,
                                                ephemeral=True,
                                                user_id=user_id)
Example No. 24
 def getSensePoints(self, imperial_or_metric, bucket):
     dt = datetime.now(tz=pytz.timezone('US/Pacific')).isoformat()
     point = Point(measurement_name="sense")
     point.time(time=dt)
     # % relative
     point.field("humidity", self.sense.get_humidity())
     if imperial_or_metric == "imperial":
         point.field(
             "temperature_from_humidity",
             convertCToF(self.sense.get_temperature_from_humidity()))
         point.field(
             "temperature_from_pressure",
             convertCToF(self.sense.get_temperature_from_pressure()))
         point.field("pressure", convertmbToPSI(self.sense.get_pressure()))
     else:
         point.field("temperature_from_humidity",
                     self.sense.get_temperature_from_humidity())
         point.field("temperature_from_pressure",
                     self.sense.get_temperature_from_pressure())
         point.field("pressure", self.sense.get_pressure())
     point.field("orientation_radians",
                 self.sense.get_orientation_radians())
     point.field("orientation_degress",
                 self.sense.get_orientation_degrees())
     # magnetic intensity in microteslas
     point.field("compass_raw", self.sense.get_compass_raw())
     # rotational intensity in radians per second
     point.field("gyroscope_raw", self.sense.get_gyroscope_raw())
     # acceleration intensity in Gs
     point.field("accelerometer_raw", self.sense.get_accelerometer_raw())
     return [{"bucket": bucket, "point": point}]
Example No. 25
def ReAnWr_Data(DB_info, Sensor_info, start, end, step):
    logger = SetupLogger().getLogger(__name__)

    try:
        from influxdb_client import InfluxDBClient, Point, WritePrecision
        from influxdb_client.client.write_api import SYNCHRONOUS
    except ImportError:
        logger.error("influxdb_client module was not installed! Abort!")
        sys.exit()

    if start == "0" and end == "0":
        logger.info("Read and Write data on and on!")
        while True:
            humidity, temperature = ReadSensor(Sensor_info['sensor'],Sensor_info['pin'])
            
            point = Point(DB_info['measurement']).field("temperature", float
            (temperature)).field("humidity", float(humidity)).time(datetime.datetime.utcnow(), WritePrecision.NS)
            logger.info('Time={2}  Temp={0:0.1f}*C  Humidity={1:0.1f}%'.format(temperature, humidity, datetime.datetime.now()))

            DB_info['write_api'].write(DB_info['bucket'], DB_info['org'], point)
            
            time.sleep(int(step))

    else:
        logger.error("Not a supported start and end time yet! Abort!")
        sys.exit()
Example No. 26
    def test_basic_write(self):
        """Basically from docs"""
        _point1 = Point("test_measure").tag("sometag", "sometagvalue") \
            .field("somefield", 25.3) \
            .time(datetime.utcnow(), write_precision=WritePrecision.S)
        # This is also a pretty good test for "Did I get timezones right"
        self.InfluxClient.api.write(bucket=self.bucket, record=[_point1])
        query_string = f'from(bucket:"{self.bucket}")\
        |> range(start: -10m)\
        |> filter(fn:(r) => r._measurement == "test_measure")\
        |> filter(fn: (r) => r.sometag == "sometagvalue")\
        |> filter(fn:(r) => r._field == "somefield" )'

        query_api = self.InfluxClient.client.query_api()
        start_time = time()
        query_result = []
        while time() - start_time < 10:
            sleep(1)

            query_result = query_api.query(query=query_string,
                                           org=INFLUX_CI_CONFIG['o'])
            if len(query_result) != 0:
                break

        self.assertEqual(len(query_result), 1)
        self.assertEqual(query_result[0].records[0].values['_value'], 25.3)
Example No. 27
def parse_row(row: OrderedDict):
    """Parse row of CSV file into Point with structure:

    CSV format:
        date,symbol,open,close,low,high,volume
        2016-01-05,WLTW,123.43,125.839996,122.309998,126.25,2163600.0
        2016-01-06,WLTW,125.239998,119.980003,119.940002,125.540001,2386400.0
        2016-01-07,WLTW,116.379997,114.949997,114.93,119.739998,2489500.0
        2016-01-08,WLTW,115.480003,116.620003,113.5,117.440002,2006300.0
        2016-01-11,WLTW,117.010002,114.970001,114.089996,117.330002,1408600.0
        2016-01-12,WLTW,115.510002,115.550003,114.5,116.059998,1098000.0
        2016-01-13,WLTW,116.459999,112.849998,112.589996,117.07,949600.0
        ...

    :param row: the row of CSV file
    :return: Parsed csv row to [Point]
    """
    global _progress
    _progress += 1

    if _progress % 1000 == 0:
        print(_progress)

    return Point("financial-analysis") \
        .tag("symbol", row["symbol"]) \
        .field("open", float(row['open'])) \
        .field("high", float(row['high'])) \
        .field("low", float(row['low'])) \
        .field("close", float(row['close'])) \
        .time(datetime.strptime(row['date'], '%Y-%m-%d'))
Example No. 28
def hr2point(time, val):
    return Point("health") \
      .field("heart_rate", val) \
      .time(
          datetime.fromtimestamp(time / 1000, timezone.utc),
          WritePrecision.S
      )
Example No. 29
def stress2point(time, val):
    return Point("health") \
      .field("stress", max(val, 0)) \
      .time(
          datetime.fromtimestamp(time / 1000, timezone.utc),
          WritePrecision.S
      )
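Both converters take an epoch-millisecond timestamp, which is why they divide by 1000 before building a UTC datetime. A hedged usage sketch; the sample data, bucket, and write_api are assumptions:

samples = [(1611759600000, 62), (1611759660000, 64)]  # hypothetical (epoch ms, heart rate) pairs
points = [hr2point(ts, val) for ts, val in samples]
write_api.write(bucket="health", record=points)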
Example No. 30
    def test_write_query_data_nanoseconds(self):

        from influxdb_client.client.util.date_utils_pandas import PandasDateTimeHelper
        import influxdb_client.client.util.date_utils as date_utils

        date_utils.date_helper = PandasDateTimeHelper()

        bucket = self.create_test_bucket()

        point = Point("h2o_feet") \
            .field("water_level", 155) \
            .tag("location", "creek level")\
            .time('1996-02-25T21:20:00.001001231Z')

        self.write_client.write(bucket.name, self.org, [point])

        flux_result = self.client.query_api().query(
            f'from(bucket:"{bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)')
        self.assertEqual(1, len(flux_result))

        record = flux_result[0].records[0]

        self.assertEqual(self.id_tag, record["id"])
        self.assertEqual(record["_value"], 155)
        self.assertEqual(record["location"], "creek level")
        self.assertEqual(record["_time"].year, 1996)
        self.assertEqual(record["_time"].month, 2)
        self.assertEqual(record["_time"].day, 25)
        self.assertEqual(record["_time"].hour, 21)
        self.assertEqual(record["_time"].minute, 20)
        self.assertEqual(record["_time"].second, 00)
        self.assertEqual(record["_time"].microsecond, 1001)
        self.assertEqual(record["_time"].nanosecond, 231)

        date_utils.date_helper = None
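The nanosecond assertions work because PandasDateTimeHelper swaps the client's datetime parsing over to pandas.Timestamp, which keeps the sub-microsecond digits that datetime.datetime would drop. A small sketch of the same mechanism outside the test:

import influxdb_client.client.util.date_utils as date_utils
from influxdb_client.client.util.date_utils_pandas import PandasDateTimeHelper

date_utils.date_helper = PandasDateTimeHelper()
ts = date_utils.date_helper.parse_date('1996-02-25T21:20:00.001001231')
print(ts.microsecond, ts.nanosecond)  # expected: 1001 231

date_utils.date_helper = None  # restore the default helper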