def test_from_line_field_values_boolean_true(self):
     p = parse_line('foobar,tag1=1 f1=t 0')
     self.assertEqual(p['fields']['f1'], True)
     p = parse_line('foobar,tag1=1 f1=true 0')
     self.assertEqual(p['fields']['f1'], True)
     p = parse_line('foobar,tag1=1 f1=True 0')
     self.assertEqual(p['fields']['f1'], True)
     p = parse_line('foobar,tag1=1 f1=TRUE 0')
     self.assertEqual(p['fields']['f1'], True)
 def test_from_line_field_values_boolean_false(self):
     p = parse_line('foobar,tag1=1 f1=f 0')
     self.assertEqual(p['fields']['f1'], False)
     p = parse_line('foobar,tag1=1 f1=false 0')
     self.assertEqual(p['fields']['f1'], False)
     p = parse_line('foobar,tag1=1 f1=False 0')
     self.assertEqual(p['fields']['f1'], False)
     p = parse_line('foobar,tag1=1 f1=FALSE 0')
     self.assertEqual(p['fields']['f1'], False)
 def test_carriage_return(self):
     p = parse_line('foobar,t0=0,t1=1 f0=0,f1=1 0\r\nABC')
     params = dict(measurement='foobar',
                   tags=dict(t0='0', t1='1'),
                   fields=dict(f0=0.0, f1=1.0),
                   time=0)
     self.assertDictEqual(p, params)
 def test_from_line(self):
     for _ in range(100):
         p = parse_line('foobar,t0=0,t1=1 f0=0,f1=1 0')
         params = dict(measurement='foobar',
                       tags=dict(t0='0', t1='1'),
                       fields=dict(f0=0.0, f1=1.0),
                       time=0)
         self.assertDictEqual(p, params)
 def output_to_file(self, lines, run_id, account_id):
     df = open(self.mfn, 'a+')
     largest_timestamp = 0
     for line in lines:
         if line:
             data = parse_line(line)
             # Transform
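             # data['time'] is in nanoseconds; dividing by 1e9 gives seconds (despite the ms_ prefix)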
             ms_timestamp = float(int(data['time']) / 1000000000)
             fields_dict = {}
             for fn in data['fields']:
                 val = data['fields'][fn]
                 tfn = self.get_field(fn)
                 if not tfn:
                     if type(val) == bool:
                         tfn = self.add_field_to_schema(fn, 'BOOL')
                     elif type(val) == int:
                         tfn = self.add_field_to_schema(fn, 'INT64')
                     elif type(val) == float:
                         tfn = self.add_field_to_schema(fn, 'FLOAT64')
                     else:
                         tfn = self.add_field_to_schema(fn, 'STRING')
                 fields_dict[tfn] = val
             tags_dict = {}
             for tn in data['tags']:
                 ttn = self.get_tag(tn)
                 if not ttn:
                     ttn = self.add_tag_to_schema(tn)
                 tags_dict[ttn] = str(data['tags'][tn])
             transformed_data = {
                 'ts': ms_timestamp,
                 'account_id': "urn:f5_cs::acccount:%s" % account_id,
                 'source_id': None,
                 'evt': {
                     'version': '1.0',
                     'sourceName': data['measurement'],
                     'sourceDescription': "data imported from beacon for account %s" % account_id,
                     'fields': fields_dict,
                     'tags': tags_dict,
                     'timestamp': ms_timestamp
                 }
             }
             df.write("%s\n" % json.dumps(transformed_data))
             if ms_timestamp > largest_timestamp:
                 largest_timestamp = int(ms_timestamp)
     time.sleep(WRITE_FILE_DELAY_SECS)
     df.close()
     return largest_timestamp
Example No. 6
def separate_metrics(data):
    data = data.split('\n')
    # Get rid of all empty lines
    data = [line for line in data if line != ""]
    data_tsfdb = []
    data_rest = []
    for line in data:
        if parse_line(line)["tags"]["machine_id"] == "tsfdb":
            data_tsfdb.append(line)
        else:
            data_rest.append(line)
    return '\n'.join(data_tsfdb), '\n'.join(data_rest)
Example No. 7
 def parse(self, text):
     for line in text.splitlines():
         try:
             data = parse_line(line)
         except LineFormatError as e:
             print(e, line)
             continue
         fields = data['fields']
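         # normalize a lone 'v' field to the conventional 'value' key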
         if len(fields) == 1 and 'v' in fields:
             fields = dict(value=fields['v'])
         yield Point(int(data['time']), data['measurement'], fields,
                     data['tags'])
Example No. 8
    def write_in_kv_base(self, org, data):
        try:
            if not data:
                return

            # Create a list of lines
            data = data.split('\n')
            # Get rid of all empty lines
            data = [line for line in data if line != ""]

            metrics = set()
            total_datapoints = 0
            machine = ""
            for line in data:
                dict_line = parse_line(line)
                machine = dict_line["tags"]["machine_id"]
                total_datapoints += len(dict_line["fields"].items())
                metric = generate_metric(dict_line["tags"],
                                         dict_line["measurement"])
                for field, _ in dict_line["fields"].items():
                    metrics.add(machine + "-" + metric + "-" + field)

            self.log.warning(
                ("Request for resource: %s, number of metrics: %d," +
                 " number of datapoints: %d") %
                (machine, len(metrics), total_datapoints))

            metrics = self.write_lines(self.db, org, data)
            self.update_metrics(self.db, org, metrics)
        except fdb.FDBError as err:
            error_msg = ("%s on write_in_kv(data) with resource_id: %s" %
                         (str(err.description, 'utf-8'), parse_line(
                             data[0])["tags"]["machine_id"]))
            return error(503,
                         error_msg,
                         traceback=traceback.format_exc(),
                         request=str(data))
Example No. 9
def loop(data):
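    # busy-wait until data['flag'] is set by another part of the program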
    while not data['flag']:
        pass

    print(data['file'])
    data['msg_count'] = 0
    file = data['file']
    model = data['model']
    delay = data['delay']
    pub_topic = data['pub_topic']
    client = data['client']
    lps = data['lps']

    import fpstimer
    timer = fpstimer.FPSTimer(lps)  # Make a timer paced at lps lines per second.

    num_lines = sum(1 for line in open(file, 'r'))
    pbar = tqdm(total=num_lines, leave=False, unit='lines')
    # telegraf/mart-ubuntu-s-1vcpu-1gb-sgp1-01/Model-PRO
    cnt = 0
    with open(file, 'r') as f:
        for line in f:
            # sleep(delay)
            # sleep(0.00066)
            # print(line)
            parsed = parse_line(line)
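            # keep only the second-to-last segment of the original topic and rebuild the publish topic from it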
            topic = parsed['tags']['topic'].split("/")[-2]
            topic = "DUSTBOY/{}/{}/status".format(model, topic)
            parsed['timestamp'] = str(parsed['time']) + '000'
            parsed['tags']['topic'] = topic
            parsed['batch_id'] = data['batch_id']
            client.publish(pub_topic,
                           json.dumps(parsed, sort_keys=True),
                           qos=0)
            pbar.update(1)
            timer.sleep()  # Pause just enough to keep the lps rate since the last sleep() call.
            cnt += 1
        pbar.close()
        # print('cnt=', cnt)
        sleep(0.00066)
        # print('msg_count =  ', data['msg_count'])
        client.disconnect()
        # client.loop_stop()
        raise SystemExit
Example No. 10
def main():
    global device_details
    #get_devices()
    #print(device_details)
    get_devices_refresh()
    cache_timestamp = datetime.datetime.now()
    for line in sys.stdin:
        #May be able to adjust this logic to have separate timers for each app_id
        #Update cache if older than 1 hour
        if (datetime.datetime.now() -
                cache_timestamp).seconds >= 3600 and len(device_details) > 0:
            #refresh every hour
            get_devices_refresh()
            #re-Set timestamp
            cache_timestamp = datetime.datetime.now()
        #Parse line protocol into a Point
        lproto = line_protocol_parser.parse_line(line)
        #parse topic tag to return the device_id and app_id eg: v3/fort-digital-agri@ttn/devices/2027a0023/up v3/{{app_id}}@{{tenant}}/devices/{{device_id}}/up
        topic_match = topic_re.match(lproto['tags']['topic'])
        app_id = topic_match[1]
        #tenant = topic_match[2]
        device_id = topic_match[3]
        point = Point(lproto['measurement']).time(lproto['time'])
        if app_id not in device_details or device_id not in device_details[
                app_id]:
            get_devices(app_id)
        if app_id in device_details:
            for key, value in lproto['fields'].items():
                point = point.field(key, will_it_float(value))
            for key, value in lproto['tags'].items():
                if key != 'name':
                    point = point.tag(key, value)
            #get details from global variable
            device = get_device(app_id, device_id)
            #Add additional tags
            point = point.tag("device_id", device_id)
            for key, value in device.items():
                point = point.tag(key, value)
            #Print line protocol to stdout
            print(point.to_line_protocol())
            #flush stdout
            sys.stdout.flush()
        else:
            print("Device Details not found")
Example No. 11
 def write_lines(self, tr, org, lines):
     metrics = {}
     datapoints_dir = {}
     for line in lines:
         dict_line = parse_line(line)
         machine = dict_line["tags"]["machine_id"]
         metric = generate_metric(dict_line["tags"],
                                  dict_line["measurement"])
         dt = datetime.fromtimestamp(int(str(dict_line["time"])[:10]))
         if self.time_series.series_type == 'metering':
             dt = datetime.now()
         for field, value in dict_line["fields"].items():
             machine_metric = "%s.%s" % (metric, field)
             if not datapoints_dir.get("second"):
                 datapoints_dir["second"] = fdb.directory.create_or_open(
                     tr,
                     (self.time_series.series_type, org, machine, 'second'))
             if self.time_series.write_datapoint(
                     tr,
                     org,
                     machine,
                     key_tuple_second(dt, machine_metric),
                     value,
                     datapoints_dir=datapoints_dir['second']):
                 if not metrics.get(machine):
                     metrics[machine] = set()
                 metrics[machine].add(
                     (machine_metric, type(value).__name__))
                 for resolution in self.resolutions:
                     if not datapoints_dir.get(resolution):
                         datapoints_dir[resolution] = \
                             fdb.directory.create_or_open(
                             tr, (self.time_series.series_type, org, machine, resolution))
                     self.time_series.write_datapoint_aggregated(
                         tr,
                         org,
                         machine,
                         machine_metric,
                         dt,
                         value,
                         resolution,
                         datapoints_dir=datapoints_dir[resolution])
     return metrics
Example No. 12
def node_controller(message):
    """
    Async task: read a message from MQTT,
    apply the controller rules,
    and publish a message on an MQTT topic.
    :param message:
    :return:
    """
    d: dict = parse_line(message + b" 0")
    tag: str = d["tags"]["tag"]
    print(d)
    s = Sprinklers()
    ctl = BinaryController()
    try:
        s.get_config(tag)
    except ObjectDoesNotExist:
        s.update_config(tag=tag, soil_moisture_min_level=30, soil_moisture_max_level=70)
        s.get_config(tag)
    ctl.set_conf(
        _min=s.soil_moisture_min_level, _max=s.soil_moisture_max_level, reverse=False
    )

    # ---- actuator control
    force_controller = Sprinklers().get_controller_force(tag)
    if force_controller["force_water_valve_signal"]:
        water_valve_signal = int(force_controller["water_valve_signal"])
    else:
        water_valve_signal = int(ctl.get_signal(d["fields"]["soil_moisture"]))

    s.update_controller(tag=tag, water_valve_signal=bool(water_valve_signal))

    mqtt_client.publish(
        MQTT_SPRINKLER_CONTROLLER_TOPIC,
        json.dumps(
            SprinklerCtrlDict(
                controller_type="sprinkler",
                tag=tag,
                water_valve_signal=water_valve_signal,
                soil_moisture_min_level=s.soil_moisture_min_level,
                soil_moisture_max_level=s.soil_moisture_max_level,
            )
        ),
    )
Example No. 13
 def test_from_line_tag_values(self):
     p = parse_line('foobar,tag1=A\\ \\,\\=\\"\\B,tag2="\\ " f1=0 0')
     self.assertEqual(p['tags']['tag1'], 'A ,=\\"\\B')
     self.assertEqual(p['tags']['tag2'], '" "')
Example No. 14
#A minibatch is created each time a new point is written to stdin.
minbatchlen = 10
maxbatchlen = 120
predictions = 40
threshold = 0.5

timestamps = []
bv_1 = []
haze_v5 = []
for line in sys.stdin:
    line = line.rstrip('\n')
    print(line)
    #lines are flushed after each loop
    sys.stdout.flush()
    #line protocol is parsed and added to the minibatch
    parsed = parse_line(line)
    brew = parsed['tags']['brew']
    field = parsed['fields']['temperature']
    time = parsed['time']
    time = pd.to_datetime(time, unit='ns', exact=True)
    timestamps.append(time)
    if brew == 'bv_1':
        bv_1.append(field)
        haze_v5.append(float('nan'))
    else:
        bv_1.append(float('nan'))
        haze_v5.append(field)
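    # keep only the most recent maxbatchlen samples in each series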
    timestamps = timestamps[-maxbatchlen:]
    bv_1 = bv_1[-maxbatchlen:]
    haze_v5 = haze_v5[-maxbatchlen:]
Example No. 15
 def test_time_error(self):
     with self.assertRaisesRegex(LineFormatError, 'nanoseconds'):
         parse_line('measurement,tag=value field=1.23 time')
Example No. 16
 def test_no_argument_error(self):
     with self.assertRaises(TypeError):
         parse_line()
Example No. 17
 def test_type_error(self):
     with self.assertRaises(TypeError):
         parse_line(123)
Example No. 18
 def test_from_line_field_values_string(self):
     p = parse_line('foobar,tag1=1 f1="MelodiesOfLife" 0')
     self.assertEqual(p['fields']['f1'], "MelodiesOfLife")
Example No. 19
 def test_from_line_measurement(self):
     p = parse_line('foobar,tag1=1 f1=0 1234')
     self.assertEqual(p['measurement'], 'foobar')
Example No. 20
 def test_from_line_field_values_integer_without_timestamp(self):
     p = parse_line('foobar,tag1=1 f1=123i')
     self.assertAlmostEqual(p['fields']['f1'], 123)
Example No. 21
 def test_from_line_field_values_big_integer(self):
     p = parse_line('foobar,tag1=1 f1=15758827520i 0')
     self.assertAlmostEqual(p['fields']['f1'], 15758827520)
Example No. 22
 def test_from_line_field_values_uinteger(self):
     p = parse_line('foobar,tag1=1 f1=123u 0')
     self.assertAlmostEqual(p['fields']['f1'], 123)
Example No. 23
 def test_from_line_measurement_escape(self):
     p = parse_line('f\\ \\,\\=\\"\\oobar,tag1=1 f1=0 1234')
     self.assertEqual(p['measurement'], 'f ,="\\oobar')
Example No. 24
# This code needs a couple of modules before it can be used:

#   pip install influxdb-client
#   pip install line-protocol-parser


from influxdb_client import Point
from line_protocol_parser import parse_line

while True:
    try:
        input_line = input()  # read from stdin
    except EOFError:  # catch EOF error
        break
    else:
        data = parse_line(input_line)  # parse input line
        fields = data['fields']
        ips = dict()
        for key, value in fields.items():
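            # field keys are expected to look like '<ip>_<metric>'; group values per IP address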
            ipaddress, substring = key.split('_', 1)
            if ipaddress not in ips:
                ips[ipaddress] = dict()
            ips[ipaddress][substring] = value
        for key, value in ips.items():
            datapoint = dict()
            datapoint['measurement'] = data['measurement']
            datapoint['fields'] = value
            datapoint['tags'] = {'ip' : key}
            datapoint['time'] = data['time']
            point = Point.from_dict(datapoint)  # new metric object
            print(point.to_line_protocol())  # write to stdout
Example No. 25
 def test_from_line_measurement_without_tags(self):
     p = parse_line('foobar f1=0 1234')
     self.assertEqual(p['measurement'], 'foobar')
     self.assertDictEqual(p['tags'], dict())
Example No. 26
 def test_from_line_tag_keys(self):
     p = parse_line('foobar,ta\\ \\,\\=\\"\\g1=1,tag2=2 f1=0 1234')
     self.assertTrue('ta ,=\\"\\g1' in p['tags'])
Example No. 27
def node_controller(message):
    """
    Async task: read a message from MQTT,
    apply the controller rules,
    and publish a message on an MQTT topic.
    :param message:
    :return:
    """
    d: dict = parse_line(message + b" 0")
    tag: str = d["tags"]["tag"]
    light = Light()
    ctl = TimeRangeController()
    glbl_config = GlobalConfig().get_config()

    if glbl_config:
        timezone = glbl_config["timezone"]
    else:
        callback_d: dict = LightCtrlDict(
            controller_type="light",
            tag=tag,
            tz="not_set",
            on_time_at="",
            off_time_at="",
            light_signal=int(0),
        )

        print("[ERROR] [LIGHT] Global configuration: Timezone not set")
        mqtt_client.publish(
            MQTT_LIGHT_CONTROLLER_TOPIC,
            json.dumps(callback_d),
        )
        return callback_d

    try:
        light.get_config(tag)
    except ObjectDoesNotExist:
        on_datetime_at = datetime.now(tz=SYSTEM_TIME_ZONE).astimezone(
            pytz.timezone(timezone))
        off_datetime_at = datetime.now(tz=SYSTEM_TIME_ZONE).astimezone(
            pytz.timezone(timezone))

        light.update_config(
            tag=tag,
            on_datetime_at=on_datetime_at,
            off_datetime_at=off_datetime_at + timedelta(hours=5),
        )
        light.get_config(tag)

    current_datetime: datetime = datetime.now(tz=SYSTEM_TIME_ZONE).astimezone(
        pytz.timezone(timezone))
    ctl.set_current_datetime(current_datetime)

    ctl.set_conf(
        start_at=light.on_datetime_at,
        end_at=light.off_datetime_at,
    )
    signal = ctl.get_signal()

    light.update_controller(tag=tag, light_signal=bool(signal))

    callback_d: dict = LightCtrlDict(
        controller_type="light",
        tag=tag,
        tz=timezone,
        on_time_at=light.on_datetime_at.strftime("%H:%M:%S"),
        off_time_at=light.off_datetime_at.strftime("%H:%M:%S"),
        light_signal=signal,
    )
    mqtt_client.publish(
        MQTT_LIGHT_CONTROLLER_TOPIC,
        json.dumps(callback_d),
    )
    return callback_d
Example No. 28
 def test_from_line_field_keys(self):
     p = parse_line('foobar field\\ \\,\\=\\"\\1=1,field2=2 1234')
     self.assertTrue('field ,="\\1' in p['fields'])
Example No. 29
def on_message(client, userdata, msg):
    """
    Water supply pump I/O callback
    Nutrient supply pump I/O callback
    pH down supply pump I/O callback
    :param client:
    :param userdata:
    :param msg:
    :return:
    """

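    # msg.payload is assumed to lack a timestamp; append a zero one before parsing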
    d: dict = parse_line(msg.payload + b" 0")
    w = Water()
    nutrient_ctl = BinaryController()
    ph_ctl = BinaryController()

    try:
        w.get_config()
    except ObjectDoesNotExist:
        w.update_config(
            ph_min_level=WATER_CONTROLLER["pH"]["min_level"],
            ph_max_level=WATER_CONTROLLER["pH"]["max_level"],
            tds_min_level=WATER_CONTROLLER["tds"]["min_level"],
            tds_max_level=WATER_CONTROLLER["tds"]["max_level"],
        )
        w.get_config()

    # Get actuator force configuration
    force_controller = Water().get_controller_force()
    # Nutrient control -----------
    nutrient_ctl.set_conf(_min=w.tds_min_level,
                          _max=w.tds_max_level,
                          reverse=False)
    nutrient_signal = nutrient_ctl.get_signal(d["fields"]["tds_level"])
    if force_controller["force_nutrient_pump_signal"]:
        nutrient_signal = int(force_controller["nutrient_pump_signal"])
    # pH downer control -----------
    ph_ctl.set_conf(_min=w.ph_min_level, _max=w.ph_max_level, reverse=False)
    ph_signal = ph_ctl.get_signal(d["fields"]["ph_level"])
    if force_controller["force_ph_downer_pump_signal"]:
        ph_signal = int(force_controller["ph_downer_pump_signal"])
    # water pump control ----------
    water_signal = int(Sprinklers().is_any_require_water())
    if force_controller["force_water_pump_signal"]:
        water_signal = int(force_controller["water_pump_signal"])
    # mixer pump control ----------
    mixer_signal = 0
    if force_controller["force_mixer_pump_signal"]:
        mixer_signal = int(force_controller["mixer_pump_signal"])

    pub_d: dict = WaterCtrlDict(
        tag="water",
        water_pump_signal=water_signal,
        nutrient_pump_signal=int(nutrient_signal),
        ph_downer_pump_signal=int(ph_signal),
        mixer_pump_signal=mixer_signal,
        tds_max_level=w.tds_max_level,
        tds_min_level=w.tds_min_level,
        ph_max_level=w.ph_max_level,
        ph_min_level=w.ph_min_level,
    )
    client.publish(MQTT_WATER_CONTROLLER_TOPIC, json.dumps(pub_d))
Example No. 30
 def test_from_line_field_values_float_without_timestamp(self):
     p = parse_line('foobar,tag1=1 f1=3.14')
     self.assertAlmostEqual(p['fields']['f1'], 3.14)