Code example #1
    def test_make_lines(self):
        data = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "string_val": "hello!",
                        "int_val": 1,
                        "float_val": 1.1,
                        "none_field": None,
                        "bool_val": True,
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'test,integer_tag=2,string_tag=hello '
            'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
        )
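
The expected string above reflects how fields are rendered: integer fields get an `i` suffix, strings are double-quoted with inner quotes escaped, booleans and floats use their Python text form, and fields whose value is None are dropped (which is why `none_field` is absent). A minimal sketch of that formatting, as an illustration only (the helper name `format_field` is made up; this is not the library's verbatim code):

def format_field(value):
    # check bool before int: bool is a subclass of int in Python
    if isinstance(value, bool):
        return str(value)                                 # True / False
    if isinstance(value, int):
        return '{}i'.format(value)                        # 1 -> 1i
    if isinstance(value, str):
        return '"{}"'.format(value.replace('"', '\\"'))   # quote strings, escape inner quotes
    return str(float(value))                              # 1.1 -> 1.1

print(format_field(1), format_field(1.1), format_field(True), format_field("hello!"))
# -> 1i 1.1 True "hello!"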
Code example #2
    def test_make_lines(self):
        """Test make new lines in TestLineProtocol object."""
        data = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "backslash_tag": "C:\\",
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "string_val": "hello!",
                        "int_val": 1,
                        "float_val": 1.1,
                        "none_field": None,
                        "bool_val": True,
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello '
            'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
        )
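
The trailing space in `backslash_tag=C:\\\\ ,` above is deliberate: a tag value that ends with a backslash would otherwise escape the comma separating it from the next tag. A rough sketch of that escaping rule, as an assumption for illustration (helper names are invented, not the library's verbatim code):

def escape_tag(value):
    # escape backslashes first, then the characters that are significant in tag syntax
    return (value.replace("\\", "\\\\")
                 .replace(" ", "\\ ")
                 .replace(",", "\\,")
                 .replace("=", "\\="))

def escape_tag_value(value):
    escaped = escape_tag(value)
    # a trailing backslash would swallow the comma that follows the tag, so pad with a space
    return escaped + " " if escaped.endswith("\\") else escaped

print(escape_tag_value("C:\\"))   # prints C:\\ followed by a space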
Code example #3
File: client.py, Project: exiaohao/mini_monitor
    def write(self, data, params=None, expected_response_code=204):
        """Write data to InfluxDB.

        :param data: the data to be written
        :type data: dict
        :param params: additional parameters for the request, defaults to None
        :type params: dict
        :param expected_response_code: the expected response code of the write
            operation, defaults to 204
        :type expected_response_code: int
        :returns: True, if the write operation is successful
        :rtype: bool
        """

        headers = self._headers
        headers['Content-type'] = 'application/octet-stream'

        if params:
            precision = params.get('precision')
        else:
            precision = None

        self.request(
            url="write",
            method='POST',
            params=params,
            data=make_lines(data, precision).encode('utf-8'),
            expected_response_code=expected_response_code,
            headers=headers
        )
        return True
Code example #4
 def test_timezone(self):
     dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
     utc = UTC.localize(dt)
     berlin = timezone('Europe/Berlin').localize(dt)
     eastern = berlin.astimezone(timezone('US/Eastern'))
     data = {
         "points": [
             {"measurement": "A", "fields": {"val": 1},
              "time": 0},
             {"measurement": "A", "fields": {"val": 1},
              "time": "2009-11-10T23:00:00.123456Z"},
             {"measurement": "A", "fields": {"val": 1}, "time": dt},
             {"measurement": "A", "fields": {"val": 1}, "time": utc},
             {"measurement": "A", "fields": {"val": 1}, "time": berlin},
             {"measurement": "A", "fields": {"val": 1}, "time": eastern},
         ]
     }
     self.assertEqual(
         line_protocol.make_lines(data),
         '\n'.join([
             'A val=1i 0',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257890400123456000',
             'A val=1i 1257890400123456000',
         ]) + '\n'
     )
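
The expected integers rely on timestamps being normalized to UTC and expressed as nanoseconds since the Unix epoch, with naive datetimes treated as UTC. A small sketch of that conversion under those assumptions (an illustration, not the library's exact code):

from datetime import datetime
from pytz import UTC, timezone

def to_nanoseconds(ts):
    if ts.tzinfo is None:
        ts = UTC.localize(ts)          # naive datetimes are assumed to be UTC
    delta = ts - UTC.localize(datetime(1970, 1, 1))
    # integer arithmetic avoids float rounding at nanosecond resolution
    return (delta.days * 86400 + delta.seconds) * 10**9 + delta.microseconds * 1000

berlin = timezone('Europe/Berlin').localize(datetime(2009, 11, 10, 23, 0, 0, 123456))
print(to_nanoseconds(berlin))          # 1257890400123456000, matching the expected output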
Code example #5
File: client.py, Project: exiaohao/mini_monitor
    def send_packet(self, packet):
        """Send a UDP packet.

        :param packet: the packet to be sent
        :type packet: dict
        """
        data = make_lines(packet).encode('utf-8')
        self.udp_socket.sendto(data, (self._host, self.udp_port))
Code example #6
File: main.py, Project: gaker/fuzz
async def receive(request):
    """
    Receive data and drop it in InfluxDB.
    """
    data = await request.json()

    auth_header = request.headers.get('authorization')
    if auth_header:
        auth_parts = auth_header.split(' ')

        token = os.getenv('AUTH_TOKEN', None)
        if token:
            if token != auth_parts[1]:
                return web.Response(status=401)

    if 'data' in data.keys():
        db = data.get('db', None)
        for item in data.get('data'):
            lines = line_protocol.make_lines(item)
            resp = await save(lines, db_name=db)
            resp.close()
        return web.Response(status=201)

    points = data.get('points')
    if points:
        # convert ints to floats
        for idx, item in enumerate(points):
            fields = item.get('fields')
            if fields:
                for k, v in fields.items():
                    if k != 'time':
                        if isinstance(v, int):
                            data['points'][idx]['fields'][k] = float(v)

        lines = line_protocol.make_lines(data)
        resp = await save(lines)
        resp.close()
    return web.Response(status=201)
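
For context, a hypothetical request body this handler would accept (the database name, token, and endpoint URL are made-up examples): either a list of `data` items, each already in `make_lines()` input form, or a raw `points` list whose integer field values are coerced to floats before writing.

# Hypothetical payload for the receive() handler above.
payload = {
    "db": "metrics",
    "data": [
        {"points": [{"measurement": "cpu", "fields": {"load": 0.64}}]}
    ]
}
# Sent as JSON with a bearer token, e.g.
#   curl -X POST -H 'Authorization: Bearer <AUTH_TOKEN>' -d '<json body>' <endpoint url>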
Code example #7
File: client.py, Project: jsferrei/influxdb-python
    def send_packet(self, packet, protocol='json'):
        """Send a UDP packet.

        :param packet: the packet to be sent
        :type packet: (if protocol is 'json') dict
                      (if protocol is 'line') sequence of line protocol strings
        :param protocol: protocol of input data, either 'json' or 'line'
        :type protocol: str
        """
        if protocol == 'json':
            data = make_lines(packet).encode('utf-8')
        elif protocol == 'line':
            data = ('\n'.join(packet) + '\n').encode('utf-8')
        self.udp_socket.sendto(data, (self._host, self.udp_port))
Code example #8
File: client.py, Project: bbc/influxdb-python
    def send_packet(self, packet, protocol='json', time_precision=None):
        """Send a UDP packet.

        :param packet: the packet to be sent
        :type packet: (if protocol is 'json') dict
                      (if protocol is 'line') list of line protocol strings
        :param protocol: protocol of input data, either 'json' or 'line'
        :type protocol: str
        :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
        :type time_precision: str
        """
        if protocol == 'json':
            data = make_lines(packet, time_precision).encode('utf-8')
        elif protocol == 'line':
            data = ('\n'.join(packet) + '\n').encode('utf-8')
        self.udp_socket.sendto(data, (self._host, self._udp_port))
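
Hypothetical call sites for send_packet() above; the client object and its UDP configuration are assumed and not shown in the snippet:

# JSON protocol: a dict is serialized with make_lines() before sending.
client.send_packet({"points": [{"measurement": "cpu", "fields": {"load": 0.64}}]})

# Line protocol: pre-formatted strings are joined with newlines and sent as-is.
client.send_packet(["cpu load=0.64", "mem used=2048i"], protocol='line')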
Code example #9
    def test_string_val_newline(self):
        data = {
            "points": [
                {
                    "measurement": "m1",
                    "fields": {
                        "multi_line": "line1\nline1\nline3"
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'm1 multi_line="line1\\nline1\\nline3"\n'
        )
Code example #10
 def test_float_with_long_decimal_fraction(self):
     """Ensure precision is preserved when casting floats into strings."""
     data = {
         "points": [
             {
                 "measurement": "test",
                 "fields": {
                     "float_val": 1.0000000000000009,
                 }
             }
         ]
     }
     self.assertEqual(
         line_protocol.make_lines(data),
         'test float_val=1.0000000000000009\n'
     )
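
The precision survives because Python 3's default float-to-string conversion (str() and repr() alike) produces the shortest representation that round-trips to the same float; fixed-precision formatting would not. A quick illustration:

value = 1.0000000000000009
print(str(value))              # 1.0000000000000009 -- shortest round-trip form
print('{:.6f}'.format(value))  # 1.000000 -- fixed precision loses the trailing digits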
Code example #11
    def test_string_val_newline(self):
        """Test string value with newline in TestLineProtocol object."""
        data = {
            "points": [
                {
                    "measurement": "m1",
                    "fields": {
                        "multi_line": "line1\nline1\nline3"
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'm1 multi_line="line1\\nline1\\nline3"\n'
        )
Code example #12
    def test_make_lines_unicode(self):
        data = {
            "tags": {
                "unicode_tag": "\'Привет!\'"  # Hello! in Russian
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "unicode_val": "Привет!",  # Hello! in Russian
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
        )
Code example #13
    def test_empty_tag(self):
        data = {
            "tags": {
                "my_tag": ""
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "value": "hello!"
                    }
                }
            ]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'test value="hello!"\n'
        )
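
Tags whose value is empty (or None) are dropped before the tag set is joined, which is why `my_tag` does not appear in the expected line. A one-line sketch of that filtering, as an assumption rather than the library's verbatim code:

tags = {"my_tag": "", "host": "server01"}
tag_set = ','.join('{}={}'.format(k, v) for k, v in sorted(tags.items()) if v)
print(tag_set)   # host=server01 -- the empty my_tag is filtered out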
Code example #14
File: client.py, Project: bbc/influxdb-python
    def write(self, data, params=None, expected_response_code=204,
              protocol='json'):
        """Write data to InfluxDB.

        :param data: the data to be written
        :type data: (if protocol is 'json') dict
                    (if protocol is 'line') sequence of line protocol strings
                                            or single string
        :param params: additional parameters for the request, defaults to None
        :type params: dict
        :param expected_response_code: the expected response code of the write
            operation, defaults to 204
        :type expected_response_code: int
        :param protocol: protocol of input data, either 'json' or 'line'
        :type protocol: str
        :returns: True, if the write operation is successful
        :rtype: bool
        """
        headers = self._headers
        headers['Content-Type'] = 'application/octet-stream'

        if params:
            precision = params.get('precision')
        else:
            precision = None

        if protocol == 'json':
            data = make_lines(data, precision).encode('utf-8')
        elif protocol == 'line':
            if isinstance(data, str):
                data = [data]
            data = ('\n'.join(data) + '\n').encode('utf-8')

        self.request(
            url="write",
            method='POST',
            params=params,
            data=data,
            expected_response_code=expected_response_code,
            headers=headers
        )
        return True
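
A hypothetical usage of the write() method above; the client construction and the 'metrics' database are assumptions for illustration:

# JSON protocol: the dict is converted with make_lines() using the requested precision.
client.write(
    {"points": [{"measurement": "cpu", "fields": {"load": 0.64}}]},
    params={"db": "metrics", "precision": "s"}
)

# Line protocol: a single string or a sequence of strings is passed through unchanged.
client.write("cpu load=0.64", params={"db": "metrics"}, protocol='line')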
Code example #15
File: client.py, Project: eman/influxdb-python
    def write(self, data, params=None, expected_response_code=204,
              protocol='json'):
        """Write data to InfluxDB.

        :param data: the data to be written
        :type data: (if protocol is 'json') dict
                    (if protocol is 'line') sequence of line protocol strings
        :param params: additional parameters for the request, defaults to None
        :type params: dict
        :param expected_response_code: the expected response code of the write
            operation, defaults to 204
        :type expected_response_code: int
        :param protocol: protocol of input data, either 'json' or 'line'
        :type protocol: str
        :returns: True, if the write operation is successful
        :rtype: bool
        """

        headers = self._headers
        headers['Content-type'] = 'application/octet-stream'

        if params:
            precision = params.get('precision')
        else:
            precision = None

        if protocol == 'json':
            data = make_lines(data, precision).encode('utf-8')
        elif protocol == 'line':
            data = ('\n'.join(data) + '\n').encode('utf-8')

        self.request(
            url="write",
            method='POST',
            params=params,
            data=data,
            expected_response_code=expected_response_code,
            headers=headers
        )
        return True
Code example #16
    def test_make_lines(self):
        data = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [{
                "measurement": "test",
                "fields": {
                    "string_val": "hello!",
                    "int_val": 1,
                    "none_field": None,
                }
            }]
        }

        self.assertEqual(
            line_protocol.make_lines(data),
            'test,integer_tag=2,string_tag=hello '
            'int_val=1,string_val="hello!"\n')
Code example #17
    def datagram_received(self, raw_data, addr):
        logging.info('Received %s bytes: %r(...) from %s', len(raw_data),
                     raw_data[0:3], addr)
        if len(raw_data) < 7: return

        identifier = raw_data[0:3]
        serializer = self._factory.get_serializer(identifier)

        mesg, fields = serializer.deserialize(raw_data, self._max_delta_t)

        # Verify nonce is not known for that timestamp
        if mesg.timestamp in self._known_nonces.keys() and \
                mesg.nonce in self._known_nonces[mesg.timestamp]:
            raise Exception(
                'Possible replay attack: Nonce {} already known for timestamp {}'
                .format(mesg.nonce, mesg.timestamp))
        else:
            if not mesg.timestamp in self._known_nonces.keys():
                self._known_nonces[mesg.timestamp] = set()
            self._known_nonces[mesg.timestamp].add(mesg.nonce)

        influxdb_points = []
        for key, value in fields.items():
            influxdb_points.append({
                'measurement': key,
                'tags': {},
                'fields': dict(value)
            })

        post_data = line_protocol.make_lines({
            'points': influxdb_points
        }).encode()

        asyncio.ensure_future(
            send(self._influx_url + '?db=' + serializer.database, post_data))

        self.cleanup_known_nonces()
Code example #18
    def export_to_line_protocol(self):
        """ Export object to InfluxDB Line Protocol syntax
        """
        # Device
        self.add_tag(['device', 'mode'])
        self.add_tag(['device', 'state'])

        self.add_tags(['device', 'additionalData'], prefix=['device'])

        # Part
        self.add_tag(['part', 'id'])
        self.add_tag(['part', 'type'])
        self.add_tag(['part', 'typeId'])
        self.add_tag(['part', 'code'])
        self.add_tag(['part', 'result'])

        self.add_tags(['part', 'additionalData'], prefix=['part'])

        # TimeMeasurements
        for measurement in self.data['measurements']:
            # create new Measurement object to store local tags
            meas_obj = Measurement(json.dumps(measurement), self.hostname())

            meas_obj.add_tag(['code'], ['measurement'])
            meas_obj.add_tag(['result'], ['measurement'])

            meas_obj.add_tags(['additionalData'], prefix=['measurement'])

            # TODO: context

            timestamp = parser.parse(measurement['ts'])
            keys = list(
                filter(lambda key: key != 'time',
                       measurement['series'].keys()))
            fields = dict()
            self.points = []
            for index in range(0, len(measurement['series'][keys[0]])):
                fields = {
                    key.replace(' ', '_'): measurement['series'][key][index]
                    for key in keys
                    if key != '' and measurement['series'][key][index] != ''
                }

                # add offset to base timestamp
                ts_w_offset = timestamp + timedelta(
                    milliseconds=measurement['series']['time'][index])
                # round to InfluxDB compatible timestamp (nanoseconds)
                ts_w_offset = int(round(ts_w_offset.timestamp() * 1000000000))

                self.add_point(fields, ts_w_offset)

            # merge global ProcessPayload tags with current ProcessMeasurement tags
            current_tags = self.tags.copy()
            current_tags.update(meas_obj.tags)

            # add measurement in line_protocol format
            if len(fields) > 0:
                tmp = line_protocol.make_lines({
                    'tags': current_tags,
                    'points': self.points
                })
                self.line_protocol_data += tmp

        # return sequence of line protocol strings
        return self.line_protocol_data if self.line_protocol_data else None
Code example #19
def read_nmea2k():
    """Read the `actisense-serial -r {device} | analyzer -json` output for the given NMEA2000 NGT-1 device port."""
    # Actisense-Serial
    actisense_process = Popen(['actisense-serial', '-r', n2k_conf['port']],
                              stdout=PIPE)

    # Analyzer Stream for output in JSON
    global analyzer_process
    analyzer_process = Popen(['analyzer', '-json'],
                             stdin=actisense_process.stdout,
                             stdout=PIPE,
                             stderr=PIPE)
    PGNs = list(map(int, n2k_conf['pgnConfigs'].keys()))
    logger.debug('PGNs: {}'.format(PGNs))

    while True:
        incoming_json = analyzer_process.stdout.readline().decode('utf-8')
        try:
            incoming_data = json.loads(incoming_json)

            if incoming_data['pgn'] in PGNs:
                # remove unnecessary keys
                del incoming_data['dst']
                del incoming_data['prio']

                # check if the configuration for the PGN has the `fromSource` Key
                if 'fromSource' in list(n2k_conf['pgnConfigs'][str(
                        incoming_data['pgn'])].keys()):
                    logger.info('PGN Source Filter Check')
                    if incoming_data['src'] != n2k_conf['pgnConfigs'][str(
                            incoming_data['pgn'])]['fromSource']:
                        logger.info('PGN: {} with src: {}'.format(
                            incoming_data['pgn'], incoming_data['src']))
                        logger.info('Skipping data for: {}'.format(
                            incoming_data['description']))
                        continue

                measurement = {
                    "tags": {
                        "source": "nmea2k",
                        "PGN": incoming_data['pgn'],
                        "src": incoming_data['src']
                    },
                    "points": []
                }

                # Create a set of all available fields from the incoming frame
                incoming_fields = set(incoming_data['fields'].keys())
                fields_from_conf = set(n2k_conf['pgnConfigs'][str(
                    incoming_data['pgn'])]['fieldLabels'])
                logger.debug('Fields To Log: {f}'.format(
                    f=fields_from_conf.intersection(incoming_fields)))

                # Get all the Fields necessary to be stored into InfluxDB
                for selected_field in fields_from_conf.intersection(
                        incoming_fields):
                    # Measurement name is the profile type name e.g. control/environment/engine etc available
                    # as the first level for the mqtt topics
                    meas_name = n2k_conf['pgnConfigs'][str(
                        incoming_data['pgn'])]['topics'][0].split('/')[0]
                    point = {
                        "measurement": meas_name,
                        "time": int(time.time() * 1e9),
                        "fields": {}
                    }
                    point['fields'][selected_field.replace(
                        " ", "")] = incoming_data['fields'][selected_field]
                    measurement['points'].append(point)
                # logger.debug(line_protocol.make_lines(measurement))

                with concurrent.futures.ThreadPoolExecutor(
                        max_workers=2) as executor:
                    if executor.submit(save_to_db, measurement).result():
                        logger.info('saved to InfluxDB')
                    if executor.submit(
                            publish_data, incoming_data['pgn'],
                            line_protocol.make_lines(measurement)).result():
                        logger.info('Published data successfully')
                time.sleep(0.05)

        except Exception as e:
            logger.exception(e)
Code example #20
def read_from_imu(i2c_port, updaterate):
    logger.info('Starting to Read BNO values on {} every {}s'.format(
        i2c_port, updaterate))

    global sensor_bno
    sensor_bno = BNO055(i2c_bus_port=i2c_port)

    if sensor_bno.begin() is not True:
        raise ValueError('Initialization Failure for BNO055')
        sys.exit(1)
    time.sleep(1)
    sensor_bno.setExternalCrystalUse(True)
    time.sleep(2)
    measurement = {
        "tags": {
            "source": "imu"
        },
        "points": [{
            "measurement": "acceleration",
            "fields": {
                "liX": -10000,
                "liY": -10000,
                "liZ": -10000
            }
        }, {
            "measurement": "acceleration",
            "fields": {
                "gX": -10000,
                "gY": -10000,
                "gZ": -10000
            }
        }, {
            "measurement": "orientation",
            "fields": {
                "yaw": -10000
            }
        }, {
            "measurement": "orientation",
            "fields": {
                "pitch": -10000
            }
        }, {
            "measurement": "orientation",
            "fields": {
                "roll": -10000
            }
        }]
    }
    logger.info('reading sensor information')
    while True:
        try:
            timestamp = int(time.time() * 1e9)
            lx, ly, lz = sensor_bno.getVector(BNO055.VECTOR_LINEARACCEL)
            measurement['points'][0]['fields']['liX'] = lx
            measurement['points'][0]['fields']['liY'] = ly
            measurement['points'][0]['fields']['liZ'] = lz
            logger.debug('linear acc.: x:{}, y:{}, z:{}'.format(lx, ly, lz))

            gX, gY, gZ = sensor_bno.getVector(BNO055.VECTOR_GRAVITY)
            measurement['points'][1]['fields']['gX'] = gX
            measurement['points'][1]['fields']['gY'] = gY
            measurement['points'][1]['fields']['gZ'] = gZ
            logger.debug('gravity: x:{}, y:{}, z:{}'.format(gX, gY, gZ))

            yaw, roll, pitch = sensor_bno.getVector(BNO055.VECTOR_EULER)
            measurement['points'][2]['fields']['yaw'] = yaw
            measurement['points'][3]['fields']['pitch'] = pitch
            measurement['points'][4]['fields']['roll'] = roll
            logger.debug('euler: yaw:{}, pitch:{}, roll:{}'.format(
                yaw, pitch, roll))

            for point in measurement['points']:
                # insert timestamp to each point
                point['time'] = timestamp

            with concurrent.futures.ThreadPoolExecutor(
                    max_workers=2) as executor:
                if executor.submit(save_to_db, measurement).result():
                    logger.info('saved data to InfluxDB')
                if executor.submit(
                        publish_data,
                        line_protocol.make_lines(measurement,
                                                 precision='ns')).result():
                    logger.info('published data to MQTT broker')
                time.sleep(updaterate)
        except Exception as imu_e:
            logger.error('Error while reading IMU data: {}'.format(imu_e))
            client.close()
            sys.exit(2)
Code example #21
    def export_to_line_protocol(self):
        """ Export object to InfluxDB Line Protocol syntax
        """
        """data = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "backslash_tag": "C:\\",
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "string_val": "hello!",
                        "int_val": 1,
                        "float_val": 1.1,
                        "none_field": None,
                        "bool_val": True,
                    },
                    "time": 0
                }
            ]
        }"""
        # Device
        self.add_tag(['device', 'mode'])
        self.add_tag(['device', 'state'])

        self.add_tags(['device', 'additionalData'], prefix=['device'])

        # Part
        self.add_tag(['part', 'id'])
        self.add_tag(['part', 'type'])
        self.add_tag(['part', 'typeId'])
        self.add_tag(['part', 'code'])
        self.add_tag(['part', 'result'])

        self.add_tags(['part', 'additionalData'], prefix=['part'])

        # TODO: Process

        # ProcessMeasurements
        for measurement in self.data['measurements']:
            # create new Measurement object to store local tags
            meas_obj = Measurement(json.dumps(measurement), self.hostname())

            meas_obj.add_tag(['code'], ['process'])
            meas_obj.add_tag(['name'], ['process'])
            meas_obj.add_tag(['phase'], ['process'])
            meas_obj.add_tag(['result'], ['process'])

            meas_obj.add_tags(['additionalData'], prefix=['process'])

            # TODO: context
            # TODO: specialValues

            timestamp = parser.parse(measurement['ts'])
            keys = list(
                filter(lambda key: key != 'time',
                       measurement['series'].keys()))

            self.points = []
            for index in range(0, len(measurement['series'][keys[0]])):
                fields = {
                    key.replace(' ', '_'): measurement['series'][key][index]
                    for key in keys
                    if key != '' and measurement['series'][key][index] != ''
                }

                # add offset to base timestamp
                ts_w_offset = timestamp + timedelta(
                    milliseconds=measurement['series']['time'][index])
                # round to InfluxDB compatible timestamp (nanoseconds)
                ts_w_offset = int(round(ts_w_offset.timestamp() * 1000000000))

                self.add_point(fields, ts_w_offset)

            # merge global ProcessPayload tags with current ProcessMeasurement tags
            current_tags = self.tags.copy()
            current_tags.update(meas_obj.tags)

            # add measurement in line_protocol format
            if len(fields) > 0:
                tmp = line_protocol.make_lines({
                    'tags': current_tags,
                    'points': self.points
                })
                self.line_protocol_data += tmp

        # return sequence of line protocol strings
        return self.line_protocol_data if self.line_protocol_data else None
Code example #22
 def test_timezone(self):
     """Test timezone in TestLineProtocol object."""
     dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
     utc = UTC.localize(dt)
     berlin = timezone('Europe/Berlin').localize(dt)
     eastern = berlin.astimezone(timezone('US/Eastern'))
     data = {
         "points": [
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": 0
             },
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": "2009-11-10T23:00:00.123456Z"
             },
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": dt
             },
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": utc
             },
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": berlin
             },
             {
                 "measurement": "A",
                 "fields": {
                     "val": 1
                 },
                 "time": eastern
             },
         ]
     }
     self.assertEqual(
         line_protocol.make_lines(data), '\n'.join([
             'A val=1i 0',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257890400123456000',
             'A val=1i 1257890400123456000',
         ]) + '\n')
Code example #23
 def test_request_log_useragent(self):
     lines = self.check_length('full-request-log-malicious-useragent.json', 2)
     make_lines({'points': lines})