Code Example #1
File: influxdb.py  Project: Crapworks/eve
import json
import logging
import socket
from threading import Thread

from influxdb import InfluxDBClient

logger = logging.getLogger(__name__)


class InfluxDBOutput(Thread):
    output_name = 'influxdb'

    def __init__(self, host, port, username, password, database, prefetch=10, proto='http'):
        Thread.__init__(self)
        self.prefetch = prefetch
        self.proto = proto
        self.host = host
        self.port = port

        if self.proto == 'http':
            self.client = InfluxDBClient(host, port, username, password, database)
        elif self.proto == 'udp':
            self.client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        else:
            raise ValueError('unknown protocol: %s' % (self.proto, ))

    def set_in_queue(self, queue):
        self._queue = queue

    def run(self):
        metrics = []
        while True:
            metrics += self._queue.get()

            if len(metrics) >= self.prefetch:
                try:
                    if self.proto == 'http':
                        self.client.write_points(metrics)
                    elif self.proto == 'udp':
                        # UDP sendto() requires bytes in Python 3
                        self.client.sendto(json.dumps(metrics).encode('utf-8'),
                                           (self.host, self.port))
                except Exception as err:
                    logger.warning('error while processing event: %s' % (str(err), ))
                finally:
                    metrics = []
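
A minimal usage sketch (assuming a queue.Queue whose items are lists of InfluxDB point dicts, matching the metrics += self._queue.get() call above; connection values are illustrative):

from queue import Queue

q = Queue()
output = InfluxDBOutput('localhost', 8086, 'root', 'root', 'metrics', prefetch=10)
output.set_in_queue(q)
output.daemon = True  # run() loops forever; don't block interpreter shutdown
output.start()

# Producers enqueue batches of point dicts:
q.put([{'measurement': 'cpu', 'fields': {'value': 0.64}}])
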
Code Example #2
import time

from influxdb import InfluxDBClient


def do_influx_request(n_points, host,
                      database):  # type: (int, str, str) -> float
    conn = InfluxDBClient(host=host)
    # NOTE: the explicit database= kwarg passed to write_points() below
    # overrides the database switched to here.
    conn.switch_database('p2')
    points = generate_fake_data(n_points)
    request_start_time = time.time()
    conn.write_points(points, time_precision='u', database=database)
    return time.time() - request_start_time
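
generate_fake_data is project-internal and not shown; a hypothetical stand-in and a timing call might look like this (the helper's body and the field layout are assumptions):

def generate_fake_data(n_points):  # hypothetical helper, for illustration only
    now_us = int(time.time() * 1e6)
    return [{'measurement': 'benchmark',
             'time': now_us + i,
             'fields': {'value': float(i)}}
            for i in range(n_points)]

elapsed = do_influx_request(1000, 'localhost', 'benchmarks')
print('wrote 1000 points in %.3fs' % elapsed)
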
Code Example #3
File: influx.py  Project: sallyruthstruik/supermann
import re
import socket

from influxdb import InfluxDBClient

import supermann.utils

# NOTE: BaseOutput and Point are defined elsewhere in the supermann project.


class InfluxOutput(BaseOutput):
    """
    Writes point into influxdb.

    Each system metrics will be written into separated measurement.
    All process metrics will be tagged by process_name and will be written into
    separated metrics.

    All points will be tagged with hostname.
    """
    section_name = "influx"

    hostname = socket.gethostname()
    processname_pattern = re.compile(r"^process:([^\:]+):(.*)")

    def init(self, **params):
        self.influx_client = InfluxDBClient(**params)

        supermann.utils.getLogger(self).info(
            "Using InfluxOutput with params %s", str(params))

    def __init__(self):
        super(InfluxOutput, self).__init__()
        self.bulk = []

    def flush(self):
        self.influx_client.write_points([p.to_influx() for p in self.bulk])

        self.bulk = []

    def event(self, **data):

        try:
            service = data["service"]
            metric = data["metric_f"]
        except KeyError:
            # Ignore events without a service name or a metric value.
            return

        if service.startswith("system"):
            self.bulk.append(
                Point(measurement=service,
                      point={"metric": metric},
                      tags={"hostname": self.hostname}))
        elif service.startswith("process"):
            processname, tail = self.processname_pattern.findall(service)[0]

            self.bulk.append(
                Point(measurement="process:{}".format(tail),
                      point={"metric": metric},
                      tags={
                          "hostname": self.hostname,
                          "process": processname
                      }))
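
To illustrate the routing above, processname_pattern splits a service name into the process name and the metric tail (the service name here is illustrative):

pattern = re.compile(r"^process:([^\:]+):(.*)")
print(pattern.findall("process:nginx:cpu:percent"))
# [('nginx', 'cpu:percent')] -> measurement "process:cpu:percent", tag process=nginx
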
Code Example #4
File: models.py  Project: raphi1790/robobee
import os

from dotenv import load_dotenv
from influxdb import InfluxDBClient


class InfluxConnector:
    client: InfluxDBClient

    def __init__(self):
        load_dotenv()
        user = os.getenv("INFLUX_DB_USER")
        password = os.getenv("INFLUX_DB_PASSWORD")
        self.client = InfluxDBClient('localhost', 8086, user, password, 'pi_influxdb')

    def get_client(self):
        return self.client

    def write_point(self,influx_point):
        self.client.write_points([influx_point])
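
load_dotenv() reads the credentials from a .env file; a minimal sketch (the .env values and the measurement name are placeholders):

# .env, next to the script:
#   INFLUX_DB_USER=pi
#   INFLUX_DB_PASSWORD=secret

connector = InfluxConnector()
connector.write_point({
    'measurement': 'hive_weight',
    'fields': {'value': 42.5},
})
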
Code Example #5
class InfluxDBAdapter(object):
    def __init__(self, settings=None, database=None):

        settings = deepcopy(settings) or {}
        settings.setdefault('host', u'localhost')
        settings.setdefault('port', u'8086')
        settings.setdefault('username', u'root')
        settings.setdefault('password', u'root')
        settings.setdefault('database', database)

        settings.setdefault('use_udp', False)
        settings.setdefault('udp_port', u'4444')

        settings['port'] = int(settings['port'])
        settings['udp_port'] = int(settings['udp_port'])

        self.__dict__.update(**settings)

        # Bookkeeping for all databases having been touched already
        self.databases_written_once = set()

        # Knowledge about all databases to be accessed using UDP
        # TODO: Refactor to configuration setting
        self.udp_databases = [
            {
                'name': 'luftdaten_info',
                'port': u'4445'
            },
        ]
        self.host_uri = u'influxdb://{host}:{port}'.format(**self.__dict__)

        log.info(u'Storage target is {uri}', uri=self.host_uri)
        self.influx_client = InfluxDBClient(host=self.host,
                                            port=self.port,
                                            username=self.username,
                                            password=self.password,
                                            database=self.database,
                                            timeout=10)

        # TODO: Hold references to multiple UDP databases using mapping "self.udp_databases".
        self.influx_client_udp = None
        if settings['use_udp']:
            self.influx_client_udp = InfluxDBClient(
                host=self.host,
                port=self.port,
                username=self.username,
                password=self.password,
                use_udp=settings['use_udp'],
                udp_port=settings['udp_port'],
                timeout=10)

    def is_udp_database(self, name):
        for entry in self.udp_databases:
            if entry['name'] == name:
                return True
        return False

    def write(self, meta, data):

        meta_copy = deepcopy(dict(meta))
        data_copy = deepcopy(data)

        try:
            chunk = self.format_chunk(meta, data)

        except Exception as ex:
            log.failure(
                u'Could not format chunk (ex={ex_name}: {ex}): data={data}, meta={meta}',
                ex_name=ex.__class__.__name__,
                ex=ex,
                meta=meta_copy,
                data=data_copy)
            raise

        try:
            success = self.write_chunk(meta, chunk)
            return success

        except requests.exceptions.ConnectionError as ex:
            log.failure(u'Problem connecting to InfluxDB at {uri}: {ex}',
                        uri=self.host_uri,
                        ex=ex)
            raise

        except InfluxDBClientError as ex:

            # Python 3 exceptions have no .message; match against str(ex).
            if ex.code == 404 or 'database not found' in str(ex):

                log.info('Creating database "{database}"',
                         database=meta.database)
                self.influx_client.create_database(meta.database)

                # Attempt second write
                success = self.write_chunk(meta, chunk)
                return success

                #log.failure('InfluxDBClientError: {ex}', ex=ex)

            # [0.8] ignore "409: database kotori-dev exists"
            # [0.9] ignore "database already exists"
            elif ex.code == 409 or 'database already exists' in str(ex):
                pass
            else:
                raise

    def write_chunk(self, meta, chunk):
        if self.influx_client_udp and self.is_udp_database(
                meta.database
        ) and meta.database in self.databases_written_once:
            success = self.influx_client_udp.write_points(
                [chunk], time_precision='s', database=meta.database)
        else:
            success = self.influx_client.write_points(
                [chunk],
                time_precision=chunk['time_precision'],
                database=meta.database)
            self.databases_written_once.add(meta.database)
        if success:
            log.debug(u"Storage success: {chunk}", chunk=chunk)
        else:
            log.error(u"Storage failed:  {chunk}", chunk=chunk)
        return success

    @staticmethod
    def get_tags(data):
        return project(data, ['gateway', 'node'])

    def format_chunk(self, meta, data):
        """
        Format for InfluxDB >= 0.9::
        {
            "measurement": "hiveeyes_100",
            "tags": {
                "host": "server01",
                "region": "europe"
            },
            "time": "2015-10-17T19:30:00Z",
            "fields": {
                "value": 0.42
            }
        }
        """

        assert isinstance(data, dict), 'Data payload is not a dictionary'

        chunk = {
            "measurement": meta['measurement'],
            "tags": {},
        }
        """
        if "gateway" in meta:
            chunk["tags"]["gateway"] = meta["gateway"]

        if "node" in meta:
            chunk["tags"]["node"]    = meta["node"]
        """

        # Extract timestamp field from data
        chunk['time_precision'] = 'n'
        for time_field in ['time', 'datetime', 'dateTime']:
            if time_field in data:

                # WeeWX. TODO: Move to specific vendor configuration.
                # Disabled in favor of precision detection heuristic.
                #if time_field == 'dateTime':
                #    chunk['time_precision'] = 's'

                # Process timestamp field.
                if data[time_field]:

                    # Decode timestamp.
                    chunk['time'] = data[time_field]
                    if is_number(chunk['time']):
                        chunk['time'] = int(float(chunk['time']))

                    # Remove timestamp from data payload.
                    del data[time_field]

                    # If we found a timestamp field already,
                    # don't look out for more.
                    break

        # Extract geohash from data. Finally, thanks Rich!
        # TODO: Also precompute geohash with 3-4 different zoomlevels and add them as tags
        if "geohash" in data:
            chunk["tags"]["geohash"] = data["geohash"]
            del data['geohash']

        if "latitude" in data and "longitude" in data:
            chunk["tags"]["latitude"] = data["latitude"]
            chunk["tags"]["longitude"] = data["longitude"]
            del data['latitude']
            del data['longitude']

        # Extract more information specific to luftdaten.info
        for field in [
                'location', 'location_id', 'location_name', 'sensor_id',
                'sensor_type'
        ]:
            if field in data:
                chunk["tags"][field] = data[field]
                del data[field]

        # TODO: Maybe do this at data acquisition / transformation time, not here.
        if 'time' in chunk:
            timestamp = chunk['time'] = parse_timestamp(chunk['time'])

            # Heuristically compute timestamp precision
            if isinstance(timestamp, int):
                if timestamp >= 1e17 or timestamp <= -1e17:
                    time_precision = 'n'
                elif timestamp >= 1e14 or timestamp <= -1e14:
                    time_precision = 'u'
                elif timestamp >= 1e11 or timestamp <= -1e11:
                    time_precision = 'ms'

                # FIXME: Is this a reasonable default?
                else:
                    time_precision = 's'

                chunk['time_precision'] = time_precision
            """
            # FIXME: Breaks CSV data acquisition. Why?
            if isinstance(chunk['time'], datetime.datetime):
                if chunk['time'].microsecond == 0:
                    chunk['time_precision'] = 's'
            """
        """
        Prevent errors like
        ERROR: InfluxDBClientError: 400:
                       write failed: field type conflict:
                       input field "pitch" on measurement "01_position" is type float64, already exists as type integer
        """
        self.data_to_float(data)

        assert data, 'Data payload is empty'

        chunk["fields"] = data

        return chunk

    def data_to_float(self, data):
        return convert_floats(data)

        # NOTE: Unreachable legacy implementation, superseded by
        # convert_floats() above.
        for key, value in data.items():

            # Sanity checks
            if isinstance(value, str):
                continue

            if value is None:
                continue

            # Convert to float
            try:
                data[key] = float(value)
            except (TypeError, ValueError) as ex:
                log.warn(
                    u'Measurement "{key}: {value}" float conversion failed: {ex}',
                    key=key,
                    value=value,
                    ex=ex)
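
The magnitude heuristic in format_chunk maps integer epoch timestamps to a precision by digit count; for example (timestamps are illustrative):

# 1_600_000_000              (~1e9)  -> 's'   seconds since epoch
# 1_600_000_000_000          (~1e12) -> 'ms'  (>= 1e11)
# 1_600_000_000_000_000      (~1e15) -> 'u'   (>= 1e14)
# 1_600_000_000_000_000_000  (~1e18) -> 'n'   (>= 1e17)
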
Code Example #6
File: influxdb.py  Project: alanstlc/jobslib
import datetime
import logging
import os
import time

from influxdb import InfluxDBClient

# NOTE: BaseMetrics, ConfigGroup and option are defined elsewhere in jobslib.

logger = logging.getLogger(__name__)


class InfluxDBMetrics(BaseMetrics):
    """
    InfluxDB metrics implementation.

    For use of :class:`InfluxDBMetrics` write into :mod:`settings`:

    .. code-block:: python

        METRICS = {
            'backend': 'jobslib.metrics.influxdb.InfluxDBMetrics',
            'options': {
                'host': 'hostname',
                'port': 8086,
                'username': '******',
                'password': '******',
                'database': 'dbname',
            },
        }

    Or use
    :envvar:`JOBSLIB_METRICS_INFLUXDB_HOST`,
    :envvar:`JOBSLIB_METRICS_INFLUXDB_PORT`,
    :envvar:`JOBSLIB_METRICS_INFLUXDB_USERNAME`,
    :envvar:`JOBSLIB_METRICS_INFLUXDB_PASSWORD` and
    :envvar:`JOBSLIB_METRICS_INFLUXDB_DBNAME` environment variables.
    """
    class OptionsConfig(ConfigGroup):
        """
        Consul liveness options.
        """
        @option(required=True, attrtype=str)
        def host(self):
            """
            InfluxDB host
            """
            host = os.environ.get('JOBSLIB_METRICS_INFLUXDB_HOST')
            if host:
                return host
            return self._settings.get('host', 'localhost')

        @option(attrtype=int)
        def port(self):
            """
            InfluxDB port
            """
            port = os.environ.get('JOBSLIB_METRICS_INFLUXDB_PORT')
            if port:
                return int(port)
            return self._settings.get('port', 8086)

        @option(required=True, attrtype=str)
        def username(self):
            """
            InfluxDB username
            """
            username = os.environ.get('JOBSLIB_METRICS_INFLUXDB_USERNAME')
            if username:
                return username
            return self._settings.get('username', 'root')

        @option(required=True, attrtype=str)
        def password(self):
            """
            InfluxDB password
            """
            password = os.environ.get('JOBSLIB_METRICS_INFLUXDB_PASSWORD')
            if password:
                return password
            return self._settings.get('password', 'root')

        @option(attrtype=str)
        def database(self):
            """
            InfluxDB database
            """
            database = os.environ.get('JOBSLIB_METRICS_INFLUXDB_DBNAME')
            if database:
                return database
            return self._settings['database']

    def __init__(self, context, options):
        super().__init__(context, options)
        self._influxdb = InfluxDBClient(
            host=self.options.host,
            port=self.options.port,
            username=self.options.username,
            password=self.options.password,
            database=self.options.database,
        )

    def push(self, metrics):
        current_dt = datetime.datetime.utcfromtimestamp(time.time())
        ts = current_dt.strftime('%Y-%m-%dT%H:%M:%SZ')
        task_name = self.context.config.task_class.name
        try:
            points = []
            for metric_name, metric_value in metrics.items():
                tags = {
                    'task': task_name,
                }
                for k, v in metric_value.get('tags', {}).items():
                    if k in tags:
                        raise Exception("Tag '{}' is reserved".format(k))
                    tags[k] = v
                metric = {
                    'measurement': metric_name,
                    'tags': tags,
                    'time': ts,
                    'fields': {
                        'value': float(metric_value['value']),
                    },
                }
                points.append(metric)
            self._influxdb.write_points(points)
        except Exception:
            logger.exception('Push monitoring metrics into InfluxDB failed')
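
push() expects a mapping of metric name to a dict with a 'value' and optional 'tags', as the loop above shows; a sketch of a call (assuming influxdb_metrics is a configured InfluxDBMetrics instance; metric names are illustrative):

metrics = {
    'items_processed': {'value': 1523, 'tags': {'stage': 'import'}},
    'duration_seconds': {'value': 12.7},
}
influxdb_metrics.push(metrics)  # one point per metric, tagged with the task name
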
Code Example #7
File: influx.py  Project: zerotired/kotori
class InfluxDBAdapter(object):

    def __init__(self, settings=None, database=None):

        settings = deepcopy(settings) or {}
        settings.setdefault('host', u'localhost')
        settings.setdefault('port', u'8086')
        settings.setdefault('username', u'root')
        settings.setdefault('password', u'root')
        settings.setdefault('database', database)

        settings.setdefault('use_udp', False)
        settings.setdefault('udp_port', u'4444')

        settings['port'] = int(settings['port'])
        settings['udp_port'] = int(settings['udp_port'])

        self.__dict__.update(**settings)

        # Bookkeeping for all databases having been touched already
        self.databases_written_once = set()

        # Knowledge about all databases to be accessed using UDP
        # TODO: Refactor to configuration setting
        self.udp_databases = [
            {'name': 'luftdaten_info', 'port': u'4445'},
        ]
        self.host_uri = u'influxdb://{host}:{port}'.format(**self.__dict__)

        log.info(u'Storage target is {uri}', uri=self.host_uri)
        self.influx_client = InfluxDBClient(
            host=self.host, port=self.port,
            username=self.username, password=self.password,
            database=self.database,
            timeout=10)

        # TODO: Hold references to multiple UDP databases using mapping "self.udp_databases".
        self.influx_client_udp = None
        if settings['use_udp']:
            self.influx_client_udp = InfluxDBClient(
                host=self.host, port=self.port,
                username=self.username, password=self.password,
                use_udp=settings['use_udp'], udp_port=settings['udp_port'],
                timeout=10)

    def is_udp_database(self, name):
        for entry in self.udp_databases:
            if entry['name'] == name:
                return True
        return False

    def write(self, meta, data):

        meta_copy = deepcopy(dict(meta))
        data_copy = deepcopy(data)

        try:
            chunk = self.format_chunk(meta, data)

        except Exception as ex:
            log.failure(u'Could not format chunk (ex={ex_name}: {ex}): data={data}, meta={meta}',
                ex_name=ex.__class__.__name__, ex=ex, meta=meta_copy, data=data_copy)
            raise

        try:
            success = self.write_chunk(meta, chunk)
            return success

        except requests.exceptions.ConnectionError as ex:
            log.failure(u'Problem connecting to InfluxDB at {uri}: {ex}', uri=self.host_uri, ex=ex)
            raise

        except InfluxDBClientError as ex:

            # Python 3 exceptions have no .message; match against str(ex).
            if ex.code == 404 or 'database not found' in str(ex):

                log.info('Creating database "{database}"', database=meta.database)
                self.influx_client.create_database(meta.database)

                # Attempt second write
                success = self.write_chunk(meta, chunk)
                return success

                #log.failure('InfluxDBClientError: {ex}', ex=ex)

            # [0.8] ignore "409: database kotori-dev exists"
            # [0.9] ignore "database already exists"
            elif ex.code == 409 or 'database already exists' in str(ex):
                pass
            else:
                raise

    def write_chunk(self, meta, chunk):
        if self.influx_client_udp and self.is_udp_database(meta.database) and meta.database in self.databases_written_once:
            success = self.influx_client_udp.write_points([chunk], time_precision='s', database=meta.database)
        else:
            success = self.influx_client.write_points([chunk], time_precision=chunk['time_precision'], database=meta.database)
            self.databases_written_once.add(meta.database)
        if success:
            log.debug(u"Storage success: {chunk}", chunk=chunk)
        else:
            log.error(u"Storage failed:  {chunk}", chunk=chunk)
        return success

    @staticmethod
    def get_tags(data):
        return project(data, ['gateway', 'node'])

    def format_chunk(self, meta, data):
        """
        Format for InfluxDB >= 0.9::
        {
            "measurement": "hiveeyes_100",
            "tags": {
                "host": "server01",
                "region": "europe"
            },
            "time": "2015-10-17T19:30:00Z",
            "fields": {
                "value": 0.42
            }
        }
        """

        assert isinstance(data, dict), 'Data payload is not a dictionary'

        chunk = {
            "measurement": meta['measurement'],
            "tags": {},
        }

        """
        if "gateway" in meta:
            chunk["tags"]["gateway"] = meta["gateway"]

        if "node" in meta:
            chunk["tags"]["node"]    = meta["node"]
        """

        # Extract timestamp field from data
        chunk['time_precision'] = 'n'
        for time_field in ['time', 'datetime', 'dateTime']:
            if time_field in data:

                # WeeWX. TODO: Move to specific vendor configuration.
                # Disabled in favor of precision detection heuristic.
                #if time_field == 'dateTime':
                #    chunk['time_precision'] = 's'

                # Process timestamp field.
                if data[time_field]:

                    # Decode timestamp.
                    chunk['time'] = data[time_field]
                    if is_number(chunk['time']):
                        chunk['time'] = int(float(chunk['time']))

                    # Remove timestamp from data payload.
                    del data[time_field]

                    # If we found a timestamp field already,
                    # don't look out for more.
                    break

        # Extract geohash from data. Finally, thanks Rich!
        # TODO: Also precompute geohash with 3-4 different zoomlevels and add them as tags
        if "geohash" in data:
            chunk["tags"]["geohash"] = data["geohash"]
            del data['geohash']

        # Extract more information specific to luftdaten.info
        for field in ['location', 'location_id', 'location_name', 'sensor_id', 'sensor_type']:
            if field in data:
                chunk["tags"][field] = data[field]
                del data[field]

        # TODO: Maybe do this at data acquisition / transformation time, not here.
        if 'time' in chunk:
            timestamp = chunk['time'] = parse_timestamp(chunk['time'])

            # Heuristically compute timestamp precision
            if isinstance(timestamp, int):
                if timestamp >= 1e17 or timestamp <= -1e17:
                    time_precision = 'n'
                elif timestamp >= 1e14 or timestamp <= -1e14:
                    time_precision = 'u'
                elif timestamp >= 1e11 or timestamp <= -1e11:
                    time_precision = 'ms'

                # FIXME: Is this a reasonable default?
                else:
                    time_precision = 's'

                chunk['time_precision'] = time_precision

            """
            # FIXME: Breaks CSV data acquisition. Why?
            if isinstance(chunk['time'], datetime.datetime):
                if chunk['time'].microsecond == 0:
                    chunk['time_precision'] = 's'
            """

        """
        Prevent errors like
        ERROR: InfluxDBClientError: 400:
                       write failed: field type conflict:
                       input field "pitch" on measurement "01_position" is type float64, already exists as type integer
        """
        self.data_to_float(data)

        assert data, 'Data payload is empty'

        chunk["fields"] = data

        return chunk

    def data_to_float(self, data):
        return convert_floats(data)

        # NOTE: Unreachable legacy implementation, superseded by
        # convert_floats() above.
        for key, value in data.items():

            # Sanity checks
            if isinstance(value, str):
                continue

            if value is None:
                continue

            # Convert to float
            try:
                data[key] = float(value)
            except (TypeError, ValueError) as ex:
                log.warn(u'Measurement "{key}: {value}" float conversion failed: {ex}', key=key, value=value, ex=ex)
Code Example #8
from influxdb import InfluxDBClient


def update_data(client: InfluxDBClient,
                measurement: str,
                tag_info: dict,
                time: str,
                fields: dict,
                keep=True):
    if not keep:
        raise NotImplementedError

    sql_query = (f"SELECT *::field FROM {measurement} "
                 f"WHERE time='{time}' ")
    for key, value in tag_info.items():
        sql_query = sql_query + f" and {key}='{value}'"

    # print(f"sql_query = {sql_query}")

    results = client.query(sql_query)
    # print(f"results: {results}")
    points = results.get_points()
    # print(f"points: {points}")
    # get_points() returns a generator; take the first (and only) match.
    point = next(points, None)
    if point is None:
        raise Exception("sql query returned an empty result set")
    # print(f"point = {point}")
    ''' update point
        point sample:
        point = {
            'time': '2021-01-23T15:58:30.750000Z', 
            'acceleration': 0.005427043900637734, 
            'gyroscope': 0.009184479105195894, 
            'latitude': 31.143920631980222, 
            'longitude': 121.36646195555223, 
            'velocity': 0.0
        }
    '''

    updated_point = {
        "measurement": measurement,
        "time": time,
        "tags": tag_info,
    }
    updated_fields = {}

    # Override the old point's fields with the new values; keep the rest
    for key, value in point.items():
        if key == 'time':
            continue
        if key in fields.keys():
            updated_fields[key] = fields[key]
        else:
            updated_fields[key] = value
    # Add fields that do not exist in the old point
    for key, value in fields.items():
        if key not in point.keys():
            updated_fields[key] = value

    updated_point["fields"] = updated_fields
    body = [updated_point]
    # print(body)

    client.write_points(body)
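
A sketch of a call matching the point layout in the docstring above (host, database, and tag names are illustrative):

client = InfluxDBClient(host='localhost', port=8086, database='telemetry')
update_data(client,
            measurement='vehicle_state',
            tag_info={'vehicle_id': 'v42'},
            time='2021-01-23T15:58:30.750000Z',
            fields={'velocity': 3.5})  # overrides 'velocity', keeps other fields
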
Code Example #9
class Grafana(ExtendedPlugin):
    """
    Log events to Grafana.
    """
    enabled = False
    score = 480  # Before email

    def configure(self, options, noseconfig):
        """ Call the super and then validate and call the relevant parser for
        the configuration file passed in """
        super(Grafana, self).configure(options, noseconfig)

        self.context = ContextHelper()
        cfgifc = self.context.get_config()
        self.duts = options.get(
            'duts',
            cfgifc.config.get('plugins', {}).get('_default_', {}))
        self.first_time = True
        o = options.influxdb
        if o:
            self.client = InfluxDBClient(o.host, o.port, o.user, o.password,
                                         o.db)

    def setup_snmp_bigiq(self, devices):
        for device in devices:
            if device.kind != KIND_TMOS_BIGIQ:
                continue
            rstifc = self.context.get_icontrol_rest(device=device)

            payload = SnmpInbound(contactInformation='', machineLocation='')
            address = {'address': '0.0.0.0', 'mask': '0.0.0.0'}
            payload.clientAllowList.append(address)
            rstifc.api.put(SnmpInbound.URI, payload=payload)  # @UndefinedVariable

            payload = SnmpV1V2cAccessRecords()
            payload.update(community='public', addressType='IPv4')
            rstifc.api.post(SnmpV1V2cAccessRecords.URI, payload=payload)

    def startTest(self, test, blocking_context=None):
        if self.first_time:
            self.setup_snmp_bigiq(expand_devices(self.duts))
        self.first_time = False

    def graphite_start(self):
        LOG.info('Graphite url: %s', self.options.graphite_url)
        context = ContextHelper()
        session = self.context.get_config().get_session()
        with context.get_rest(url=self.options.graphite_url) as rstifc:
            payload = AttrDict()
            payload.what = 'Test run started!'
            payload.tags = "testrun start"
            payload.data = "Run logs: %s" % session.get_url()
            rstifc.api.post('/events/', payload=payload)

    def now_as_timestamp(self):
        now = datetime.datetime.now()
        return int(time.mktime(now.timetuple()) * 1e3 + now.microsecond / 1e3)

    def graphite_stop(self, result):
        context = ContextHelper()
        self.options.stop_time = self.now_as_timestamp()
        session = self.context.get_config().get_session()
        result_text = "Total: %d, Fail: %d" % \
            (result.testsRun - result.notFailCount(),
             result.failCount())
        with context.get_rest(url=self.options.graphite_url) as rstifc:
            payload = AttrDict()
            payload.what = 'Test run stopped!'
            payload.tags = "testrun stop"
            payload.data = "Run logs: %s %s" % (session.get_url(), result_text)
            rstifc.api.post('/events/', payload=payload)

    def influxdb_start(self):
        value = "run started"
        session = self.context.get_config().get_session()
        self.options.start_time = self.now_as_timestamp()
        series = []
        for dut in expand_devices(self.duts):
            with self.context.get_icontrol_rest(device=dut) as rstifc:
                info = rstifc.api.get(DeviceInfo.URI)
                point = {
                    "measurement": 'events',
                    'fields': {
                        'value': value,
                    },
                    'tags': {
                        "name": "start",
                        #"host": info.managementAddress,
                        "harness": session.get_harness_id(),
                        "host": dut.address,
                        "machine": info.machineId,
                        "run": session.session,
                        "url": session.get_url(),
                    },
                }
                series.append(point)
        self.client.write_points(series)

    def influxdb_stop(self, result):
        value = "Total: %d, Fail: %d" % \
            (result.testsRun - result.notFailCount(),
             result.failCount())
        self.options.stop_time = self.now_as_timestamp() + 5000  # 5 second padding
        session = self.context.get_config().get_session()
        series = []
        for dut in expand_devices(self.duts):
            with self.context.get_icontrol_rest(device=dut) as rstifc:
                info = rstifc.api.get(DeviceInfo.URI)
                point = {
                    "measurement": 'events',
                    'fields': {
                        'value': value,
                        'total': result.testsRun - result.notFailCount(),
                        'failed': result.failCount()
                    },
                    'tags': {
                        "name": "stop",
                        #"host": info.managementAddress,
                        "harness": session.get_harness_id(),
                        "host": dut.address,
                        "machine": info.machineId,
                        "run": session.session,
                        "url": session.get_url(),
                    },
                }
                series.append(point)
        self.client.write_points(series)

    def begin(self):
        #self.graphite_start()
        self.influxdb_start()

    def finalize(self, result):
        self.influxdb_stop(result)