Example #1
def main(host='localhost', port=8086):
    """Instantiate a connection to the InfluxDB."""
    user = '******'
    password = '******'
    dbname = 'example'
    dbuser = '******'
    dbuser_password = '******'
    query = 'select Float_value from cpu_load_short;'
    query_where = 'select Int_value from cpu_load_short where host=$host;'
    bind_params = {'host': 'server01'}
    json_body = [
        {
            "measurement": "cpu_load_short",
            "tags": {
                "host": "server01",
                "region": "us-west"
            },
            "time": "2009-11-10T23:00:00Z",
            "fields": {
                "Float_value": 0.64,
                "Int_value": 3,
                "String_value": "Text",
                "Bool_value": True
            }
        }
    ]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Create a retention policy")
    client.create_retention_policy('awesome_policy', '3d', 3, default=True)

    print("Switch user: "******"Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Querying data: " + query)
    result = client.query(query)

    print("Result: {0}".format(result))

    print("Querying data: " + query_where)
    result = client.query(query_where, bind_params=bind_params)

    print("Result: {0}".format(result))

    print("Switch user: "******"Drop database: " + dbname)
    client.drop_database(dbname)
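
For reference, the upstream tutorial this example comes from is typically run as a script, with host and port taken from the command line; a minimal sketch of such an entry point (the flag names are assumptions):

import argparse

def parse_args():
    """Parse host/port for the example run (flag names assumed)."""
    parser = argparse.ArgumentParser(description='example code to play with InfluxDB')
    parser.add_argument('--host', type=str, required=False, default='localhost',
                        help='hostname of InfluxDB http API')
    parser.add_argument('--port', type=int, required=False, default=8086,
                        help='port of InfluxDB http API')
    return parser.parse_args()

if __name__ == '__main__':
    args = parse_args()
    main(host=args.host, port=args.port)
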
Example #2
class IdmConnection(object):

    def __init__(self, user, pswd, host='localhost', port=8086, db='metrics'):
        self.host = host
        self.port = int(port)
        self.client = InfluxDBClient(self.host, self.port, user, pswd, db)
        self.client.create_database(db)
        self.globalLabels = {}


    def addGlobalLabel(self, name, value):
        """
        Add one label with value into global labels.
        """
        self.globalLabels[name] = value


    def setGlobalLabels(self, labels, append=False):
        """
        Set global labels for this metric.

        Default clear previous labels, unless append is True
        """
        if not append:
            self.globalLabels = {}
        self.globalLabels.update(labels)


    def metric(self, name):
        return Metric(name, self.client, self.globalLabels.copy())
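
A hypothetical usage sketch for IdmConnection; everything about Metric beyond the metric() factory above is an assumption:

# Hypothetical usage; Metric's interface beyond the factory is assumed.
conn = IdmConnection('root', 'root', host='localhost', port=8086, db='metrics')
conn.setGlobalLabels({'dc': 'eu-west', 'env': 'prod'})
conn.addGlobalLabel('service', 'idm')
cpu = conn.metric('cpu_idle')  # Metric receives a copy of the global labels
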
Example #3
def main(host='localhost', port=8086, nb_day=15):

    # nb_day: number of days of time series to generate
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    cpu_series = [{
        'name':    "server_data.cpu_idle",
        'columns': ["time", "value", "hostName"],
        'points':  []
    }]

    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        pointValues = [int(past_date.strftime('%s')), value, hostName]
        cpu_series[0]['points'].append(pointValues)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    client.create_database(DBNAME)

    print("Write points #: {0}".format(total_records))
    client.write_points(cpu_series)

    query = 'SELECT MEAN(value) FROM server_data.cpu_idle GROUP BY time(30m) WHERE time > now() - 1d;'
    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    print("Delete database: " + DBNAME)
    client.delete_database(DBNAME)
Example #4
class InfluxDatabase(Database):
    def __init__(self, props, database):
        super().__init__(database)

        host = props.get('host', 'localhost')
        port = props.get('port', 8086)
        self.client = InfluxDBClient(host=host, port=port)

        self.client.create_database(self.database)
        self.client.switch_database(self.database)

    def now(self):
        return super().now()

    def _write_all_points(self, points, align_points=False):
        if align_points:
            time = self.now()
            for p in points:
                p['time'] = time

        self.client.write_points(points)

    def _point(self, name, fields, tags={}):
        return {
            'measurement': name,
            'time': self.now(),
            'tags': tags,
            'fields': fields
        }

    def time_format(self):
        return '%Y-%m-%dT%H:%M:%SZ'
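
A hypothetical usage sketch for InfluxDatabase; the Database base class and its now() implementation are assumptions here:

# Hypothetical usage; the Database base class and now() are assumed.
props = {'host': 'localhost', 'port': 8086}
db = InfluxDatabase(props, 'benchmarks')
points = [
    db._point('latency', {'value': 12.3}, tags={'node': 'a'}),
    db._point('latency', {'value': 14.1}, tags={'node': 'b'}),
]
db._write_all_points(points, align_points=True)  # stamp all points with one timestamp
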
Example #5
def main(args=None):
    parser = ArgumentParser(usage="Usage: %(prog)s [options]", description="query datacatalog")
    parser.add_argument("-H","--host",dest='host',help="hostname of influxdb instance")
    parser.add_argument("-u","--user",dest="user",help="username")
    parser.add_argument("-p","--password",dest="pw",help="password")
    parser.add_argument("-P","--port",dest="port",type=int,default=8086,help="influxdb ingest port")
    parser.add_argument("-n","--dbname", dest="dbname",help="name of DB to store data in.")
    parser.add_argument("-d", "--dry", dest="dry", action="store_true", default=False, help="do not report results to grafana")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="verbose mode")
    opts = parser.parse_args(args)
    json_bdy = []
    for site in batchSites:
        status_dict = {key: 0 for key in statii}
        stats = JobInstance.objects.filter(site=site).item_frequencies("status")
        status_dict.update(stats)
        for stat, freq in status_dict.iteritems():
            json_bdy.append(__makeEntry__(stat, site, freq))
    print 'found %i measurements to add'%len(json_bdy)
    pp = PrettyPrinter(indent=2)
    if opts.verbose: pp.pprint(json_bdy)
    if opts.dry:
        return
    if influxdb:
        client = InfluxDBClient(opts.host,opts.port,opts.user,opts.pw,opts.dbname)
        client.create_database(opts.dbname)
        ret = client.write_points(json_bdy)
        if not ret: 
            try:
                raise Exception("Could not write points to DB")
            except Exception:
                print_exc()
                sys_exit(int(ret))
Example #6
 def write(self, repository, counters, current_time=None):
     # avoid a default argument that is evaluated once at import time
     if current_time is None:
         current_time = int(time.time())
     client = InfluxDBClient(self.host, self.port, self.user, self.password, None, True, False)
     print client.get_list_database()
     if {"name": self.database} not in client.get_list_database():
         client.create_database(self.database)
     client.switch_database(self.database)
     ts = time.time()
     st = datetime.datetime.utcfromtimestamp(ts).strftime('%Y-%m-%dT%H:%M:%SZ')
     json_body = [{
                     "measurement": "time",
                     "tags": {
                       "host": "cvm-perf01",
                       "region": repository,
                     },
                     "time": st,
                     "fields": {
                       "value": current_time
                     }
                  }]
     client.write_points(json_body)
     
     for counter in counters.values():
         json_body[0]["measurement"] = counter.name
         json_body[0]["fields"]["value"] = int(counter.avg())
         client.write_points(json_body)
Example #7
    def send_to_influx(self):
        host = self.get_config("hostname")
        if not host:
            self.log.error("No InfluxDB server configured, please set"
                           "`hostname` configuration key.")
            return

        port = int(self.get_config("port", default="8086"))
        database = self.get_config("database", default="ceph")

        # If influx server has authentication turned off then
        # missing username/password is valid.
        username = self.get_config("username", default="")
        password = self.get_config("password", default="")

        client = InfluxDBClient(host, port, username, password, database)

        # using influx client get_list_database requires admin privs,
        # instead we'll catch the not found exception and inform the user
        # if db can't be created
        try:
            client.write_points(self.get_df_stats(), 'ms')
            client.write_points(self.get_daemon_stats(), 'ms')
        except InfluxDBClientError as e:
            if e.code == 404:
                self.log.info("Database '{0}' not found, trying to create (requires admin privs).  You can also create manually and grant write privs to user '{1}'".format(database,username))
                client.create_database(database)
            else:
                raise
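
Note that on a 404 this handler creates the database but returns without re-sending the failed batch; a hedged sketch of a create-then-retry variant (the helper name and logger are assumptions):

# Sketch only: retry the failed write once after creating the missing database.
def write_with_retry(client, points, database, log):
    try:
        client.write_points(points, 'ms')
    except InfluxDBClientError as e:
        if e.code != 404:
            raise
        log.info("Database '%s' not found, creating it", database)
        client.create_database(database)
        client.write_points(points, 'ms')  # retry once after creation
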
Example #8
def public_metrics_into_influxdb():
    ping, download, upload = get_internet_measure()
    current_time = get_time()

    net_json_body = [
        {
        "measurement": "internet_measure",
        "tags": {
            "host": "desktop-lab",
            "contract": "Vivo Fibra"
            },
        "time": str(current_time),
        "fields": {
            "ping": ping,
            "download":download,
            "upload": upload
            }
        }
    ]

    print ("\n=========" )
    print ("Inserting: \n" + str(net_json_body))

    influxdb_host = getting_env_container_hosts()
    client = InfluxDBClient(influxdb_host, 8086, 'root', 'root', 'internet_measure')
    client.create_database('internet_measure')
    client.write_points(net_json_body)
    print ("=========\n")   
Example #9
def connect_db(host, port, user, password, dbname):
    try:
        client = InfluxDBClient(host, port, user, password)
        database = client.get_list_database()
        ''' client.get_list_database()
            e.g. [{u'name': u'_internal'}, {u'name': u'monasca'}]
            <type 'list'>
        '''
        db_exist = False
        for current_dbname in database:
            item = current_dbname
            if dbname in (item[u'name']):
                print("Database: %s is exist, switch database to %s") % (dbname, dbname)
                db_exist = True
                client.switch_database(dbname)
                print("DB connected")
                return client
        if not db_exist:
            print("DB is not exist, trying to create database.....")
            client.create_database(dbname)
            print("DB %s created, trying to switch database") % dbname
            client.switch_database(dbname)
            return client
    except influxdb.client.InfluxDBClientError as e:
        raise Exception(str(e))
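
The same create-if-missing logic can be condensed into a single membership test over the database names; a minimal sketch:

def connect_db_compact(host, port, user, password, dbname):
    """Sketch: equivalent to connect_db above, condensed."""
    client = InfluxDBClient(host, port, user, password)
    if dbname not in [db['name'] for db in client.get_list_database()]:
        client.create_database(dbname)
    client.switch_database(dbname)
    return client
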
Example #10
 def create_influxdb_database(self):
     db = "ns_" + str(self.id)
     client = InfluxDBClient(**INFLUXDB['default'])
     print("Create database: " + db)
     client.create_database(db)
     print("Add grants")
     client.grant_privilege("all", db, self.operator.name)
Example #11
    def send_to_influx(self):
        if not self.config['hostname']:
            self.log.error("No Influx server configured, please set one using: "
                           "ceph influx config-set hostname <hostname>")
            return

        # If influx server has authentication turned off then
        # missing username/password is valid.
        self.log.debug("Sending data to Influx host: %s",
                       self.config['hostname'])
        client = InfluxDBClient(self.config['hostname'], self.config['port'],
                                self.config['username'],
                                self.config['password'],
                                self.config['database'],
                                self.config['ssl'],
                                self.config['verify_ssl'])

        # using influx client get_list_database requires admin privs,
        # instead we'll catch the not found exception and inform the user if
        # db can not be created
        try:
            client.write_points(self.get_df_stats(), 'ms')
            client.write_points(self.get_daemon_stats(), 'ms')
        except InfluxDBClientError as e:
            if e.code == 404:
                self.log.info("Database '%s' not found, trying to create "
                              "(requires admin privs).  You can also create "
                              "manually and grant write privs to user "
                              "'%s'", self.config['database'],
                              self.config['username'])
                client.create_database(self.config['database'])
            else:
                raise
Example #12
    def send_to_influx(self):
        if not self.config['hostname']:
            self.log.error("No Influx server configured, please set one using: "
                           "ceph influx config-set hostname <hostname>")
            self.set_health_checks({
                'MGR_INFLUX_NO_SERVER': {
                    'severity': 'warning',
                    'summary': 'No InfluxDB server configured',
                    'detail': ['Configuration option hostname not set']
                }
            })
            return

        # If influx server has authentication turned off then
        # missing username/password is valid.
        self.log.debug("Sending data to Influx host: %s",
                       self.config['hostname'])
        client = InfluxDBClient(self.config['hostname'], self.config['port'],
                                self.config['username'],
                                self.config['password'],
                                self.config['database'],
                                self.config['ssl'],
                                self.config['verify_ssl'])

        # using influx client get_list_database requires admin privs,
        # instead we'll catch the not found exception and inform the user if
        # db can not be created
        try:
            client.write_points(self.get_df_stats(), 'ms')
            client.write_points(self.get_daemon_stats(), 'ms')
            self.set_health_checks(dict())
        except ConnectionError as e:
            self.log.exception("Failed to connect to Influx host %s:%d",
                               self.config['hostname'], self.config['port'])
            self.set_health_checks({
                'MGR_INFLUX_SEND_FAILED': {
                    'severity': 'warning',
                    'summary': 'Failed to send data to InfluxDB server at %s:%d'
                               ' due to a connection error'
                               % (self.config['hostname'], self.config['port']),
                    'detail': [str(e)]
                }
            })
        except InfluxDBClientError as e:
            if e.code == 404:
                self.log.info("Database '%s' not found, trying to create "
                              "(requires admin privs).  You can also create "
                              "manually and grant write privs to user "
                              "'%s'", self.config['database'],
                              self.config['username'])
                client.create_database(self.config['database'])
            else:
                self.set_health_checks({
                    'MGR_INFLUX_SEND_FAILED': {
                        'severity': 'warning',
                        'summary': 'Failed to send data to InfluxDB',
                        'detail': [str(e)]
                    }
                })
                raise
Example #13
def main():
  parser = argparse.ArgumentParser(description='whisper file to influxDB migration script')
  parser.add_argument('path', help='path to whispers')
  parser.add_argument('-host', default=DEFAULT_HOST, metavar="host", help="influxDB host")
  parser.add_argument('-port', default=DEFAULT_PORT, metavar="port", help="influxDB port")
  parser.add_argument('-user', default=DEFAULT_USER, metavar="user", help="influxDB user")
  parser.add_argument('-password', default=DEFAULT_PASSWORD, metavar="password", help="influxDB password")
  parser.add_argument('-db', default=DEFAULT_DB, metavar="db", help="influxDB db!")
  args = parser.parse_args()
  client = InfluxDBClient(args.host, args.port, args.user, args.password, args.db)
  try:
    client.create_database(args.db)
  except Exception:
    pass  # database most likely exists already
  for whisper_file in search(args.path):
    data = lame_whisper_read(whisper_file) 
    value = whisper_file.split('/')[-1].split('.')[0]
    time_series = '.'.join(get_path_list(whisper_file)[:-1])
    for key in data.iterkeys():
      time = float(key)
      #value = whisper_file.split('/')[-1].split('.')[0]
      #time_series = whisper_file.replace('//','/').split('/')[-2].split('/')[-1]
      #print time_series, value, time, data[key]
      #time_info, values =  whisper_read(whisper_file)
      client.write_points(
        [{
          "name":time_series,
          "columns":["time",value],
          "points":[[time,data[key]]]
        }])
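
This migration script writes the old 0.8 JSON format (name/columns/points); against a 0.9+ server and client, the same point would be expressed with measurement/fields, roughly as in this sketch (second-precision timestamps assumed):

# Sketch: the same point in the 0.9+ JSON format.
client.write_points([{
    'measurement': time_series,
    'time': int(time),
    'fields': {value: data[key]},
}], time_precision='s')
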
Example #14
def detect(runs, host, port):

    results = []
    # ZEUS malicious traffic detection (by looking at Domain)
    # Cryptowall malicious POST request detection (by looking at URL)
    # Exploit Kit malicious GET request detection (by looking at URL)
    # Ponmuocup periodic malicious GET request detection (by looking at URL)

    # connect to current traffic DB:
    client = InfluxDBClient(host, port, database="Traffic_{}".format(runs))

    # query
    zeus = client.query("select * from http where Domain =~ /[a-z0-9]{32,48}.(info|biz|ru|com|org|net)/")
    crypto_exploit = client.query(
        "select * from http where (URL =~ /.\.php.*?./ and action = 'POST') or (URL =~ /^[0-9][0-9]\.[0-9][0-9]\.[0-9][0-9][0-9]\.[0-9][0-9][0-9]\/^?/ and action = 'GET')"
    )
    ponmo = client.query("SELECT * FROM http WHERE URL =~ /\/complete.search/ and action = 'GET'")

    # analyze
    results = predict_mal(zeus, crypto_exploit, ponmo)

    # write analysis results to DB
    client = InfluxDBClient(host, port, database="Detect_{}".format(runs))
    client.create_database("Detect_{}".format(runs))

    client.write_points(results)
Example #15
def add_measurement_batch(meas_list, filename):
    client1 = InfluxDBClient('localhost', 8086, 'root', 'root', DATABASE_NAME)
    # get_list_database() returns dicts, so compare against the 'name' values
    if DATABASE_NAME not in [db['name'] for db in client1.get_list_database()]:
        client1.create_database(DATABASE_NAME)
    client1.switch_user(DB_USER, DB_USER_PASSWORD)

    client1.write_points(meas_list, batch_size=len(meas_list),
                         time_precision='ms', protocol='line')
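
Because the write above passes protocol='line', meas_list must already contain line-protocol strings rather than the JSON-style point dicts used elsewhere on this page; a hypothetical batch (measurement, tags, and timestamps are assumptions):

# Hypothetical line-protocol batch for write_points(..., protocol='line').
# Format: <measurement>[,<tag>=<value>...] <field>=<value> [timestamp]
meas_list = [
    'temperature,sensor=s1 value=21.5 1465839830100',
    'temperature,sensor=s2 value=19.8 1465839830100',
]
add_measurement_batch(meas_list, 'batch.log')
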
Example #16
class SkynetInflux:

    def __init__(self):

        # TODO: make this configurable.
        # connect to the influx server
        self.client = InfluxDBClient('localhost', 8086, 'root', 'root', 'skynet')
        self.client.create_database('skynet', if_not_exists=True)

    def get_results_for_plot(self, query):
        pts = self.client.query(query)        
        results = list(pts.get_points())
        
        retval = []

        if len(results) > 0:

            keys = list(results[0].keys())
            keys.remove('time')
            f = keys[0]

            for r in results:
                t = rfc3339_to_timestamp(r['time'])
                d = r[f]

                retval.append([t,d])

        return retval

    def get_measurements(self):
        series = self.client.get_list_series()
        measurements = []

        for s in series:
            print(s)
            for t in s["tags"]:
                try:
                    root = ".".join([t["board"], t["name"], s["name"]])

                    query = "SELECT * FROM %s WHERE \"board\"='%s' AND \"name\"='%s' LIMIT 1" % (s["name"], t["board"], t["name"])
                    print(query)
                    rs = self.client.query(query)
                    qr = list(rs.get_points())[0]

                    for k in qr.keys():
                        if k not in ['time', 'name', 'board', 'rtr', 'origin']:
                            measurements.append(root + "." + k)

                except Exception as e:
                    print(e)

        return sorted(measurements)

    def get_points(self, measurement, start, stop, limit):

        board, name, packet, data = measurement.split('.')

        q = "SELECT %s FROM %s WHERE board='%s' AND name='%s' LIMIT %i"
Example #17
class InfluxDB(object):

    def __init__(self, db_host, db_port, db_name):
        self._log = logging.getLogger(".".join([__name__, self.__class__.__name__]))
        self._db_host = db_host
        self._db_port = db_port
        self._db_name = db_name
        self._influx_client = None
        self._connected = False

    def connect(self):
        self._log.info("Opening connection to influxDB at {:s}:{:d}".format(self._db_host, self._db_port))

        try:
            self._influx_client = InfluxDBClient(host=self._db_host, port=self._db_port)

            existing_dbs = self._influx_client.get_list_database()
            db_exists = False
            for db in existing_dbs:
                if db['name'] == self._db_name:
                    db_exists = True
                    break

            if db_exists:
                self._log.info("{} database exists already".format(self._db_name))
            else:
                self._log.info("Creating {} database".format(self._db_name))
                self._influx_client.create_database(self._db_name)

            self._influx_client.switch_database(self._db_name)
            self._connected = True

        except requests.ConnectionError:
            self._log.info("Unable to connect to {} database".format(self._db_name))

    def get_status(self):
        status = {
            "address": self._db_host,
            "port": self._db_port,
            "name": self._db_name,
            "connected": self._connected
        }
        return status

    def log_point(self, time, measurement, data):
        if self._connected:
            point = {
                "measurement": measurement,
                "time": time,
                "fields": {}
            }

            for item in data:
                point["fields"][item] = data[item]

            self._influx_client.write_points([point])
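
A hypothetical usage sketch of the wrapper above; host, port, database name, and point values are assumptions:

# Hypothetical usage of the InfluxDB wrapper above.
db = InfluxDB('localhost', 8086, 'telemetry')
db.connect()
if db.get_status()['connected']:
    db.log_point('2009-11-10T23:00:00Z', 'temperature', {'value': 21.5})
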
Example #18
def get_client(opts):
    client = InfluxDBClient(opts.HOST, opts.PORT,
                            opts.user, opts.password,
                            opts.dbname)
    try:
        client.create_database(opts.dbname)
    except InfluxDBClientError:
        print >> sys.stderr, "debug: database %s exists. Continuing" % opts.dbname

    return client
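
get_client() only needs an object exposing HOST, PORT, user, password, and dbname attributes; a hypothetical call using argparse.Namespace as a stand-in:

# Hypothetical opts; attribute names are taken from get_client() above.
from argparse import Namespace

opts = Namespace(HOST='localhost', PORT=8086, user='root',
                 password='root', dbname='metrics')
client = get_client(opts)
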
Example #19
    def test_ssh_poller(self):
        """ Test real SSH connection to the server
        """

        # Spawn mock SSH server process
        p = Process(target=mock_cisco)
        p.start()
        sleep(1)

        task = {
            'hostname': 'localhost',
            'username': '******',
            'password': '******',
            'port': 9999,
            'device_type': 'cisco_nxos',
            'parser_mode': 'fsm',
            'precommands': '',
            'interval': 0,
            'commands': ['show interface:intf_name'],
        }
        poller = sshpoller.SSH_Poller(task)

        # Connect to the SSH and send the command
        poller.connect()
        poller.send_commands()

        # Generate a random name for our database
        poller.db_name = 'testsshpoller_%s' % ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(5))

        # Create client and connect to the database
        client = InfluxDBClient(host='127.0.0.1', port=poller.db_port, username=poller.db_user, password=poller.db_password)
        client.create_database(poller.db_name)

        # Write data to InfluxDB
        poller.output_influxdb()

        # Query database
        query_results = client.query('select * from "show interface"', database=poller.db_name)

        # Fetch expected results
        expected_results = json.loads(open(os.path.join('mockssh', 'cisco_show_interface_influx.json'), 'r').read())

        # Remove timestamps (the ones from the fixtures won't match the one we just wrote to the DB)
        for i in expected_results['series'][0]['values']:
            i[0] = ''
        for i in query_results.raw['series'][0]['values']:
            i[0] = ''

        # Clean up
        client.drop_database(poller.db_name)

        # Kill server process
        p.terminate()

        self.assertEqual(query_results.raw, expected_results)
Example #20
def main(host='localhost', port=8086, nb_day=15):

    # nb_day: number of days of time series to generate
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    metric = "server_data.cpu_idle"
    series = []

    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        # pointValues = [int(past_date.strftime('%s')), value, hostName]
        pointValues = {
                "time": int(past_date.strftime('%s')),
                "measurement": metric,
                'fields':  {
                    'value': value,
                },
                'tags': {
                    "hostName": hostName,
                },
            }
        series.append(pointValues)

    print(series)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    try:
        client.create_database(DBNAME)
    except InfluxDBClientError:
        # Drop and create
        client.drop_database(DBNAME)
        client.create_database(DBNAME)

    print("Create a retention policy")
    retention_policy = 'server_data'
    client.create_retention_policy(retention_policy, '3d', 3, default=True)

    print("Write points #: {0}".format(total_records))
    client.write_points(series, retention_policy=retention_policy)

    time.sleep(2)

    query = "SELECT MEAN(value) FROM {} WHERE time > now() - 10d GROUP BY time(500m)".format(metric)
    result = client.query(query, database=DBNAME)
    print("Result: {0}".format(result))

    print("Drop database: {}".format(DBNAME))
    client.drop_database(DBNAME)
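
Because the points are written into the 'server_data' retention policy (made the default above), the unqualified query still finds them; the fully qualified db.rp.measurement form spells the policy out explicitly. A hedged sketch:

# Sketch: fully qualify the measurement with database and retention policy.
query = ('SELECT MEAN(value) FROM "{0}"."{1}"."{2}" '
         'WHERE time > now() - 10d GROUP BY time(500m)').format(
             DBNAME, retention_policy, metric)
result = client.query(query, database=DBNAME)
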
Example #21
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'example'
    dbuser = '******'
    dbuser_password = '******'
    query = 'select column_one from foo;'
    json_body = [{
        "points": [
            ["1", 1, 1.0],
            ["2", 2, 2.0]
        ],
        "name": "foo",
        "columns": ["column_one", "column_two", "column_three"]
    }]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    dbusers = client.get_database_users()
    print("Get list of database users: {0}".format(dbusers))

    print("Add database user: "******"Make user a database admin")
    client.set_database_admin(dbuser)

    print("Remove admin privilege from user")
    client.unset_database_admin(dbuser)

    dbusers = client.get_database_users()
    print("Get list of database users again: {0}".format(dbusers))

    print("Switch user: "******"Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Queying data: " + query)
    result = client.query(query)

    print("Result: {0}".format(result))

    print("Switch user: "******"Delete database: " + dbname)
    client.delete_database(dbname)
Example #22
class InfluxDBConnection:

    def __init__(self, server="localhost", port=8086, user='******', password='******', database='skynet_lite'):

        print "Connecting to Influx: %s@%s:%s/%s" % (user, server, port, database)

        self.client = InfluxDBClient(server, port, user, password, database)
        dbs = [x['name'] for x in self.client.get_list_database()]
        if database not in dbs:
            self.client.create_database(database)

    def write_points(self, points):
        self.client.write_points(points)
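
A hypothetical usage sketch; the point layout follows the JSON format used by the other examples on this page:

# Hypothetical usage of InfluxDBConnection above.
conn = InfluxDBConnection(server='localhost', port=8086, database='skynet_lite')
conn.write_points([{
    'measurement': 'cpu_load_short',
    'tags': {'host': 'server01'},
    'fields': {'value': 0.64},
}])
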
Example #23
def main(host='localhost', port=8086):
    user = '******'
    password = '******'
    dbname = 'Airfare'
    json_body = get_airline_data()
    client = InfluxDBClient(host, port, user, password, dbname)

    client.delete_database(dbname)
    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Writing data")
    client.write_points(json_body)
Example #24
def writestream(count, array, M):
	runs,host,port = system.givevars()
	t0 = time.time()
	client = InfluxDBClient(host.rstrip(), port.rstrip(), database='Traffic_{}_{}'.format(runs, M))
	client.create_database('Traffic_{}_{}'.format(runs, M))
	client.write_points(array,time_precision="ms")
	t1 = time.time()
	influx = t1 - t0
	rate = float(count) / influx

	f = open("log.txt", "wb")
	f.write("SUCCESS. TIME INFLUX: {} WRITE SPEED PER REQUEST: {} DB: Traffic_{}_{}\n".format(influx, rate,runs,M))
	f.close()
Example #25
class TestResponseParser(unittest.TestCase):
    def setUp(self):
        self.client = InfluxDBClient('127.0.0.1', 8086, 'root', 'root')
        self.database_name = 'test_%s' % (time.time())
        self.client.create_database(self.database_name)
        self.client.switch_db(self.database_name)
        now = datetime.utcnow()
        then = now - timedelta(hours=4)
        self.points = geneate_points(then, now, timedelta(seconds=10), 10)

        test_data = [{
            "name": "test",
            "columns": ["time", "key"],
            "points": self.points
        }]
        self.client.write_points(data=test_data, batch_size=2000)

    def tearDown(self):
        self.client.delete_database(self.database_name)

    def test_simple(self):
        query = InfluxQuery.for_series('test').limit(None)

        client = INDBClient(conn=self.client)

        resp = client.result_for_query(query)

        series = resp.get('test')

        self.assertEqual(len(series), len(self.points))

        query = query.limit(10)
        resp = client.result_for_query(query)

        series = resp.get('test')
        assert len(series) == 10

        query = InfluxQuery.for_series('test').columns(InfluxQuery.count('key')).limit(None)
        resp = client.result_for_query(query)
        series = resp.get('test')

        self.assertEqual(series[0].count, len(self.points))

    def test_groups(self):
        q = InfluxQuery.for_series('test').columns(InfluxQuery.count('key'))
        q = q.limit(None)
        q = q.group_by(InfluxQuery.time('1h'))
        client = INDBClient(conn=self.client)
        resp = client.result_for_query(q)
        series = resp.get('test')
        assert sum(map(lambda x: x.count, series)) == len(self.points)
Example #26
def start(argv):
    """
    Instantiate an InfluxDBClient. The expected inputs are the host/address and
    port of the InfluxDB and the name of the database to use. If the database
    does not exist then it will be created. If the fourth arg is "auth" then it
    will prompt the user for the InfluxDB's username and password.
    """
    influxdb_host = argv[0]
    influxdb_port = int(argv[1])
    influxdb_name = argv[2]
    if len(argv) > 3:
        if argv[3] == "auth":
            influxdb_username = raw_input("InfluxDB username: ")
            # assumption: the masked prompt reads the password via getpass
            influxdb_password = getpass.getpass("Password: ")
        else:
            print >> sys.stderr, "Invalid args provided to %s: %s "\
                    "(expected: 'auth', got: '%s')" % (__name__, str(argv),
                            argv[3])
            sys.exit(1)
    else:
        influxdb_username = "******"
        influxdb_password = "******"

    LOG.info("Connecting to: %s@%s:%d database:%s.",
            influxdb_username, influxdb_host, influxdb_port, influxdb_name)

    global g_client
    g_client = InfluxDBClient(host=influxdb_host, port=influxdb_port,
                              database=influxdb_name,
                              username=influxdb_username,
                              password=influxdb_password)

    create_database = True
    try:
        databases = g_client.get_list_database()
    except (requests.exceptions.ConnectionError, InfluxDBClientError) as exc:
        print >> sys.stderr, "Failed to connect to InfluxDB server at %s:%s "\
                "database: %s.\nERROR: %s" % (influxdb_host,
                        str(influxdb_port), influxdb_name, str(exc))
        sys.exit(1)

    for database in databases:
        if database["name"] == influxdb_name:
            create_database = False
            break

    if create_database is True:
        LOG.info("Creating database: %s.", influxdb_name)
        g_client.create_database(influxdb_name)
Example #27
class InfluxBackend(BaseBackend):
    """
    InfluxDB backend.

    :param database: name of the InfluxDB database.
    :param client: client instance of InfluxDBClient class.
    :param connection_url: InfluxDB connection url (influxdb://username:password@localhost:8086/databasename).
    :param host: server host.
    :param port: server port.
    :param username: auth username.
    :param password: auth password.
    :param timeout: InfluxDB connection timeout (seconds).
    """
    settings_namespace = 'INFLUX'

    def __init__(self, database, client=None, connection_url=None, host=None, port=None, username=None, password=None,
                 timeout=0.3):
        if not InfluxDBClient:
            raise ImproperlyConfigured('You need to install the influxdb library to use the InfluxDB backend.')
        if client:
            if not isinstance(client, InfluxDBClient):
                raise ImproperlyConfigured('"client" parameter is not an instance of InfluxDBClient client.')
            self.client = client
        elif connection_url:
            self.client = InfluxDBClient.from_DSN(connection_url, timeout=timeout)
        else:
            self.client = InfluxDBClient(host=host, port=port, username=username, password=password,
                                         database=database, timeout=timeout)
        self.client.create_database(database)

    def _get_payload(self, name, value, metric, tags, id_):
        if tags:
            tags['host'] = self._get_host_name()
        else:
            tags = {'host': self._get_host_name()}
        if isinstance(value, dict):
            fields = value
            fields['name'] = name
        else:
            fields = {'name': name, 'value': value}
        return [{'measurement': metric, 'time': datetime.utcnow(), 'tags': tags, 'fields': fields}]

    def report(self, name, metric, value, tags, id_):
        try:
            payload = self._get_payload(name, value, metric, tags, id_)
            return self.client.write_points(payload)
        except Exception as e:
            logger = logging.getLogger(__name__)
            logger.exception(e)
Example #28
class IdbConnection(object):

    def __init__(self, user, pswd, host='localhost', port=8086, db='static'):
        self.host = host
        self.port = int(port)
        self.client = InfluxDBClient(self.host, self.port, user, pswd, db)
        self.client.create_database(db)
        self.client.switch_database(db)


    def table(self, name, tags=None):
        """
        Return Table object, tags is list of columns that should be indexed
        """
        return Table(self.client, name, tags)
Example #29
def check_db_status():
    # if the db is not found, then try to create it
    try:
        dbclient = InfluxDBClient(db_server, db_port, db_admin, db_admin_password)
        dblist = dbclient.get_list_database()
        db_found = False
        for db in dblist:
            if db['name'] == db_name:
                db_found = True
        if not db_found:
            logger.info('Database <%s> not found, trying to create it', db_name)
            dbclient.create_database(db_name)
        return True
    except Exception as e:
        logger.error('Error querying open-nti database: %s', e)
        return False
Example #30
def init_influx():
    """ Initializes influx database. """

    print("Connecting to Influx...")
    influx_client = InfluxDBClient(
        host=config.INFLUX_HOST,
        port=config.INFLUX_PORT,
        database=config.INFLUX_DB_NAME,
    )
    print("Connected to Influx!")

    print("Creating influx database...")
    influx_client.create_database(config.INFLUX_DB_NAME)
    influx_client.create_retention_policy("one_week", "1w", 1, "listenbrainz")

    print("Done!")
Example #31
class TestInfluxDBClient(unittest.TestCase):
    def setUp(self):
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        self.dummy_points = [{
            "name": "cpu_load_short",
            "tags": {
                "host": "server01",
                "region": "us-west"
            },
            "timestamp": "2009-11-10T23:00:00Z",
            "fields": {
                "value": 0.64
            }
        }]

    def test_scheme(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        assert cli._baseurl == 'http://host:8086'

    @unittest.skip('Not implemented for 0.9')
    def test_write_points_batch(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 200, self.dummy_points):
            assert cli.write_points(data=self.dummy_points,
                                    batch_size=2) is True

    def test_write_points_udp(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=port)
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertDictEqual({
            "points": self.dummy_points,
            "database": "test"
        }, json.loads(received_data.decode(), strict=True))

    def test_write_bad_precision_udp(self):
        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=4444)

        with self.assertRaisesRegexp(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"):
            cli.write_points(self.dummy_points, time_precision='ms')

    @raises(Exception)
    def test_write_points_fails(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST, "http://localhost:8086/write")

            cli = InfluxDBClient(database='db')
            cli.write_points(self.dummy_points, time_precision='n')

            self.assertDictEqual(
                {
                    'points': self.dummy_points,
                    'database': 'db',
                    'precision': 'n',
                }, json.loads(m.last_request.body))

    def test_write_points_bad_precision(self):
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
                Exception, "Invalid time precision is given. "
                "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"):
            cli.write_points(self.dummy_points, time_precision='g')

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points_with_precision([])

    def test_query(self):
        example_response = \
            '{"results": [{"series": [{"name": "sdfsdfsdf", ' \
            '"columns": ["time", "value"], "values": ' \
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
            '[{"name": "cpu_load_short", "columns": ["time", "value"], ' \
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [[1415206250119, 40001, 667], [1415206244555, 30001, 7],
                       [1415206228241, 20001,
                        788], [1415206212980, 10001, 555],
                       [1415197271586, 10001, 23]],
            'name':
            'foo',
            'columns': ['time', 'sequence_number', 'val']
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/db/db/series",
                           text=example_response)

            self.assertListEqual(cli.query('select * from foo', chunked=True),
                                 [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_create_database(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_database('new_db')
            self.assertEqual(m.last_request.qs['q'][0],
                             'create database new_db')

    @raises(Exception)
    def test_create_database_fails(self):
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_database('new_db')
            self.assertEqual(m.last_request.qs['q'][0], 'drop database new_db')

    @raises(Exception)
    def test_drop_database_fails(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'delete', 401):
            cli.drop_database('old_db')

    def test_get_list_database(self):
        data = {
            'results': [{
                'series': [{
                    'name': 'databases',
                    'columns': ['name'],
                    'values': [['mydb'], ['myotherdb']]
                }]
            }]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_database(),
                                 ['mydb', 'myotherdb'])

    @raises(Exception)
    def test_get_list_database_fails(self):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_database()

    def test_get_list_series(self):
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Tests that two connection errors will be handled"""
        class CustomMock(object):
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Tests that three connection errors will not be handled"""
        class CustomMock(object):
            i = 0

            def connection_error(self, *args, **kwargs):
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)
Example #32
class TestInfluxDBClient(unittest.TestCase):
    """Set up the TestInfluxDBClient object."""
    def setUp(self):
        """Initialize an instance of TestInfluxDBClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
        self.dummy_points = [{
            "measurement": "cpu_load_short",
            "tags": {
                "host": "server01",
                "region": "us-west"
            },
            "time": "2009-11-10T23:00:00.123456Z",
            "fields": {
                "value": 0.64
            }
        }]

        self.dsn_string = 'influxdb://uSr:pWd@my.host.fr:1886/db'

    def test_scheme(self):
        """Set up the test schema for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        self.assertEqual('http://host:8086', cli._baseurl)

        cli = InfluxDBClient('host',
                             8086,
                             'username',
                             'password',
                             'database',
                             ssl=True)
        self.assertEqual('https://host:8086', cli._baseurl)

    def test_dsn(self):
        """Set up the test datasource name for TestInfluxDBClient object."""
        cli = InfluxDBClient.from_dsn('influxdb://192.168.0.1:1886')
        self.assertEqual('http://192.168.0.1:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://my.host.fr:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                      **{'ssl': False})
        self.assertEqual('http://my.host.fr:1886', cli._baseurl)

    def test_switch_database(self):
        """Test switch database in TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_database('another_database')
        self.assertEqual('another_database', cli._database)

    def test_switch_user(self):
        """Test switch user in TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_user('another_username', 'another_password')
        self.assertEqual('another_username', cli._username)
        self.assertEqual('another_password', cli._password)

    def test_write(self):
        """Test write in TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            cli.write({
                "database":
                "mydb",
                "retentionPolicy":
                "mypolicy",
                "points": [{
                    "measurement": "cpu_load_short",
                    "tags": {
                        "host": "server01",
                        "region": "us-west"
                    },
                    "time": "2009-11-10T23:00:00Z",
                    "fields": {
                        "value": 0.64
                    }
                }]
            })

            self.assertEqual(
                m.last_request.body,
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000000000000\n",
            )

    def test_write_points(self):
        """Test write points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            cli = InfluxDBClient(database='db')
            cli.write_points(self.dummy_points)
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_toplevel_attributes(self):
        """Test write points attrs for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            cli = InfluxDBClient(database='db')
            cli.write_points(self.dummy_points,
                             database='testdb',
                             tags={"tag": "hello"},
                             retention_policy="somepolicy")
            self.assertEqual(
                'cpu_load_short,host=server01,region=us-west,tag=hello '
                'value=0.64 1257894000123456000\n',
                m.last_request.body.decode('utf-8'),
            )

    def test_write_points_batch(self):
        """Test write points batch for TestInfluxDBClient object."""
        dummy_points = [{
            "measurement": "cpu_usage",
            "tags": {
                "unit": "percent"
            },
            "time": "2009-11-10T23:00:00Z",
            "fields": {
                "value": 12.34
            }
        }, {
            "measurement": "network",
            "tags": {
                "direction": "in"
            },
            "time": "2009-11-10T23:00:00Z",
            "fields": {
                "value": 123.00
            }
        }, {
            "measurement": "network",
            "tags": {
                "direction": "out"
            },
            "time": "2009-11-10T23:00:00Z",
            "fields": {
                "value": 12.00
            }
        }]
        expected_last_body = (
            "network,direction=out,host=server01,region=us-west "
            "value=12.0 1257894000000000000\n")

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = InfluxDBClient(database='db')
            cli.write_points(points=dummy_points,
                             database='db',
                             tags={
                                 "host": "server01",
                                 "region": "us-west"
                             },
                             batch_size=2)
        self.assertEqual(m.call_count, 2)
        self.assertEqual(expected_last_body,
                         m.last_request.body.decode('utf-8'))

    def test_write_points_udp(self):
        """Test write points UDP for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=port)
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(
            'cpu_load_short,host=server01,region=us-west '
            'value=0.64 1257894000123456000\n', received_data.decode())

    @raises(Exception)
    def test_write_points_fails(self):
        """Test write points fail for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points([])

    def test_write_points_with_precision(self):
        """Test write points with precision for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)

            cli = InfluxDBClient(database='db')

            cli.write_points(self.dummy_points, time_precision='n')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456000\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='u')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123456\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='ms')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 1257894000123\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='s')
            self.assertEqual(
                b"cpu_load_short,host=server01,region=us-west "
                b"value=0.64 1257894000\n",
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='m')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 20964900\n',
                m.last_request.body,
            )

            cli.write_points(self.dummy_points, time_precision='h')
            self.assertEqual(
                b'cpu_load_short,host=server01,region=us-west '
                b'value=0.64 349415\n',
                m.last_request.body,
            )

    def test_write_points_with_precision_udp(self):
        """Test write points with precision for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient('localhost',
                             8086,
                             'root',
                             'root',
                             'test',
                             use_udp=True,
                             udp_port=port)

        cli.write_points(self.dummy_points, time_precision='n')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123456000\n',
            received_data,
        )

        cli.write_points(self.dummy_points, time_precision='u')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123456\n',
            received_data,
        )

        cli.write_points(self.dummy_points, time_precision='ms')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 1257894000123\n',
            received_data,
        )

        cli.write_points(self.dummy_points, time_precision='s')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b"cpu_load_short,host=server01,region=us-west "
            b"value=0.64 1257894000\n",
            received_data,
        )

        cli.write_points(self.dummy_points, time_precision='m')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 20964900\n',
            received_data,
        )

        cli.write_points(self.dummy_points, time_precision='h')
        received_data, addr = s.recvfrom(1024)
        self.assertEqual(
            b'cpu_load_short,host=server01,region=us-west '
            b'value=0.64 349415\n',
            received_data,
        )

    def test_write_points_bad_precision(self):
        """Test write points w/bad precision TestInfluxDBClient object."""
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
                Exception, "Invalid time precision is given. "
                "\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"):
            cli.write_points(self.dummy_points, time_precision='g')

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """Test write points w/precision fail for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'post', 500):
            cli.write_points_with_precision([])

    def test_query(self):
        """Test query method for TestInfluxDBClient object."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [[1415206250119, 40001, 667],
                       [1415206244555, 30001, 7],
                       [1415206228241, 20001, 788],
                       [1415206212980, 10001, 555],
                       [1415197271586, 10001, 23]],
            'measurement': 'foo',
            'columns': ['time', 'sequence_number', 'val']
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/db/db/series",
                           text=example_response)

            self.assertListEqual(cli.query('select * from foo', chunked=True),
                                 [example_object, example_object])

    @raises(Exception)
    def test_query_fail(self):
        """Test query failed for TestInfluxDBClient object."""
        with _mocked_session(self.cli, 'get', 401):
            self.cli.query('select column_one from foo;')

    def test_ping(self):
        """Test ping querying InfluxDB version."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/ping",
                           status_code=204,
                           headers={'X-Influxdb-Version': '1.2.3'})
            version = self.cli.ping()
            self.assertEqual(version, '1.2.3')

    def test_create_database(self):
        """Test create database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_database('new_db')
            self.assertEqual(m.last_request.qs['q'][0],
                             'create database "new_db"')

    def test_create_numeric_named_database(self):
        """Test create db w/numeric name for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.create_database('123')
            self.assertEqual(m.last_request.qs['q'][0],
                             'create database "123"')

    @raises(Exception)
    def test_create_database_fails(self):
        """Test create database fail for TestInfluxDBClient object."""
        with _mocked_session(self.cli, 'post', 401):
            self.cli.create_database('new_db')

    def test_drop_database(self):
        """Test drop database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_database('new_db')
            self.assertEqual(m.last_request.qs['q'][0],
                             'drop database "new_db"')

    def test_drop_measurement(self):
        """Test drop measurement for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_measurement('new_measurement')
            self.assertEqual(m.last_request.qs['q'][0],
                             'drop measurement "new_measurement"')

    def test_drop_numeric_named_database(self):
        """Test drop numeric db for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text='{"results":[{}]}')
            self.cli.drop_database('123')
            self.assertEqual(m.last_request.qs['q'][0], 'drop database "123"')

    def test_get_list_database(self):
        """Test get list of databases for TestInfluxDBClient object."""
        data = {
            'results': [{
                'series': [{
                    'name': 'databases',
                    'values': [['new_db_1'], ['new_db_2']],
                    'columns': ['name']
                }]
            }]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_database(),
                                 [{
                                     'name': 'new_db_1'
                                 }, {
                                     'name': 'new_db_2'
                                 }])
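
    # Added sketch (illustrative, not the library's actual code): list
    # helpers such as get_list_database() flatten each returned series by
    # zipping its columns with every value row, roughly:
    @staticmethod
    def _series_to_dicts(series):
        """Turn one JSON series into a list of {column: value} dicts."""
        cols = series['columns']
        return [dict(zip(cols, row)) for row in series.get('values', [])]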

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Test get list of dbs fail for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_database()

    def test_get_list_measurements(self):
        """Test get list of measurements for TestInfluxDBClient object."""
        data = {
            "results": [{
                "series": [{
                    "name": "measurements",
                    "columns": ["name"],
                    "values": [["cpu"], ["disk"]]
                }]
            }]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_measurements(),
                                 [{
                                     'name': 'cpu'
                                 }, {
                                     'name': 'disk'
                                 }])

    def test_create_retention_policy_default(self):
        """Test create default ret policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.create_retention_policy('somename',
                                             '1d',
                                             4,
                                             default=True,
                                             database='db')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4 default')

    def test_create_retention_policy(self):
        """Test create retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.create_retention_policy('somename',
                                             '1d',
                                             4,
                                             database='db')

            self.assertEqual(
                m.last_request.qs['q'][0],
                'create retention policy "somename" on '
                '"db" duration 1d replication 4')

    def test_alter_retention_policy(self):
        """Test alter retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            # Test alter duration
            self.cli.alter_retention_policy('somename', 'db', duration='4d')
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" duration 4d')
            # Test alter replication
            self.cli.alter_retention_policy('somename', 'db', replication=4)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" replication 4')

            # Test alter default
            self.cli.alter_retention_policy('somename', 'db', default=True)
            self.assertEqual(
                m.last_request.qs['q'][0],
                'alter retention policy "somename" on "db" default')

    @raises(Exception)
    def test_alter_retention_policy_invalid(self):
        """Test invalid alter ret policy for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.alter_retention_policy('somename', 'db')

    def test_drop_retention_policy(self):
        """Test drop retention policy for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.drop_retention_policy('somename', 'db')
            self.assertEqual(m.last_request.qs['q'][0],
                             'drop retention policy "somename" on "db"')

    @raises(Exception)
    def test_drop_retention_policy_fails(self):
        """Test failed drop ret policy for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'delete', 401):
            cli.drop_retention_policy('default', 'db')

    def test_get_list_retention_policies(self):
        """Test get retention policies for TestInfluxDBClient object."""
        example_response = \
            '{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
            ' "columns": ["name", "duration", "replicaN"]}]}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://*****:*****@mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Test that two connection errors will be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""
            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError

                r = requests.Response()
                r.status_code = 204
                return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Test that three requests errors will not be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""
            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.HTTPError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.HTTPError):
            cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry(self, mock_request):
        """Test that a random number of connection errors will be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""
            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < self.retries:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 204
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        cli = InfluxDBClient(database='db', retries=retries)
        cli.write_points(self.dummy_points)

    @mock.patch('requests.Session.request')
    def test_random_request_retry_raises(self, mock_request):
        """Test a random number of conn errors plus one will not be handled."""
        class CustomMock(object):
            """Create custom mock object for test."""
            def __init__(self, retries):
                self.i = 0
                self.retries = retries

            def connection_error(self, *args, **kwargs):
                """Handle a connection error for the CustomMock object."""
                self.i += 1

                if self.i < self.retries + 1:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        retries = random.randint(1, 5)
        mock_request.side_effect = CustomMock(retries).connection_error

        cli = InfluxDBClient(database='db', retries=retries)

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)
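
    # Added sketch (illustrative, not the library's actual code): the retry
    # tests above assume the client re-issues a failed request until it
    # succeeds or `retries` attempts are exhausted, then re-raises:
    @staticmethod
    def _retry_request(send, retries):
        """Call send() up to `retries` times, re-raising the last error."""
        for attempt in range(1, retries + 1):
            try:
                return send()
            except requests.exceptions.ConnectionError:
                if attempt == retries:
                    raise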

    def test_get_list_users(self):
        """Test get users for TestInfluxDBClient object."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"],'
            '"values":[["test",false]]}]}]}')

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)

            self.assertListEqual(self.cli.get_list_users(), [{
                'user': 'test',
                'admin': False
            }])

    def test_get_list_users_empty(self):
        """Test get empty userlist for TestInfluxDBClient object."""
        example_response = (
            '{"results":[{"series":[{"columns":["user","admin"]}]}]}')
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)

            self.assertListEqual(self.cli.get_list_users(), [])

    def test_grant_admin_privileges(self):
        """Test grant admin privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.grant_admin_privileges('test')

            self.assertEqual(m.last_request.qs['q'][0],
                             'grant all privileges to "test"')

    @raises(Exception)
    def test_grant_admin_privileges_invalid(self):
        """Test grant invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.grant_admin_privileges('')

    def test_revoke_admin_privileges(self):
        """Test revoke admin privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.revoke_admin_privileges('test')

            self.assertEqual(m.last_request.qs['q'][0],
                             'revoke all privileges from "test"')

    @raises(Exception)
    def test_revoke_admin_privileges_invalid(self):
        """Test revoke invalid admin privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.revoke_admin_privileges('')

    def test_grant_privilege(self):
        """Test grant privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.grant_privilege('read', 'testdb', 'test')

            self.assertEqual(m.last_request.qs['q'][0],
                             'grant read on "testdb" to "test"')

    @raises(Exception)
    def test_grant_privilege_invalid(self):
        """Test grant invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.grant_privilege('', 'testdb', 'test')

    def test_revoke_privilege(self):
        """Test revoke privs for TestInfluxDBClient object."""
        example_response = '{"results":[{}]}'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            self.cli.revoke_privilege('read', 'testdb', 'test')

            self.assertEqual(m.last_request.qs['q'][0],
                             'revoke read on "testdb" from "test"')

    @raises(Exception)
    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 400):
            cli.revoke_privilege('', 'testdb', 'test')

    def test_get_list_privileges(self):
        """Tst get list of privs for TestInfluxDBClient object."""
        data = {
            'results': [{
                'series': [{
                    'columns': ['database', 'privilege'],
                    'values': [['db1', 'READ'], ['db2', 'ALL PRIVILEGES'],
                               ['db3', 'NO PRIVILEGES']]
                }]
            }]
        }

        with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(self.cli.get_list_privileges('test'),
                                 [{
                                     'database': 'db1',
                                     'privilege': 'READ'
                                 }, {
                                     'database': 'db2',
                                     'privilege': 'ALL PRIVILEGES'
                                 }, {
                                     'database': 'db3',
                                     'privilege': 'NO PRIVILEGES'
                                 }])

    @raises(Exception)
    def test_get_list_privileges_fails(self):
        """Test failed get list of privs for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        with _mocked_session(cli, 'get', 401):
            cli.get_list_privileges('test')

    def test_invalid_port_fails(self):
        """Test invalid port fail for TestInfluxDBClient object."""
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')

    def test_chunked_response(self):
        """Test chunked reponse for TestInfluxDBClient object."""
        example_response = \
            u'{"results":[{"statement_id":0,"series":' \
            '[{"name":"cpu","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"iops","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}],' \
            '"partial":true}]}\n{"results":[{"statement_id":0,"series":' \
            '[{"name":"load","columns":["fieldKey","fieldType"],"values":' \
            '[["value","integer"]]}],"partial":true}]}\n{"results":' \
            '[{"statement_id":0,"series":[{"name":"memory","columns":' \
            '["fieldKey","fieldType"],"values":[["value","integer"]]}]}]}\n'

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.GET,
                           "http://localhost:8086/query",
                           text=example_response)
            response = self.cli.query('show series limit 4 offset 0',
                                      chunked=True,
                                      chunk_size=4)
            self.assertEqual(len(response), 4)
            self.assertEqual(
                response.__repr__(),
                ResultSet({
                    'series': [{
                        'values': [['value', 'integer']],
                        'name': 'cpu',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'iops',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'load',
                        'columns': ['fieldKey', 'fieldType']
                    }, {
                        'values': [['value', 'integer']],
                        'name': 'memory',
                        'columns': ['fieldKey', 'fieldType']
                    }]
                }).__repr__())
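
# Added sketch: a chunked query response like the one above is a stream of
# newline-delimited JSON documents; splitting it back apart is roughly:
import json

def iter_chunked_results(body):
    """Yield one decoded JSON document per non-empty line of `body`."""
    for line in body.splitlines():
        if line.strip():
            yield json.loads(line)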
Example no. 33
0
def create_influxdb(app_id,
                    database_name="asperathos",
                    img="influxdb",
                    namespace="default",
                    visualizer_port=8086,
                    timeout=60):

    kube.config.load_kube_config(api.k8s_conf_path)

    influx_pod_spec = {
        "apiVersion": "v1",
        "kind": "Pod",
        "metadata": {
            "name": "influxdb-%s" % app_id,
            "labels": {
                "app": "influxdb-%s" % app_id
            }
        },
        "spec": {
            "containers": [{
                "name": "influxdb-master",
                "image": img,
                "env": [{
                    "name": "MASTER",
                    "value": str(True)
                }],
                "ports": [{
                    "containerPort": visualizer_port
                }]
            }]
        }
    }

    influx_svc_spec = {
        "apiVersion": "v1",
        "kind": "Service",
        "metadata": {
            "name": "influxdb-%s" % app_id,
            "labels": {
                "app": "influxdb-%s" % app_id
            }
        },
        "spec": {
            "ports": [{
                "protocol": "TCP",
                "port": visualizer_port,
                "targetPort": visualizer_port
            }],
            "selector": {
                "app": "influxdb-%s" % app_id
            },
            "type":
            "NodePort"
        }
    }

    CoreV1Api = kube.client.CoreV1Api()
    node_port = None

    # Get the redis ip if the value is not explicit in the config file
    try:
        redis_ip = api.redis_ip
    except AttributeError:
        redis_ip = api.get_node_cluster(api.k8s_conf_path)

    try:
        KUBEJOBS_LOG.log("Creating InfluxDB Pod...")
        CoreV1Api.create_namespaced_pod(namespace=namespace,
                                        body=influx_pod_spec)
        KUBEJOBS_LOG.log("Creating InfluxDB Service...")
        s = CoreV1Api.create_namespaced_service(namespace=namespace,
                                                body=influx_svc_spec)
        ready = False
        attempts = timeout
        while not ready:
            read = CoreV1Api.read_namespaced_pod_status(
                name="influxdb-%s" % app_id, namespace=namespace)
            node_port = s.spec.ports[0].node_port

            if read.status.phase == "Running" and node_port is not None:
                try:
                    # TODO change redis_ip to node_ip
                    client = InfluxDBClient(redis_ip, node_port, 'root',
                                            'root', database_name)
                    client.create_database(database_name)
                    KUBEJOBS_LOG.log("InfluxDB is ready!!")
                    ready = True
                except Exception:
                    KUBEJOBS_LOG.log("InfluxDB is not ready yet...")
            else:
                attempts -= 1
                if attempts > 0:
                    time.sleep(1)
                else:
                    raise Exception("InfluxDB cannot be started!"
                                    "Time limite exceded...")
            time.sleep(1)

        influxdb_data = {"port": node_port, "name": database_name}
        return influxdb_data
    except kube.client.rest.ApiException as e:
        KUBEJOBS_LOG.log(e)
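
# Added sketch (hypothetical helper, not part of the snippet above): the
# readiness loop in create_influxdb follows a generic poll-with-timeout
# pattern, which in isolation looks like:
import time

def wait_until_ready(check, timeout=60, interval=1):
    """Poll check() until it stops raising or `timeout` seconds elapse."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            return check()
        except Exception:
            time.sleep(interval)
    raise Exception("Service did not become ready in time")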
Example no. 34
0
        },
        'tags': []
    } for key in ['download', 'upload', 'ping']]
    db.write_points(res)


def run_threaded(job_func):
    job_thread = Thread(target=job_func)
    job_thread.start()


_LOGGER.info('Initialising ...')

global db
db = InfluxDBClient(host='influxdb',
                    port=8086,
                    username=_INFLUX_DB_USER,
                    password=_INFLUX_DB_PASSWORD,
                    database='speedtest')
db.create_database('speedtest')
db.create_retention_policy('forever', 'INF', 1, default=True)
db.switch_user("dbuser", "dbuser")
_LOGGER.info('Initialised')

job()  # run immediately
schedule.every(_SCHEDULE_INTERVAL_MINS).minutes.do(run_threaded, job)

while 1:
    schedule.run_pending()
    time.sleep(1)
Example no. 35
0
from influxdb import InfluxDBClient
import csv
import json
import random

client = InfluxDBClient('localhost', 8086, 'root', 'root',
                        'complete_data_result')
# client.drop_database('complete_data')
client.create_database('complete_data_result')

buffer_length = 4000

buffered_payload = []


def writeResults(row0, row1, row2, row3, label):
    row_count = 0
    global buffered_payload
    for i in range(len(label)):
        data = {}
        data['measurement'] = "iot_fault_data_result"
        data['time'] = row0[i]
        fields = {}
        fields['load'] = row1[i]
        fields['rate'] = row2[i]
        fields['gs'] = row3[i]
        fields['label'] = label[i]
        data['fields'] = fields
        buffered_payload.append(data)
        row_count = row_count + 1
        if row_count == buffer_length:
Example no. 36
0
from datetime import datetime

from influxdb import InfluxDBClient

client = InfluxDBClient(host='localhost',
                        port=8086,
                        username='******',
                        password='******',
                        timeout=3,
                        database='empower')

client.create_database('empower')

timestamp = datetime.utcnow()
# slices_rates : <id_slice>: <rate> (rate in MBits/s)
# lvap_slice :
#   for each slice, generate one point with the slice tag equal to the slice id
#   in the fields, put the lvaps as <number>: <lvap>, where number cannot repeat
points = [
    {
        "measurement": "slices_rates",
        "tags": {},
        "time": timestamp,
        "fields": {
            "1": 2.0,
            "2": 5.0,
            "3": 10.0,
            "4": 15.0
        }
    },
    {
Example no. 37
0
    def send_to_influx(self):
        if not self.config['hostname']:
            self.log.error(
                "No Influx server configured, please set one using: "
                "ceph influx config-set hostname <hostname>")

            self.set_health_checks({
                'MGR_INFLUX_NO_SERVER': {
                    'severity': 'warning',
                    'summary': 'No InfluxDB server configured',
                    'detail': ['Configuration option hostname not set']
                }
            })
            return

        # If influx server has authentication turned off then
        # missing username/password is valid.
        self.log.debug("Sending data to Influx host: %s",
                       self.config['hostname'])
        client = InfluxDBClient(self.config['hostname'], self.config['port'],
                                self.config['username'],
                                self.config['password'],
                                self.config['database'], self.config['ssl'],
                                self.config['verify_ssl'])

        # using influx client get_list_database requires admin privs,
        # instead we'll catch the not found exception and inform the user if
        # db can not be created
        try:
            df_stats, pools = self.get_df_stats()
            client.write_points(df_stats, 'ms')
            client.write_points(self.get_daemon_stats(), 'ms')
            client.write_points(self.get_pg_summary(pools))
            self.set_health_checks(dict())
        except ConnectionError as e:
            self.log.exception("Failed to connect to Influx host %s:%d",
                               self.config['hostname'], self.config['port'])
            self.set_health_checks({
                'MGR_INFLUX_SEND_FAILED': {
                    'severity': 'warning',
                    'summary': 'Failed to send data to InfluxDB server '
                               'at %s:%d due to a connection error' %
                               (self.config['hostname'], self.config['port']),
                    'detail': [str(e)]
                }
            })
        except InfluxDBClientError as e:
            if e.code == 404:
                self.log.info(
                    "Database '%s' not found, trying to create "
                    "(requires admin privs).  You can also create "
                    "manually and grant write privs to user "
                    "'%s'", self.config['database'], self.config['username'])
                client.create_database(self.config['database'])
                client.create_retention_policy(
                    name='8_weeks',
                    duration='8w',
                    replication='1',
                    default=True,
                    database=self.config['database'])
            else:
                self.set_health_checks({
                    'MGR_INFLUX_SEND_FAILED': {
                        'severity': 'warning',
                        'summary': 'Failed to send data to InfluxDB',
                        'detail': [str(e)]
                    }
                })
                raise
Example no. 38
0
from influxdb import InfluxDBClient

# tsdb = InfluxDBClient('192.168.0.11', 8086, 'root', 'root', 'weather_data')
tsdb = InfluxDBClient('influxdb', 8086, 'root', 'root', 'weather_data')
tsdb.create_database('weather_data')

import asyncio
import asyncpg


async def init_datastores():
    pass
    # global pgdb
    # pgdb = await asyncpg.create_pool(database='postgres',
    #                                         user='******', password='******')


loop = asyncio.get_event_loop()
loop.run_until_complete(init_datastores())
# async def run():
#     conn = await asyncpg.connect(user='******', password='******',
#                                  database='database', host='127.0.0.1')
#     values = await conn.fetch('''SELECT * FROM mytable''')
#     await conn.close()
Example no. 39
0
def influx_database():
    influx = InfluxDBClient()
    influx.create_database('coinrat_test')
    influx._database = 'coinrat_test'
    yield influx
    influx.drop_database('coinrat_test')
Example no. 40
0
class InfluxDBListener:
    """
    Events listener that writes locust events to the given influxdb connection
    """
    
    def __init__(
        self,
        env: locust.env.Environment,
        influxDbSettings: InfluxDBSettings
    ):

        # flush related attributes
        self.cache = []
        self.stop_flag = False
        self.interval_ms = influxDbSettings.interval_ms
        # influxdb settings 
        try:
            self.influxdb_client = InfluxDBClient(
                influxDbSettings.influx_host, influxDbSettings.influx_port,
                influxDbSettings.user, influxDbSettings.pwd,
                influxDbSettings.database)
            self.influxdb_client.create_database(influxDbSettings.database)
        except Exception:
            logging.exception('Could not connect to influxdb')
            return

        # determine if worker or master
        self.node_id = 'local'
        if '--master' in sys.argv:
            self.node_id = 'master'
        if '--worker' in sys.argv:
            # TODO: Get real ID of slaves from locust somehow
            self.node_id = 'worker'

        # start background event to push data to influx
        self.flush_worker = gevent.spawn(self.__flush_cached_points_worker)
        self.test_start(0)
        
        events = env.events
        
        # requests
        events.request_success.add_listener(self.request_success)
        events.request_failure.add_listener(self.request_failure)
        # events   
        events.test_stop.add_listener(self.test_stop)
        events.user_error.add_listener(self.user_error)
        events.spawning_complete.add_listener(self.spawning_complete)
        events.quitting.add_listener(self.quitting)
        # complete
        atexit.register(self.quitting)

    def request_success(self, request_type, name, response_time, response_length, **_kwargs) -> None:
        self.__listen_for_requests_events(self.node_id, 'locust_requests', request_type, name, response_time, response_length, True, None)

    def request_failure(self, request_type, name, response_time, response_length, exception, **_kwargs) -> None:
        self.__listen_for_requests_events(self.node_id, 'locust_requests', request_type, name, response_time, response_length, False, exception)

    def spawning_complete(self, user_count) -> None:
        self.__register_event(self.node_id, user_count, 'spawning_complete')
        return True

    def test_start(self, user_count) -> None:
        self.__register_event(self.node_id, 0, 'test_started')

    def test_stop(self, user_count) -> None:
        self.__register_event(self.node_id, 0, 'test_stopped')
    
    def user_error(self, user_instance, exception, tb, **_kwargs) -> None:
        self.__listen_for_locust_errors(self.node_id, user_instance, exception, tb)

    def quitting(self, **_kwargs) -> None:
        self.__register_event(self.node_id, 0, 'quitting')
        self.last_flush_on_quitting()

    def __register_event(self, node_id: str, user_count: int, event: str, **_kwargs) -> None:
        """
        Persist locust event such as hatching started or stopped to influxdb.
        Append user_count in case that it exists

        :param node_id: The id of the node reporting the event.
        :param event: The event name or description.
        """

        time = datetime.utcnow()
        tags = {
        }
        fields = {
            'node_id': node_id,
            'event': event,
            'user_count': user_count
        }

        point = self.__make_data_point('locust_events', tags, fields, time)
        self.cache.append(point)


    def __listen_for_requests_events(self, node_id, measurement, request_type, name, response_time, response_length, success, exception) -> None:
        """
        Persist request information to influxdb.

        :param node_id: The id of the node reporting the event.
        :param measurement: The measurement where to save this point.
        :param success: Flag the request as successful or not.
        """

        time = datetime.utcnow()
        tags = {
            'node_id': node_id,
            'request_type': request_type,
            'name': name,
            'success': success,
            'exception': repr(exception),
        }

        if isinstance(exception, HTTPError):
            tags['code'] = exception.response.status_code

        fields = {
            'response_time': response_time,
            'response_length': response_length,
            'counter': 1,  # TODO: Review the need of this field
        }
        point = self.__make_data_point(measurement, tags, fields, time)
        self.cache.append(point)

    def __listen_for_locust_errors(self, node_id, user_instance, exception: Exception = None, tb = None) -> None:
        """
        Persist locust errors to InfluxDB.

        :param node_id: The id of the node reporting the error.
        :return: None
        """

        time = datetime.utcnow()
        tags = {
            'exception_tag': repr(exception)
        }
        fields = {
            'node_id': node_id,
            'user_instance': repr(user_instance),
            'exception': repr(exception),
            'traceback': "".join(traceback.format_tb(tb)),
        }
        point = self.__make_data_point('locust_exceptions', tags, fields, time)
        self.cache.append(point)


    def __flush_cached_points_worker(self) -> None:
        """
        Background job that puts the points into the cache to be flushed according tot he interval defined.

        :param influxdb_client:
        :param interval:
        :return: None
        """
        log.info('Flush worker started.')
        while not self.stop_flag:
            self.__flush_points(self.influxdb_client)
            gevent.sleep(self.interval_ms / 1000)

    def __make_data_point(self, measurement: str, tags: dict, fields: dict, time: datetime) -> dict:
        """
        Create a single data point to be saved to influxdb.

        :param measurement: The measurement where to save this point.
        :param tags: Dictionary of tags to be saved in the measurement.
        :param fields: Dictionary of fields to be saved to the measurement.
        :param time: The time of this point.
        """
        return {"measurement": measurement, "tags": tags, "time": time, "fields": fields}


    def last_flush_on_quitting(self):
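        # Stop the background worker and wait for it to exit before the
        # final flush, so no concurrent flush races with this one.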
        self.stop_flag = True
        self.flush_worker.join()
        self.__flush_points(self.influxdb_client)


    def __flush_points(self, influxdb_client: InfluxDBClient) -> None:
        """
        Write the cached data points to influxdb

        :param influxdb_client: An instance of InfluxDBClient
        :return: None
        """
        log.debug(f'Flushing points {len(self.cache)}')
        to_be_flushed = self.cache
        self.cache = []
        success = influxdb_client.write_points(to_be_flushed)
        if not success:
            log.error('Failed to write points to influxdb.')
            # If the write failed for any reason, put the points back at the
            # beginning of the cache so the next flush retries them.
            self.cache = to_be_flushed + self.cache
Example no. 41
0
class InfluxdbUtils(object):
    """ 
    influxdb operator utils.
    """
    def __init__(self,
                 host=u'localhost',
                 port=8086,
                 username=u'root',
                 password=u'root',
                 dbname=u'vzhucloud'):

        self.host = host
        self.port = port
        self.username = username
        self.password = password
        self.dbname = dbname

    def connect(self):
        """
        Connect to influxdb server.
        """
        self.client = InfluxDBClient(host=self.host,
                                     port=self.port,
                                     username=self.username,
                                     password=self.password,
                                     timeout=30)

        # get_list_database() returns dicts such as {'name': 'db'},
        # so compare against the 'name' values.
        databases = [db['name'] for db in self.client.get_list_database()]
        if self.dbname not in databases:
            try:
                self.client.create_database(self.dbname)
            except InfluxDBClientError:
                self.client.drop_database(self.dbname)
                self.client.create_database(self.dbname)

        self.client.switch_user(self.username, self.password)
        self.client.switch_database(self.dbname)

    def create_retention_policy(self,
                                name=u'30days',
                                duration='30d',
                                replication='1'):
        """
        Create a retention policy for a database.

        :param name (str) – the name of the new retention policy
        :param duration (str) – the duration of the new retention policy. 
               Durations such as 1h, 90m, 12h, 7d, and 4w are all supported
               and mean 1 hour, 90 minutes, 12 hours, 7 days, and 4 weeks,
               respectively.
        :param replication (str) – the replication of the retention policy
        """
        ret_policies = self.client.get_list_retention_policies(self.dbname)
        for policy in ret_policies:
            if policy['name'] == name:
                return

        self.client.create_retention_policy(name, duration, replication,
                                            self.dbname, True)

    def write(self, data):
        """
        Write time series data to influxdb.

        :param data (json array) - as below
            [{
                "measurement": "vm_monitor_data",
                "tags": {
                    "host": "5e4643d3-fab0-4adb-8a13-a9d278e28686",
                    "region": "wuxi-test-compute144",
                    "object": "disk",
                    "metric": "ops_read",
                    "instance": "/dev/sda1"
                },
                "time": "2017-03-12T22:00:00Z",
                "fields": {
                    "value": 89
                }
            }]
        """
        if not self.client.write_points(data):
            logging.error("failed to write_points.")

    def read(self, query):
        """
        Send a query to InfluxDB.

        :param query (str) -  the actual query string
        """
        return self.client.query(query)
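
# Added sketch (hypothetical usage of the class above; the host, database
# and query are assumptions, not part of the original snippet):
if __name__ == '__main__':
    utils = InfluxdbUtils(host=u'localhost', dbname=u'vzhucloud')
    utils.connect()
    utils.create_retention_policy(name=u'30days', duration='30d')
    print(utils.read('SELECT * FROM vm_monitor_data LIMIT 1'))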
Example no. 42
0
parser.add_argument("state", help="current task state")
parser.add_argument("weight", help="current task weight")
args = parser.parse_args()
task_name = args.task
task_state = args.state
task_weight = args.weight

influxdb_host = 'influxdb.influxdb.svc.cluster.local'
influxdb_port = 80
influxdb_user = '******'
influxdb_password = '******'
influxdb_database = 'student'

client = InfluxDBClient(influxdb_host, influxdb_port, influxdb_user,
                        influxdb_password, influxdb_database)
client.create_database(influxdb_database)
client.get_list_database()
client.switch_database(influxdb_database)

# delete old record
client.query('DROP MEASUREMENT "%s"' % (task_name))

current_time = datetime.now().isoformat(timespec='seconds')

json_body = [{
    "measurement": "%s" % task_name,
    "tags": {
        "user": "******",
    },
    "time": "%s" % (str(current_time)),
    "fields": {
Example no. 43
0
class waterSensor():
    sendLevel = True
    maxLevel = 10
    minLevel = 5
    influxclient = None
    currReading = 1

    def connectInflux(self):
        self.influxclient = InfluxDBClient(host='3.139.96.11',
                                           port=8086,
                                           database='waterSensor')
        self.influxclient.create_database('waterSensor')

    def sendReading(self):
        if self.sendLevel:
            json_body = [{
                "measurement": "levels",
                "tags": {
                    "host": "server01",
                    "region": "us-west"
                },
                "time": datetime.now(),
                "fields": {
                    "waterLevel": self.currReading
                }
            }]
            self.influxclient.write_points(json_body)

    def on_connect(self, client, userdata, flags, rc):
        print("Connected to server (i.e., broker) with result code " + str(rc))

        #subscribe to topics of interest here
        client.subscribe(
            'lstsai/waterCtrl')  #sub to led and set custom callback
        client.message_callback_add('lstsai/waterCtrl',
                                    self.waterCtrl_callback)

        client.subscribe(
            'lstsai/maxLevel')  #sub to led and set custom callback
        client.message_callback_add('lstsai/maxLevel', self.maxLevel_callback)

        client.subscribe(
            'lstsai/minLevel')  #sub to led and set custom callback
        client.message_callback_add('lstsai/minLevel', self.minLevel_callback)

    def waterCtrl_callback(self, client, userdata, msg):
        if str(msg.payload, "utf-8") == "start":
            self.sendLevel = True
        elif str(msg.payload, "utf-8") == "stop":
            self.sendLevel = False

    def maxLevel_callback(self, client, userdata, msg):
        self.maxLevel = int(str(msg.payload, "utf-8"))

    def minLevel_callback(self, client, userdata, msg):
        self.minLevel = int(str(msg.payload, "utf-8"))

    #Default message callback. Please use custom callbacks.
    def on_message(self, client, userdata, msg):
        print("on_message: " + msg.topic + " " + str(msg.payload, "utf-8"))

    def main(self):
        client = mqtt.Client()
        client.on_message = self.on_message
        client.on_connect = self.on_connect
        client.connect(host="eclipse.usc.edu", port=11000, keepalive=60)
        client.loop_start()

        self.connectInflux()
        #set the ports
        ranger = 4
        ledMax = 2
        ledMin = 3
        reading = 1
        lastReading = 12 - grovepi.ultrasonicRead(ranger)
        while True:
            self.currReading = 12 - grovepi.ultrasonicRead(ranger)
            if abs(self.currReading - lastReading) < 5:
                self.sendReading()
                client.publish("lstsai/waterLevel", self.currReading)
                lastReading = self.currReading
            if self.sendLevel:
                self.sendReading()
            if self.currReading > self.maxLevel:
                grovepi.digitalWrite(ledMax, 1)
            else:
                grovepi.digitalWrite(ledMax, 0)
            if self.currReading < self.minLevel:
                grovepi.digitalWrite(ledMin, 1)
            else:
                grovepi.digitalWrite(ledMin, 0)

            time.sleep(1)
Example no. 44
0
class SmartModule(object):
    """Represents a HAPI Smart Module (Implementation).

    Attributes:
        id: ID of the site
        name: Name of the site
        wunder_key: Weather Underground key to be used
        operator: Name of the primary site operator
        email: Email address of the primary site operator
        phone: Phone number of the primary site operator
        location: Location or Address of the site
    """
    def __init__(self):
        self.mock = True
        self.comm = communicator.Communicator(self)
        self.data_sync = DataSync()
        self.id = ""
        self.name = ""
        self.wunder_key = ""
        self.operator = ""
        self.email = ""
        self.phone = ""
        self.location = ""
        self.longitude = ""
        self.latitude = ""
        self.scheduler = None
        self.hostname = socket.gethostname()
        self.last_status = ""
        self.ifconn = None
        self.rtc = rtc_interface.RTCInterface()
        self.rtc.power_on_rtc()
        self.launch_time = self.rtc.get_datetime()
        self.asset = Asset(self.hostname)
        self.asset.id = self.rtc.get_id()
        self.asset.context = self.rtc.get_context()
        self.asset.type = self.rtc.get_type()
        self.ai = asset_interface.AssetInterface(self.asset.type,
                                                 self.rtc.mock)
        self.rtc.power_off_rtc()

    def load_influx_settings(self):
        """Load Influxdb server information stored in database base."""
        try:
            settings = {}
            field_names = '''
                server
                port
                username
                password
            '''.split()
            sql = 'SELECT {fields} FROM influx_settings LIMIT 1;'.format(
                fields=', '.join(field_names))
            database = sqlite3.connect(utilities.DB_CORE)
            db_elements = database.cursor().execute(sql).fetchone()
            for field, value in zip(field_names, db_elements):
                settings[field] = value
            self.ifconn = InfluxDBClient(settings["server"], settings["port"],
                                         settings["username"],
                                         settings["password"])
            Log.info("Influxdb information loaded.")
        except Exception as excpt:
            Log.exception("Trying to load Influx server information: %s.",
                          excpt)
        finally:
            database.close()

    def become_broker(self):
        """If no broker found SM performs operation(s) to become the broker."""
        try:
            os.system("sudo systemctl start avahi-daemon.service"
                      )  # We will change it soon!
        except Exception as excpt:
            Log.info("Error trying to become the Broker: %s.", excpt)

    def find_service(self, zeroconf, service_type, name, state_change):
        """Check for published MQTT. If it finds port 1883 of type '_mqtt', update broker name."""
        # Get the service we want (port 1883 and type '_mqtt._tcp.local.')
        info = zeroconf.get_service_info(service_type, name)
        if not (info.port == 1883 and service_type == "_mqtt._tcp.local."):
            return

        if state_change is ServiceStateChange.Added:
            # If this is our service, update mqtt broker name and ip on self.comm (Communicator)
            self.comm.broker_name = info.server
            self.comm.broker_ip = str(socket.inet_ntoa(info.address))
        elif state_change is ServiceStateChange.Removed:
            # Implement way to handle removed MQTT service
            # It only makes sense if leave zeroconf connection opened. It could be interesting.
            pass

    def find_broker(self, zeroconf):
        """Browser for our (MQTT) services using Zeroconf."""
        browser = ServiceBrowser(zeroconf,
                                 "_mqtt._tcp.local.",
                                 handlers=[self.find_service])

    def discover(self):
        print(
            "{status} Smart Module hosting asset {asset_id} {asset_type} {asset_context}."
            .format(status="Mock" if self.rtc.mock else "Real",
                    asset_id=self.asset.id,
                    asset_type=self.asset.type,
                    asset_context=self.asset.context))

        try:
            max_sleep_time = 3  # Calling sleep should be reviewed.
            zeroconf = Zeroconf()
            Log.info("Performing Broker discovery...")
            self.find_broker(zeroconf)
            # Wait for max_sleep_time to see if we found it.
            time.sleep(max_sleep_time)
            if self.comm.broker_name or self.comm.broker_ip:  # Found it.
                Log.info("MQTT Broker: {broker_name} IP: {broker_ip}.".format(
                    broker_name=self.comm.broker_name,
                    broker_ip=self.comm.broker_ip))
            else:  # Make necessary actions to become the broker.
                Log.info("Broker not found. Becoming the broker.")
                self.become_broker()
            time.sleep(max_sleep_time)
            self.comm.connect()  # Now it's time to connect to the broker.
        except Exception as excpt:
            Log.exception("[Exiting] Trying to find or become the broker.")
        finally:
            Log.info("Closing Zeroconf connection.")
            zeroconf.close()

        t_end = time.time() + 10
        while (time.time() < t_end) and not self.comm.is_connected:
            time.sleep(1)

        self.comm.subscribe("SCHEDULER/RESPONSE")
        self.comm.send("SCHEDULER/QUERY", "Where are you?")
        Log.info("Waiting for Scheduler response...")
        time.sleep(5)  # Just wait for reply... Need a review?

        self.comm.send("ANNOUNCE", self.hostname + " is online.")

        t_end = time.time() + 2
        while (time.time() < t_end) and not self.comm.is_connected:
            time.sleep(1)

        if not self.comm.scheduler_found:  # Become the Scheduler (necessary actions as Scheduler)
            try:
                Log.info("No Scheduler found. Becoming the Scheduler.")
                self.scheduler = Scheduler()
                self.scheduler.smart_module = self
                self.scheduler.prepare_jobs(self.scheduler.load_schedule())
                self.comm.scheduler_found = True
                self.comm.subscribe("SCHEDULER/QUERY")
                self.comm.unsubscribe("SCHEDULER/RESPONSE")
                self.comm.subscribe("STATUS/RESPONSE" + "/#")
                self.comm.subscribe("ASSET/RESPONSE" + "/#")
                self.comm.subscribe("ALERT" + "/#")
                self.comm.send("SCHEDULER/RESPONSE", self.hostname)
                self.comm.send("ANNOUNCE",
                               self.hostname + " is running the Scheduler.")
                Log.info("Scheduler program loaded.")
            except Exception as excpt:
                Log.exception("Error initializing scheduler. %s.", excpt)

    def load_site_data(self):
        field_names = '''
            id
            name
            wunder_key
            operator
            email
            phone
            location
            longitude
            latitude
        '''.split()
        try:
            sql = 'SELECT {fields} FROM site LIMIT 1;'.format(
                fields=', '.join(field_names))
            database = sqlite3.connect(utilities.DB_CORE)
            db_elements = database.cursor().execute(sql)
            for row in db_elements:
                for field_name, field_value in zip(field_names, row):
                    setattr(self, field_name, field_value)
            Log.info("Site data loaded.")
        except Exception as excpt:
            Log.exception("Error loading site data: %s.", excpt)
        finally:
            database.close()

    def connect_influx(self, database_name):
        """Connect to database named database_name on InfluxDB server.
        Create database if it does not already exist.
        Return the connection to the database."""

        databases = self.ifconn.get_list_database()
        for db in databases:
            if database_name in db.values():
                break
        else:
            self.ifconn.create_database(database_name)

        self.ifconn.switch_database(database_name)
        return self.ifconn
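
For illustration, a standalone sketch of the same create-if-missing pattern this method implements; the host, port, and database name are placeholders:

from influxdb import InfluxDBClient

ifconn = InfluxDBClient('localhost', 8086)
database_name = 'Environment'
# get_list_database() returns dicts of the form {'name': ...}.
if database_name not in [db['name'] for db in ifconn.get_list_database()]:
    ifconn.create_database(database_name)
ifconn.switch_database(database_name)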

    def push_sysinfo(self, asset_context, information):
        """Push System Status (stats) to InfluxDB server."""
        timestamp = datetime.datetime.now()
        conn = self.connect_influx(asset_context)
        cpuinfo = [{
            "measurement": "cpu",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "percentage",
                "load": information["cpu"]["percentage"]
            }
        }]
        meminfo = [{
            "measurement": "memory",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "KBytes",
                "free": information["memory"]["free"],
                "used": information["memory"]["used"],
                "cached": information["memory"]["cached"]
            }
        }]
        netinfo = [{
            "measurement": "network",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "packets",
                "packet_recv": information["network"]["packet_recv"],
                "packet_sent": information["network"]["packet_sent"]
            }
        }]
        bootinfo = [{
            "measurement": "boot",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "timestamp",
                "date": information["boot"]
            }
        }]
        diskinf = [{
            "measurement": "disk",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "KBytes",
                "total": information["disk"]["total"],
                "free": information["disk"]["free"],
                "used": information["disk"]["used"]
            }
        }]
        tempinf = [{
            "measurement": "internal",
            "tags": {
                "asset": self.name
            },
            "time": timestamp,
            "fields": {
                "unit": "C",
                "unit temp": str(self.rtc.get_temp()),
            }
        }]

        conn.write_points(cpuinfo + meminfo + netinfo + bootinfo + diskinf +
                          tempinf)
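
For reference, a minimal sketch of the information dict that push_sysinfo() reads above; the keys come from the code, the values are placeholder numbers:

information = {
    "cpu": {"percentage": 12.5},
    "memory": {"free": 512000, "used": 256000, "cached": 128000},
    "network": {"packet_recv": 1024, "packet_sent": 980},
    "boot": "2020-05-20 08:00:00",
    "disk": {"total": 15000000, "free": 9000000, "used": 6000000},
}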

    def get_status(self):
        """Fetch system information (stats)."""
        try:
            sysinfo = SystemStatus(update=True)
            return sysinfo
        except Exception as excpt:
            Log.exception("Error getting System Status: %s.", excpt)

    def on_query_status(self):
        """It'll be called by the Scheduler to ask for System Status information."""
        self.comm.send("STATUS/QUERY", "How are you?")

    def on_check_alert(self):
        """It'll called by the Scheduler to ask for Alert Conditions."""
        self.comm.send("ASSET/QUERY", "Is it warm here?")

    def get_asset_data(self):
        try:
            self.asset.time = str(time.time())
            self.asset.value = str(self.ai.read_value())
        except Exception as excpt:
            Log.exception("Error getting asset data: %s.", excpt)
            self.asset.value = -1000  # sentinel indicating a read failure

        return self.asset.value

    def log_sensor_data(self, data, virtual):
        if not virtual:
            try:
                self.push_data(self.asset.name, self.asset.context,
                               self.asset.value, self.asset.unit)
            except Exception as excpt:
                Log.exception("Error logging sensor data: %s.", excpt)
        else:
            # For virtual assets, assume that the data is already parsed JSON
            unit_symbol = {
                'temp_c': 'C',
                'relative_humidity': '%',
                'pressure_mb': 'mb',
            }
            try:
                for factor in ('temp_c', 'relative_humidity', 'pressure_mb'):
                    value = str(data[factor]).replace("%", "")
                    self.push_data(factor, "Environment", value,
                                   unit_symbol[factor])

            except Exception as excpt:
                Log.exception("Error logging sensor data: %s.", excpt)

    def push_data(self, asset_name, asset_context, value, unit):
        try:
            conn = self.connect_influx(asset_context)
            json_body = [{
                "measurement": asset_context,
                "tags": {
                    "site": self.name,
                    "asset": asset_name
                },
                "time": str(datetime.datetime.now()),
                "fields": {
                    "value": value,
                    "unit": unit
                }
            }]
            conn.write_points(json_body)
            Log.info("Wrote to analytic database.")
        except Exception as excpt:
            Log.exception("Error writing to analytic database: %s.", excpt)

    def get_weather(self):
        response = ""
        url = ('http://api.wunderground.com/'
               'api/{key}/conditions/q/{lat},{lon}.json').format(
                   key=self.wunder_key,
                   lat=self.latitude,
                   lon=self.longitude,
               )

        try:
            f = urllib2.urlopen(url)
            json_string = f.read()
            parsed_json = json.loads(json_string)
            response = parsed_json['current_observation']
            f.close()
        except Exception as excpt:
            Log.exception("Error getting weather data: %s.", excpt)
        return response
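
The returned current_observation dict is what log_sensor_data() above consumes for virtual assets. A hedged sketch of that flow, where module stands for an initialized instance of this class:

# temp_c, relative_humidity and pressure_mb are the factors
# log_sensor_data() pushes for virtual assets.
observation = module.get_weather()
if observation:
    module.log_sensor_data(observation, virtual=True)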

    def log_command(self, job, result):
        database = None
        try:
            now = str(datetime.datetime.now())
            command = '''
                INSERT INTO command_log (timestamp, command, result)
                VALUES (?, ?, ?)
            ''', (now, job.name, result)
            Log.info("Executed %s.", job.name)
            database = sqlite3.connect(utilities.DB_HIST)
            database.cursor().execute(*command)
            database.commit()
        except Exception as excpt:
            Log.exception("Error logging command: %s.", excpt)
        finally:
            if database is not None:  # Guard: connect() itself may have failed.
                database.close()

    def get_env(self):
        now = datetime.datetime.now()
        uptime = now - self.launch_time
        days = uptime.days
        minutes, seconds = divmod(uptime.seconds, utilities.SECONDS_PER_MINUTE)
        hours, minutes = divmod(minutes, utilities.MINUTES_PER_HOUR)
        s = ('''
            Smart Module Status
              Software Version v{version}
              Running on: {platform}
              Encoding: {encoding}
              Python Information
               - Executable: {executable}
               - v{sys_version}
              Timestamp: {timestamp}
              Uptime: This Smart Module has been online for:
              {days} days, {hours} hours, {minutes} minutes and {seconds} seconds.
        ''').format(
            version=utilities.VERSION,
            platform=sys.platform,
            encoding=sys.getdefaultencoding(),
            executable=sys.executable,
            sys_version=sys.version.split()[0],
            timestamp=now.strftime('%Y-%m-%d %H:%M:%S'),
            days=days,
            hours=hours,
            minutes=minutes,
            seconds=seconds,
        )
        s = utilities.trim(s) + '\n'
        try:
            self.comm.send("ENV/RESPONSE", s)
        except Exception as excpt:
            Log.exception("Error getting environment data: %s.", excpt)
Esempio n. 45
0
from influxdb import InfluxDBClient
from influxdb import DataFrameClient

#server = Flask(__name__)
#server.config['INFLUXDB_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'influxdb:///plan.db')
##server.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
#db = SQLAlchemy(server)
#db_URI = os.environ.get('DATABASE_URL', 'influxdb:///plan.db')
#engine = create_engine(db_URI)

client1 = InfluxDBClient(host='127.0.0.1',
                         port=8086)  #(host='0.0.0.0', port=8086)

#client = InfluxDBClient(host='mydomain.com', port=8086, username='******', password='******', ssl=True, verify_ssl=True)

client1.create_database('plan')

#client.get_list_database()

client1.switch_database('plan')

#[{'fields': {'message': 'hii'}, 'measurement': 'table', 'tags': {'chat': <paho.mqtt.client.Client object at 0x7f196754ff10>}, 'time': '2020-05-20 14:27:27.884088'}]


def insert(messa):
    print("insert=", messa)
    client1.write_points(messa)


#query3 = client1.query('select * from ' + 'hi'  + ' where time < now()-' + '1d', chunked=True)
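
A hedged usage sketch for insert(); the point mirrors the commented sample above, with the tag value reduced to a plain string (InfluxDB tag values must be strings, not client objects):

insert([{
    'measurement': 'table',
    'tags': {'chat': 'client-1'},
    'time': '2020-05-20 14:27:27.884088',
    'fields': {'message': 'hii'},
}])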
Esempio n. 46
0
        IPVcolumn = df['Ipv']
        PPVcolumn = df['Ppv']
        print("analyzed time values = \n",
              Timeadjusted)  # prints converted time column values
        print(df)

        # upload to influxdb cloud
        host = 'localhost'
        port = 8086
        user = '******'
        password = '******'
        dbname = invertorID
        # Temporarily avoid line protocol time conversion issues #412, #426, #431.
        protocol = 'json'
        client = InfluxDBClient(host, port, user, password, dbname)
        client.create_database(dbname)

        for i in range(newsize):
            #print ('currentfile=',newsize)
            json_body = [{
                "measurement": invertorID,
                "tags": {
                    "col3": "col3"
                },
                "time": Timeadjusted[i],
                "fields": {
                    "temperature": tempcolumn[i],
                    "Eac_today": EACcolumn[i],
                    "Vpv": VPVcolumn[i],
                    "Iac": IACcolumn[i],
                    "Vac": VACcolumn[i],
Esempio n. 47
0
def main():
    global register
    global measurement
    global info
    global status
    global y_exp
    global c_exp
    global y_gen
    global c_gen
    global y_tot
    global c_tot
    global c_peak
    global tmax
    global min_ins
    global c_stop
    global c_start
    global client
    global flux_client
    global ok
    inverter_file = config.model
    inverter = __import__(inverter_file)
    if config.debug:
        print("got it")

    inverter_ip = inverter.inv_address()
    print(inverter_ip)
    if inverter_ip != "":
        config.inverter_ip = inverter_ip

    print(config.inverter_ip)
    if config.debug:
        print("statring setup")
        print("opening modbus")

    client = connect_bus(ip=config.inverter_ip,
                         PortN=config.inverter_port,
                         timeout=config.timeout)

    if config.debug:
        print("opening db")

    try:
        flux_client = InfluxDBClient(host=config.influxdb_ip,
                                     port=config.influxdb_port,
                                     username=config.influxdb_user,
                                     password=config.influxdb_password)

    except Exception as e:
        print("main error: %s" % str(e))
        flux_client = None
        print("problem opening db")

    if flux_client is None:  # No database connection; nothing useful to do.
        return -1

    if config.debug:
        print("setting db")

    flux_client.create_database(config.influxdb_database)
    flux_client.create_database(config.influxdb_longterm)
    if config.debug:
        print("setting params")

    last_loop = 0
    info_time = 0
    last_status = {}
    y_exp = 0
    c_exp = 0
    y_tot = 0
    c_tot = 0
    y_gen = 0
    c_gen = 0
    c_peak = 0
    c_start = ""
    c_stop = ""
    tmax = 0
    min_ins = 1000
    thread_time = [1]
    thread_mean = 1
    sleep = 0
    midnight = (int(
        time.mktime(
            time.strptime(
                time.strftime("%m/%d/%Y ") + " 00:00:00",
                "%m/%d/%Y %H:%M:%S"))))
    s = (
        'SELECT %s FROM "%s" WHERE time >= %s and time <= %s and P_daily > 0'
    ) % (
        'max("M_PExp") as "M_PExp", max("M_PTot") as "M_PTot", max("P_accum") as "P_accum", max("P_daily") as "P_daily", max("Temp") as "Temp"',
        config.model, str(
            (midnight - 24 * 3600) * 1000000000), str(midnight * 1000000000))
    zeros = flux_client.query(s, database=config.influxdb_database)
    m = list(zeros.get_points(measurement=config.model))
    try:
        y_exp = m[0]['M_PExp']
        y_tot = m[0]['M_PTot']
        y_gen = m[0]['P_daily']
        tmax = m[0]['Temp']

    except Exception as e:
        print("main 1 error: %s" % str(e))
        y_exp = 0
        y_tot = 0
        y_gen = 0
        tmax = 0

    s = (
        'SELECT %s FROM "%s_info" WHERE time >= %s and time <= %s and Insulation > 0'
    ) % ('min("Insulation") as "Insulation"', config.model,
         str((midnight - 3600 * 24) * 1000000000), str(midnight * 1000000000))
    zeros = flux_client.query(s, database=config.influxdb_database)
    m = list(zeros.get_points(measurement=config.model + "_info"))
    try:
        if config.debug:
            print(m[0])

        min_ins = m[0]['Insulation']

    except Exception as e:
        print("main 2 error: %s" % str(e))
        if config.debug:
            print(s)

        min_ins = 1000

    midnight = int(
        time.mktime(
            time.strptime(
                time.strftime("%m/%d/%Y ") + " 23:59:59",
                "%m/%d/%Y %H:%M:%S"))) + 1
    if ((float(time.strftime("%S")) % 60) > 1):
        time.sleep(59 - float(time.strftime("%S")))

    utils.fill_blanks(flux_client, midnight)
    forcast_time = forcast()
    if config.debug:
        print("loop")

    ok = True
    while ok:
        current_time = time.time()
        if (current_time - last_loop + thread_mean >= int(
                config.scan_interval)):
            last_loop = current_time
            measurement = {}
            info = {}
            status = {}
            j = 0
            while do_map(client, config, inverter):
                j += 1
                if config.debug:
                    print(time.ctime(), register)
                    print("map looping")

                time.sleep(10)
                if j > 10:
                    if config.debug:
                        print("map infinite loop")

                    ok = False

                if not ok:
                    return -1

            if not ok:
                return -1

            current_time = time.time()
            if (current_time - info_time > config.info_interval):
                utils.write_influx(flux_client, info, config.model + "_info",
                                   config.influxdb_database)
                info_time = current_time
                if config.debug:
                    print(info)
                    print(y_exp)
                    print(c_exp)
                    print(y_tot)
                    print(c_tot)
                    print(y_gen)
                    print(c_gen)
                    print(c_peak)
                    print(c_start)
                    print(c_stop)
                    print(tmax)
                    print(min_ins)

            if (status != last_status):
                utils.write_influx(flux_client, status, config.model + "_stat",
                                   config.influxdb_database)
                last_status = status
                if config.debug:
                    print(status)

            x = config.scan_interval - (float(time.strftime("%S")) %
                                        config.scan_interval)
            if x == 30:
                x = 0

            if (0.5 < x < 6):
                time.sleep(x - 0.05)

            utils.write_influx(flux_client, measurement, config.model,
                               config.influxdb_database)
            if (config.supla_api != ""):
                #read from supla
                supla()

            thread_time.insert(0, time.time() - last_loop)
            if len(thread_time) > 5:
                thread_time.pop()

            thread_mean = sum(thread_time) / len(thread_time)
            if (int(time.time()) > midnight):
                daily = {}
                daily['Insulation'] = float(min_ins)
                min_ins = 1000
                daily['Temp'] = float(tmax)
                tmax = 0
                s = 'SELECT cumulative_sum(integral("power90")) /3600 * 0.82  as power90, cumulative_sum(integral("power10")) /3600 * 0.82  as power10, cumulative_sum(integral("power")) /3600 * 0.82  as power FROM "forcast" WHERE time > now() -22h group by time(1d)'
                zeros = flux_client.query(s, database=config.influxdb_database)
                m = list(zeros.get_points(measurement="forcast"))
                daily['f_power'] = float(m[0]['power'])
                try:
                    daily['f_power90'] = float(m[0]['power90'])
                    daily['f_power10'] = float(m[0]['power10'])

                except Exception as e:
                    print("main 3 error: %s" % str(e))
                    daily['f_power90'] = float(m[0]['power'])
                    daily['f_power10'] = float(m[0]['power'])

                if config.debug:
                    print(midnight)
                    print(c_gen)
                    print(y_gen)
                    print(c_peak)

                if y_exp != 0 and y_tot != 0:
                    daily['P_Load'] = float(c_gen - (c_exp - y_exp) +
                                            (c_tot - y_tot))

                daily['P_daily'] = float(c_gen)
                c_gen = 0
                if (y_exp != 0):
                    daily['P_Exp'] = float(c_exp - y_exp)

                y_exp = c_exp
                if (y_tot != 0):
                    daily['P_Grid'] = float(c_tot - y_tot +
                                            config.extra_load * 24 / 1000)

                y_tot = c_tot
                daily['P_peak'] = float(c_peak)
                daily['Start'] = c_start
                daily['Shutdown'] = c_stop
                utils.write_influx(flux_client, daily, config.model + "_daily",
                                   config.influxdb_longterm,
                                   (midnight - 24 * 3600) * 1000000000)
                if config.debug:
                    print(time.ctime(midnight - 24 * 3600))
                    print(daily)

                utils.send_measurements(midnight - 24 * 3600, midnight,
                                        flux_client)
                midnight = int(
                    time.mktime(
                        time.strptime(
                            time.strftime("%m/%d/%Y ") + " 23:59:59",
                            "%m/%d/%Y %H:%M:%S"))) + 1

            if (int(time.time()) > forcast_time):
                forcast_time = forcast()

        else:
            sleep = config.scan_interval - (
                float(time.strftime("%S")) %
                config.scan_interval) - thread_mean * 1.1
            if sleep == config.scan_interval:
                sleep = 0.0

    #       sleep = 0.95 * (config.scan_interval - (time.time() - last_loop)) + 0.05
            if ((int(time.time() + sleep)) > midnight):
                sleep = midnight - int(time.time())

            if sleep < 0.16:
                sleep = 0.1

    #       if sleep < 0:
    #           sleep = 1
            time.sleep(sleep)

        try:
            pass

        except Exception as e:
            print("main 4 error: %s" % str(e))
            ok = False
            return -1
Esempio n. 48
0
class influxIO(object):

    try:
        ## Import failed
        if (PrivateImportError):
            raise IOError(PrivateImportError)

        if (ImportError):
            raise IOError(ImportError)
    # #################################################################################################
    # # Function: ' Constructor '
    ## \details Initialization of the influxIO class
    #   \param[in]  self the object pointer
    #   \param[in]  _host, _port InfluxDB server address
    #   \param[in]  _username, _password login credentials
    #   \param[in]  _database default database name
    #   \param[in]  _gzip enable gzip compression
    #   \param[in]  logger logging handle (unused here)
    #   \return -
    # #################################################################################################

        def __init__(self, _host, _port, _username, _password, _database,
                     _gzip, logger):

            #self.log = logger.getLogger('InfluxHandler')

            self.host = _host
            self.port = _port
            self.username = _username
            self.password = _password
            self.database = _database
            self.gzip = _gzip
            self.influxdb_client = None

    # # End of function: ' Constructor ' ##############################################################

    # #################################################################################################
    # # Function: ' Destructor '
    # #################################################################################################
    #def __del__(self):

    # # End of function: ' Destructor ' ###############################################################

    # #################################################################################################
    # # Functions
    # # Prototypes
    # # if __name__ == '__main__':
    # #################################################################################################

    # #################################################################################################
    # #  Function: '_init_influxdb_database '
    ## \details     Initializes the existing database, or creates a new one.
    #   \param[in]     -
    #   \return          -
    # #################################################################################################

        def _init_influxdb_database(self, _database, callee):

            # close connection if reload
            if self.influxdb_client is not None:
                self.influxdb_client.close()
                time.sleep(1)  #Login

            ver = None
            try:
                self.influxdb_client = InfluxDBClient(host=self.host,
                                                      port=self.port,
                                                      username=self.username,
                                                      password=self.password,
                                                      database=_database,
                                                      gzip=self.gzip)
                ver = self.influxdb_client.ping()
                print("InfluxDB Version: {}".format(ver))

                databases = self.influxdb_client.get_list_database()
                if (len(
                        list(
                            filter(lambda x: x['name'] == _database,
                                   databases))) == 0):
                    print("Erstelle Datenbank: {}".format(_database))
                    self.influxdb_client.create_database(_database)

                    if (callee == 'VrmGetData'):
                        print("Setzte Retention Policy: {}".format(_database))
                        #self.influxdb_client.alter_retention_policy('daily', database = self.database, duration = '52w', replication = 0, default = True)
                        #self.influxdb_client.create_retention_policy('sechs_monate', database = _database, duration = '26w', replication = 1, default = True)
                        self.influxdb_client.create_retention_policy(
                            'daily',
                            database=_database,
                            duration='52w',
                            replication=1,
                            default=True)

                        #print("Setzte Continuous query: {}".format(_database))
                        #select_clause = 'SELECT mean("AcPvOnGridPower") INTO "PvInvertersAcEnergyForwardDay" FROM "system" WHERE ("instance" = "Gateway") GROUP BY time(1d)'
                        #self.influxdb_client.create_continuous_query('PvDay', select_clause, _database, 'EVERY 10s FOR 1d')

                self.influxdb_client.switch_database(_database)
                print("{} initialisiert die Datenbank: {}".format(
                    callee, _database))

                if (callee == 'VrmGetData'):
                    policyList = self.influxdb_client.get_list_retention_policies(
                        database=_database)
                    print("Retention Policies: {}".format(policyList))
                    queryList = self.influxdb_client.get_list_continuous_queries(
                    )
                    print("Continuous query: {}".format(queryList))

            except requestException.ConnectionError as e:
                print("ConnectionError (Init) : {}".format(e))
                #for info in sys.exc_info():
                #   print("{}".format(info))

            except:
                print("Start of sequence (Init)")
                for info in sys.exc_info():
                    print("{}".format(info))
                print("End of sequence\nOther error")

            return ver

    #        ping()  tests the connection and returns the Influx version.
    # close() closes the HTTP socket.

    # influx starts the console
    # DROP DATABASE EnergyAnzeige deletes this database

    # # End of function: ' _init_influxdb_database ' ####################################################

    # #################################################################################################
    # #  Function: '_isEmpty_influxdb_database '
    ## \details    checks for an empty database
    #   \param[in]     -
    #   \return          -
    # #################################################################################################

        def _isEmpty_influxdb_database(self):

            try:
                query = "SELECT count(*) FROM {}./.*/".format(
                    _conf.INFLUXDB_DATABASE)
                print(query)
                result1 = self.influxdb_client.query(query)
                print(result1)

            except:
                for info in sys.exc_info():
                    print("{}".format(info))

            return False

    # # End of function: ' _isEmpty_influxdb_database ' ####################################################

    # #################################################################################################
    # #  Function: '_send_sensor_data_to_influxdb '
    ## \details
    #   \param[in]     sensor_data
    #   \return          -
    # #################################################################################################

        def _send_sensor_data_to_influxdb(self, sensor_data):

            # Use a local json_body instead of monkey-patching the json module.
            json_body = [{
                "measurement": sensor_data.device,  # pvinverter
                "tags": {
                    "instance": sensor_data.instance  # Piko, SMA
                },
                "fields": {  # AcEnergyForwardDaySoFar: 1000.0
                },
                "time": sensor_data.timestamp
            }]
            jsDict = {}
            strFields = ''
            valueCnt = 0
            # 'key' avoids shadowing the builtin type().
            for key, value in zip_longest(sensor_data.type,
                                          sensor_data.value):
                jsDict.update({key: value})
                strFields = strFields + " " + str(key) + "=" + str(value)
                valueCnt += 1

            json_body[0]["fields"] = jsDict

            retVal = False
            try:
                retVal = self.influxdb_client.write_points(json_body)

            except requestException.ChunkedEncodingError as e:
                print(
                    "ChunkedEncodingError (Write): {}\nNumber of data points: {} e: {}".
                    format(json_body, valueCnt, e))
                #for info in sys.exc_info():
                #    print("{}".format(info))

            except DbException.InfluxDBServerError as e:
                print("ServerError (Write): {}\nNumber of data points: {} e: {}".format(
                    json_body, valueCnt, e))
                #for info in sys.exc_info():
                #    print("{}".format(info))

            except DbException.InfluxDBClientError as e:
                print("ClientError (Write): {}\nNumber of data points: {} e: {}".format(
                    json_body, valueCnt, e))
                #for info in sys.exc_info():
                #    print("{}".format(info))

            except requestException.ConnectionError as e:
                print("ConnectionError (Write): {}\nNumber of data points: {} e: {}".
                      format(json_body, valueCnt, e))
                #for info in sys.exc_info():
                #    print("{}".format(info))

            except:
                print("Start of sequence")
                for info in sys.exc_info():
                    print("{}".format(info))
                print(
                    "End of sequence\nOther error (Write): {}\nNumber of data points: {}"
                    .format(json_body, valueCnt))

            return retVal

    # # End of function: ' _send_sensor_data_to_influxdb ' ##############################################

    # #################################################################################################
    # #  Function: '_Query_influxDb '
    ## \details     Queries the database
    #   \param[in]     -
    #   \return          -
    # #################################################################################################

        def _Query_influxDb(self, queries, measurement, searchFor):

            try:
                retVal = []
                points = []
                results = []
                errQuery = ''
                errPoint = ''
                errResult = ''

                for query in queries:
                    errQuery = query
                    result = self.influxdb_client.query(query)
                    results.append(result)

                for result in results:
                    errResult = result
                    point = list(result.get_points(measurement))
                    points.append(point)

                for point in points:
                    errPoint = point
                    if (len(point) > 1):
                        for k in range(0, len(point)):
                            retVal.append(point[k][searchFor])
                    elif (len(point) > 0):
                        retVal.append(point[0][searchFor])
                    else:
                        retVal.append(0)
            except:
                for info in sys.exc_info():
                    print("{}".format(info))
                print("errQuery: {}".format(errQuery))
                print("errResult: {}".format(errResult))
                print("errPoint: {}".format(errPoint))

            return retVal

    # # End of function: ' _Query_influxDb ' ####################################################

##### Error handling #######################################################
    except IOError as e:
        print(
            'One of the libraries could not be loaded!\n{}!\n'.format(
                e))

    except:
        for info in sys.exc_info():
            print("Fehler: {}".format(info))
Esempio n. 49
0
from influxdb import InfluxDBClient
import random
import time

client = InfluxDBClient(host='localhost', port=8086)
client.create_database('pract')
client.switch_database('pract')

VAL = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J"]

fields = {x: 0 for x in VAL}

client.drop_measurement("events")

countStr = input("Enter the number of iterations: ")
count = int(countStr)

i = 0
while (i < count):
    i += 1
    client.write_points([{"measurement": "events", "fields": fields}])

    for a in fields:
        fields[a] = fields[a] + random.randint(-2, 2)
    time.sleep(1)
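
A hedged read-back sketch for the loop above, querying the most recent points written to the "events" measurement:

result = client.query('SELECT * FROM "events" ORDER BY time DESC LIMIT 5')
for point in result.get_points(measurement='events'):
    print(point)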
Esempio n. 50
0
import asyncio
from datetime import datetime
from xknx import XKNX
from influxdb import InfluxDBClient

client = InfluxDBClient('localhost', 8086, 'root', 'root', 'metrics')

try:
    client.create_database('metrics')
except Exception as e:
    print(e)


def get_current_time():
    return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')


def send_metric_datapoint(measurement, location, value, timestamp):
    json_body = [{
        "measurement": measurement,
        "tags": {
            "location": location
        },
        "time": timestamp,
        "fields": {
            "value": value
        }
    }]
    try:
        client.write_points(json_body, database="metrics")
    except Exception as e:
        print(e)
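
A hedged usage sketch; the measurement, location, and value are placeholders:

send_metric_datapoint('temperature', 'living_room', 21.5, get_current_time())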
Esempio n. 51
0
	except subprocess.CalledProcessError:
		print("Kapacitor is not yet ready. Checking again in 10 seconds.")
		time.sleep(10)
		continue
	break
print("Kapacitor is ready to accept data.")

for line in args.lines:
	for column in args.columns:
		print("Running UDF for (" + str(line) + ", " + str(column) + ")")

		client = InfluxDBClient(database = 'master')
		client.drop_database('master')
		
		initial_size = get_size()
		client.create_database('master')
		initial_time = datetime.now()

		f = open(args.file, "r")
		l = []
		bucket_size = 100
		if line >= 10000 or column >= 5000:
			bucket_size = 5
		read_line = []
		for i in tqdm(range(line * 2)):
			read_line.append( f.readline() )
		for x in tqdm(range(line)):
			s = read_line[x].split(" ")
			v = {}
			for y in range(column):
				v["a" + str(y)] = float(s[y + 1])
Esempio n. 52
0
class Output(cowrie.core.output.Output):

    def __init__(self):
        cowrie.core.output.Output.__init__(self)

    def start(self):
        try:
            host = CONFIG.get('output_influx', 'host')
        except Exception:
            host = ''

        try:
            port = CONFIG.getint('output_influx', 'port')
        except Exception:
            port = 8086

        try:
            ssl = CONFIG.getboolean('output_influx', 'ssl')
        except Exception:
            ssl = False

        self.client = None
        try:
            self.client = InfluxDBClient(host=host, port=port, ssl=ssl, verify_ssl=ssl)
        except InfluxDBClientError as e:
            log.err("output_influx: I/O error({0}): '{1}'".format(
                e.errno, e.strerror))
            return

        if self.client is None:
            log.err("output_influx: cannot instantiate client!")
            return

        if (CONFIG.has_option('output_influx', 'username') and
                CONFIG.has_option('output_influx', 'password')):
            username = CONFIG.get('output_influx', 'username')
            password = CONFIG.get('output_influx', 'password', raw=True)
            self.client.switch_user(username, password)

        try:
            dbname = CONFIG.get('output_influx', 'database_name')
        except Exception:
            dbname = 'cowrie'

        retention_policy_duration_default = '12w'
        retention_policy_name = dbname + "_retention_policy"

        if CONFIG.has_option('output_influx', 'retention_policy_duration'):
            retention_policy_duration = CONFIG.get(
                'output_influx', 'retention_policy_duration')

            match = re.search(r'^\d+[dhmw]{1}$', retention_policy_duration)
            if not match:
                log.err(("output_influx: invalid retention policy."
                         "Using default '{}'..").format(
                    retention_policy_duration))
                retention_policy_duration = retention_policy_duration_default
        else:
            retention_policy_duration = retention_policy_duration_default

        database_list = self.client.get_list_database()
        dblist = [str(elem['name']) for elem in database_list]

        if dbname not in dblist:
            self.client.create_database(dbname)
            self.client.create_retention_policy(
                retention_policy_name, retention_policy_duration, 1,
                database=dbname, default=True)
        else:
            retention_policies_list = self.client.get_list_retention_policies(
                database=dbname)
            rplist = [str(elem['name']) for elem in retention_policies_list]
            if retention_policy_name not in rplist:
                self.client.create_retention_policy(
                    retention_policy_name, retention_policy_duration, 1,
                    database=dbname, default=True)
            else:
                self.client.alter_retention_policy(
                    retention_policy_name, database=dbname,
                    duration=retention_policy_duration,
                    replication=1, default=True)

        self.client.switch_database(dbname)

    def stop(self):
        pass

    def write(self, entry):
        if self.client is None:
            log.err("output_influx: client object is not instantiated")
            return

        # event id
        eventid = entry['eventid']

        # measurement init
        m = {
            'measurement': eventid.replace('.', '_'),
            'tags': {
                'session': entry['session'],
                'src_ip': entry['src_ip']
            },
            'fields': {
                'sensor': self.sensor
            },
        }

        # event parsing
        if eventid in ['cowrie.command.failed',
                       'cowrie.command.input']:
            m['fields'].update({
                'input': entry['input'],
            })

        elif eventid == 'cowrie.session.connect':
            m['fields'].update({
                'protocol': entry['protocol'],
                'src_port': entry['src_port'],
                'dst_port': entry['dst_port'],
                'dst_ip': entry['dst_ip'],
            })

        elif eventid in ['cowrie.login.success', 'cowrie.login.failed']:
            m['fields'].update({
                'username': entry['username'],
                'password': entry['password'],
            })

        elif eventid == 'cowrie.session.file_download':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'url': entry.get('url'),
                'outfile': entry.get('outfile')
            })

        elif eventid == 'cowrie.session.file_download.failed':
            m['fields'].update({
                'url': entry.get('url')
            })

        elif eventid == 'cowrie.session.file_upload':
            m['fields'].update({
                'shasum': entry.get('shasum'),
                'outfile': entry.get('outfile'),
            })

        elif eventid == 'cowrie.session.closed':
            m['fields'].update({
                'duration': entry['duration']
            })

        elif eventid == 'cowrie.client.version':
            m['fields'].update({
                'version': ','.join(entry['version']),
            })

        elif eventid == 'cowrie.client.kex':
            m['fields'].update({
                'maccs': ','.join(entry['macCS']),
                'kexalgs': ','.join(entry['kexAlgs']),
                'keyalgs': ','.join(entry['keyAlgs']),
                'compcs': ','.join(entry['compCS']),
                'enccs': ','.join(entry['encCS'])
            })

        elif eventid == 'cowrie.client.size':
            m['fields'].update({
                'height': entry['height'],
                'width': entry['width'],
            })

        elif eventid == 'cowrie.client.var':
            m['fields'].update({
                'name': entry['name'],
                'value': entry['value'],
            })

        elif eventid == 'cowrie.client.fingerprint':
            m['fields'].update({
                'fingerprint': entry['fingerprint']
            })

            # cowrie.direct-tcpip.data, cowrie.direct-tcpip.request
            # cowrie.log.closed
            # are not implemented
        else:
            # other events should be handled
            log.err(
                "output_influx: event '{}' not handled. Skipping..".format(
                    eventid))
            return

        result = self.client.write_points([m])

        if not result:
            log.err("output_influx: error when writing '{}' measurement"
                    "in the db.".format(eventid))
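
A hedged sketch of an entry accepted by write(); the keys follow the handlers above, the values are placeholders, and a configured cowrie environment (CONFIG, the inherited sensor attribute) is assumed:

output = Output()
output.start()
output.write({
    'eventid': 'cowrie.login.failed',
    'session': 'a1b2c3d4',
    'src_ip': '198.51.100.7',
    'username': 'root',
    'password': 'hunter2',
})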
Esempio n. 53
0
        except Exception as e:
            print('Could not parse value for key ' + key)
            print(e)
            if key_type == "string":
                fields[key] = ''
            elif key_type == "float":
                fields[key] = 0.0
            elif key_type == "int":
                fields[key] = 0
    return fields


client = InfluxDBClient(INFLUX_HOST, INFLUX_PORT, INFLUX_USER, INFLUX_PW,
                        INFLUX_DB)

client.create_database(INFLUX_DB)

c = Consumer({
    'bootstrap.servers': KAFKA_BOOTSTRAP,
    'group.id': KAFKA_GROUP_ID,
    'default.topic.config': {
        'auto.offset.reset': 'smallest'
    },
    'max.poll.interval.ms': 600000
})

#c.assign(TopicPartition('0', 0))

c.subscribe([KAFKA_TOPIC])
running = True
while running:
Esempio n. 54
0
class InfluxDB:
    """
    This class will support InfluxDB database connections and operations.
    It includes methods to build the schema proposed to store on the database.
    """
    db_client = None
    table_results = None

    def __init__(self, setting):

        self.host = setting.database_info['host']
        self.port = setting.database_info['port']
        self.database = setting.database_info['database']

        self.local_connection = setting.database_info['local_connection'].lower() \
            in ['true', 'yes', 'y', '1']

        if not self.local_connection:
            self.username = setting.database_info['username']
            self.password = setting.database_info['password']

            self.ssl = setting.database_info['ssl'].lower() in [
                'true', 'yes', 'y', '1'
            ]

            self.verify_ssl = setting.database_info['verify_ssl'].lower() \
                in ['true', 'yes', 'y', '1']

        self.verbose_status = setting.verbose_status

        self.setting = setting

    def open_connection(self):
        """
        Method in charge of the connection creation with the InfluxDB database.
        In a local environment, it will remove the database content and
        tables related to the application to keep test consistency.
        :return:
        """

        if self.local_connection:
            # Local connection
            self.db_client = InfluxDBClient(self.host, self.port,
                                            database=self.database)

            # Begin - This will happen only on local execution
            if {'name': self.database} in self.db_client.get_list_database():
                self.db_client.delete_series(self.database,
                                             'telemetry_summary')
            else:
                self.db_client.create_database(self.database)
            # End - This will happen only on local execution

        else:
            # Remote connection
            self.db_client = InfluxDBClient(self.host,
                                            self.port,
                                            username=self.username,
                                            password=self.password,
                                            database=self.database,
                                            ssl=self.ssl,
                                            verify_ssl=self.verify_ssl)
            self.db_client.create_database(self.database)

        # It assures to be pointing to the target table after the connection
        self.db_client.switch_database(self.database)

    def fetch_results(self):
        """
        Method in charge of retrieving all the information
        from the database given the user's specification
        :return:
        """

        self.table_results = \
            self.db_client.query('SELECT * '
                                 'FROM "' + self.database + '"."autogen"."telemetry_summary"')

        # It sorts/organizes the elements got from the database
        if not isinstance(self.table_results, list):
            # It creates a list based on the content retrieved from the database
            self.table_results = list(
                self.table_results.get_points(measurement='telemetry_summary'))

            # InfluxDB uses its tags as keys in its built-in sorting, and the
            # switch ID tag is a string, which interferes with that ordering.
            # To get the records back in insertion order, we sort them by
            # time and service_id instead.
            self.table_results = sorted(self.table_results,
                                        key=lambda val:
                                        (val['time'], val['service_id']))
        else:
            # It creates a list based on the content retrieved from the database
            tmp = list()
            for result in self.table_results:
                tmp.append(
                    list(result.get_points(measurement='telemetry_summary')))

            self.table_results = copy.deepcopy(tmp)
            tmp.clear()

        # It returns/visualizes the records resultant content
        for table_result in self.table_results:
            print(table_result)

    def save_to_database(self, json_structure):
        """
        Method in charge of the JSON structure
        storing process to the database
        :param json_structure:
        :return: None
        """

        # It saves the information to the database
        written = self.db_client.write_points(json_structure)

        if self.verbose_status:
            if written:
                print("Packet information saved to the database!")
            else:
                print("Errors saving the information to the database!")

    def save_to(self, data):
        """
        Method that supports the schema instantiation
        to save the JSON dataset.
        :param data: JSON data set with the telemetry information
        :return: None
        """
        try:
            self.save_to_database(data)

            if self.verbose_status:
                # It will visualize all the information on the console
                self.fetch_results()

        except ValueError as value_error:
            raise ValueError from value_error
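
A hedged usage sketch; setting stands for any object exposing the database_info dict and verbose_status flag read in __init__, built here as a simple namespace with placeholder values:

from types import SimpleNamespace

setting = SimpleNamespace(
    database_info={'host': 'localhost', 'port': 8086,
                   'database': 'telemetry', 'local_connection': 'true'},
    verbose_status=False)
db = InfluxDB(setting)
db.open_connection()
points = [{'measurement': 'telemetry_summary',
           'tags': {'service_id': 's1'},
           'fields': {'value': 1.0}}]
db.save_to(points)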
Esempio n. 55
0
def influxDB(org_id, contact, msg, sendtype="mail"):
    client = InfluxDBClient(config.get("influxdb", "server"), config.get("influxdb", "port"), \
      config.get("influxdb","user"), config.get("influxdb", "passwd"), config.get("influxdb","database"), timeout=2)
    try:
        client.create_database(config.get("influxdb", "database"))
    except:
        return False

    try:
        # msg["数据"][0]["故障时长"] is the fault duration, wrapped in HTML-like tags.
        duration = msg["数据"][0]["故障时长"].split(">")[1].split("<")[0]
    except:
        return ("get duration failed")

    try:
        duration = convertDuration(duration)
    except:
        duration = 0.0
    if "IP" in msg:
        name = msg['IP'].split("\n")
    else:
        name = msg['名称'].split("\n")

    ownerlist = sorted(contact.replace(mail_suffix, "").split(","))
    owner = ",".join(ownerlist)

    for item in name:
        json_body = [{
            "measurement": "alert",
            "tags": {
                "org": org_id,
                "sendtype": sendtype,
                "subject": msg['主题'],
                "owner": owner,
                "alerttype": msg['类型'],
                "status": status,
                "name": item,
                "level": msg['严重性']
            },
            "fields": {
                "value": 1,
                "duration": duration,
            }
        }]
        filter(json_body, client)  # project-level helper, not the builtin filter()

    # Count alerts per individual owner
    for item in ownerlist:
        json_body = [{
            "measurement": "ownercount",
            "tags": {
                "org": org_id,
                "owner": item,
                "sendtype": sendtype
            },
            "fields": {
                "value": 1
            }
        }]
        client.write_points(json_body)

    # Alert volume statistics (splitting name on '\n' inflates the per-name counts, so the send count is tracked separately here)
    json_body = [{
        "measurement": "sendcount",
        "tags": {
            "org": org_id,
            "sendtype": sendtype
        },
        "fields": {
            "value": 1
        }
    }]
    client.write_points(json_body)
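
For clarity, a hedged sketch of the msg dict this function expects, reconstructed from the keys above (主题 = subject, 类型 = type, 严重性 = severity, 数据[0]["故障时长"] = fault duration, wrapped in markup as the split() calls require). All values are placeholders, and the module-level config, mail_suffix, status, convertDuration and filter helpers are assumed to be defined elsewhere:

msg = {
    'IP': '10.0.0.1\n10.0.0.2',
    '主题': 'disk usage alert',
    '类型': 'disk',
    '严重性': 'high',
    '数据': [{'故障时长': '<td>30m</td>'}],
}
influxDB('org-1', 'alice@example.com,bob@example.com', msg, sendtype='mail')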
Esempio n. 56
0
class TweetsProducer(KafkaProducer):
    '''
    Tweets Producer class inheriting from the KafkaProducer class to
    facilitate connection and interaction with a Kafka broker.

    This class fetches a continuous stream of tweets from Twitter's API
    and sends the text of these tweets into a Kafka topic for further
    processing.

    It also connects to InfluxDB as a time-series database to store some
    metadata from these tweets.
    '''
    def __init__(self, topic, *args, **kwargs):
        self.topic = topic

        self.influxdb_host = kwargs.pop('influxdb_host', 'localhost')
        self.influxdb_port = kwargs.pop('influxdb_port', 8086)
        self.influxdb_database = kwargs.pop('influxdb_database', None)
        self.influxdb_client = InfluxDBClient(host=self.influxdb_host,
                                              port=self.influxdb_port,
                                              username='******',
                                              password='******',
                                              database=self.influxdb_database)
        self.influxdb_client.create_database(self.influxdb_database)

        super().__init__(*args, **kwargs)

    def produce(self, stream_url, params, auth):
        """
        Stream a 1% sample from worldwide real-time tweets
        See Twitter Labs sample-stream docs for more details
        """
        response = requests.get(url=stream_url,
                                params=params,
                                auth=auth,
                                stream=True)

        for line in response.iter_lines():
            if line and line != b'Rate limit exceeded':
                line = json.loads(line)

                # Storing tweets' language
                data_point = [{
                    # "timestamp":
                    "measurement": self.influxdb_database,
                    "tags": {
                        "language": line['data']['lang'],
                    },
                    "fields": {
                        "id": line['data']['id']
                    }
                }]

                if geo_data := line.get('includes', {}).get('places'):
                    # If tweet is tagged in a specific location
                    if coordinates := line['data'].get('geo', {}).get(
                            'coordinates', {}).get('coordinates'):
                        lon, lat = coordinates
                    # Else use the generic location of the tweet
                    else:
                        lon1, lat1, lon2, lat2 = geo_data[0]['geo']['bbox']
                        lon = (lon1 + lon2) / 2
                        lat = (lat1 + lat2) / 2

                    # Set country_code and place_name as tags so that we can
                    # filter and group by these values
                    data_point[0]["tags"]["country_code"] = geo_data[0][
                        "country_code"]
                    data_point[0]["tags"]["place_name"] = geo_data[0][
                        "full_name"]

                    # Latitudes and Longitudes should be set as fields to be
                    # retrieved with a SELECT statement
                    data_point[0]["fields"]["latitude"] = lat
                    data_point[0]["fields"]["longitude"] = lon

                try:
                    self.influxdb_client.write_points(data_point)
                    logging.info("Successfully stored ID '{}'.".format(
                        line['data']['id']))
                except (InfluxDBServerError, InfluxDBClientError) as e:
                    logging.info("Failed at storing ID '{}'. Error: {}".format(
                        line['data']['id'], e))

                # Queueing tweets into Kafka for further processing
                if line['data']['lang'] == 'en':
                    self.send(
                        self.topic,
                        json.dumps({
                            'id': line['data']['id'],
                            'tweet': line['data']['text']
                        }).encode())

                    logging.info("Queued tweet '{}'.".format(
                        line['data']['id']))
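
A hedged usage sketch; the topic, database, Kafka bootstrap address, and Twitter credentials are placeholders, and the stream URL follows the Twitter Labs sample-stream endpoint named in the docstring:

producer = TweetsProducer(
    'tweets',
    influxdb_host='localhost',
    influxdb_port=8086,
    influxdb_database='tweets_meta',
    bootstrap_servers='localhost:9092')
producer.produce(
    stream_url='https://api.twitter.com/labs/1/tweets/sample/stream',
    params={},
    auth=('API_KEY', 'API_SECRET'))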
Esempio n. 57
0
import openpyxl, serial
import numpy as np
from influxdb import InfluxDBClient
from time import time
from time import sleep

# Database configuration
client = InfluxDBClient(host='localhost',
                        port=8086,
                        username='******',
                        password='******')
client.create_database('Lighting')

mn1 = 'Lamp1'
mn2 = 'Lamp2'
mn3 = 'Lamp3'
mn4 = 'Lamp4'
mn5 = 'Lamp5'
mn6 = 'Lamp6'
mn7 = 'Lamp7'
mn8 = 'Lamp8'
mn9 = 'Lamp9'
mn10 = 'Window1'
mn11 = 'Window2'

data = []

# Load luminaire constants
doc = openpyxl.load_workbook('Constantes2.xlsx')
hoja = doc.get_sheet_by_name('Hoja1')
kij = np.zeros((9, 9))
Esempio n. 58
0
#main
if __name__ == '__main__':
  parser = get_args_parser()
  args = parser.parse_args()
  if args.help:
    parser.print_help()
    parser.exit()
  try:
    db = InfluxDBClient(args.host, args.port, args.user, args.password)
    db.get_database_list()
  except Exception, err:
    logging.exception(err)
    print err
    sys.exit()
  try:
    db.create_database(args.database)
  except:
    print "Error creating database"
  db.switch_db(args.database)
  #Generate dummy data
  backMilliseconds = 86000 * 1  # roughly one day, in seconds despite the name
  startTime = int(datetime.datetime.now().strftime('%s')) - backMilliseconds
  timeInterval = 60 * 1
  eventTypes = ["click", "view", "post", "comment"]
  cpuSeries = {
    'name': 'cpu_idle',
    'columns': ['time','value','value1','value2','value3','host'],
    'points': []
  }
  eventSeries = {
    'name': "customer_events",
Esempio n. 59
0
def send_influx_metrics(freezer):
    try:
        current_time = datetime.datetime.utcnow()

        if freezer.COMP_STATE == 1:
            COMP_STATE = "ON"
        else:
            COMP_STATE = "OFF"

        logger.info("Sending metrics to InfluxDB")
        client = InfluxDBClient(host=config.INFLUXDB["HOST"], port=8086, database='freezer', timeout=5)
        json_body = [
            {
                "measurement": "temperature",
                "tags": {
                    "type": "i2c",
                    "environment": config.ENVIRONMENT,
                },
                "time": current_time,
                "fields": {
                    "value": freezer.TEMP1
                }
            },
            {
                "measurement": "temperature",
                "tags": {
                    "type": "1w",
                    "environment": config.ENVIRONMENT,
                },
                "time": current_time,
                "fields": {
                    "value": freezer.TEMP2
                }
            },
            {
                "measurement": "humidity",
                "tags": {
                    "type": "i2c",
                    "environment": config.ENVIRONMENT,
                },
                "time": current_time,
                "fields": {
                    "value": freezer.HUMIDITY
                }
            },
            {
                "measurement": "compressor",
                "tags": {
                    "environment": config.ENVIRONMENT,
                    "state": COMP_STATE
                },
                "time": current_time,
                "fields": {
                    "value": 1
                }
            }
        ]
        client.create_database("freezer")
        client.write_points(json_body)

    except Exception as e:
        logger.error("An ERROR occurred while sending metrics to InfluxDB")
        print(e)
Esempio n. 60
0
#!/usr/bin/env python3
from influxdb import InfluxDBClient
'''
InfluxDB database initialization
'''

client = InfluxDBClient('localhost', 8086, 'root', 'root', 'racktor')
client.create_database('racktor')
client.create_user('racktor', 'racktor')
client.grant_privilege('all', 'racktor', 'racktor')
client.close()
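
A hedged verification sketch for the initialization above; both list calls are standard influxdb-python client methods:

client = InfluxDBClient('localhost', 8086, 'root', 'root', 'racktor')
print(client.get_list_database())  # should include {'name': 'racktor'}
print(client.get_list_users())     # should include the 'racktor' user
client.close()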