def insertIntoDB(message):
    client = InfluxDBClient(host='127.0.0.1', port=8086,
                            username='******', password='******',
                            database='dizzyAI')
    print(client.get_list_database())  # list all database names
    result = client.query("show measurements")  # list the measurements in the database
    print(result)
    json_body = [
        {
            "measurement": "leave_list",
            "tags": {
                "staff": "001"
            },
            # "time": datetime.datetime.now(),
            "fields": message
            # "fields": {
            #     "value": 0.64
            # }
        }
    ]
    print(json_body)
    client.write_points(json_body)
    result2 = client.query("select * from leave_list")
    print("Result: {0}".format(result2))
    client.close()

class DataGetter:
    def __init__(self, setting_file="db_settings.json"):
        """Load the settings file and connect to the DB"""
        # Get the current folder
        current_script_dir = "/".join(__file__.split("/")[:-3])
        path = current_script_dir + "/" + setting_file
        logger.info("Loading the DB settings from [%s]" % path)
        # Load the settings
        with open(path, "r") as f:
            self.settings = json.load(f)
        logger.info(
            "Connecting to the DB on [{host}:{port}] for the database [{database}]"
            .format(**self.settings))
        # Create the clients passing the settings as kwargs
        self.client = InfluxDBClient(**self.settings)
        self.dfclient = DataFrameClient(**self.settings)

    def __del__(self):
        """On exit / deletion close the client connection"""
        if "client" in dir(self):
            self.client.close()

    def exec_query(self, query):
        # Escape backslashes so they survive InfluxQL string parsing
        query = query.replace("\\", "\\\\")
        logger.debug("Executing query [%s]" % query)
        result = self.client.query(query, epoch="s")
        if type(result) == list:
            return [list(subres.get_points()) for subres in result]
        return list(result.get_points())

    def get_measurements(self) -> List[str]:
        """Get all the measurements in the DB"""
        result = [x["name"] for x in self.client.get_list_measurements()]
        logger.info("Found the measurements %s" % result)
        return result

    def drop_measurement(self, measurement: str) -> None:
        self.client.drop_measurement(measurement)

    def write_dataframe(self, df, measurement, tags):
        self.dfclient.write_points(df, measurement, tags, time_precision="s")

    def get_tag_values(self, tag, measurement=None, constraints: dict = None):
        if measurement:
            query = """SHOW TAG VALUES FROM "{measurement}" WITH KEY = "{tag}" """.format(
                measurement=measurement, tag=tag)
        else:
            query = """SHOW TAG VALUES WITH KEY = "{tag}" """.format(tag=tag)
        if constraints:
            query += " WHERE " + " AND ".join("%s = '%s'" % v
                                              for v in constraints.items())
        return [x["value"].strip("'") for x in self.exec_query(query)]

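# Illustrative usage sketch for DataGetter (not from the original source).
# It assumes a db_settings.json whose keys match InfluxDBClient's kwargs,
# e.g. {"host": "localhost", "port": 8086, "database": "mydb"}, and a tag
# named "host"; both are assumptions for demonstration only.
if __name__ == "__main__":
    getter = DataGetter("db_settings.json")
    for m in getter.get_measurements():
        # e.g. list the distinct hosts that reported each measurement
        print(m, getter.get_tag_values("host", measurement=m))
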
class BursaHistoricalDataPipeline:
    def __init__(self, host, user, password, port=8086):
        self.dbname = 'historical_data'
        self.client = InfluxDBClient(host, port, user, password, self.dbname)
        self.client.create_database(self.dbname)
        self.client.switch_database(self.dbname)

    def process_item(self, item, spider):
        if isinstance(item, HistoricalDataItem):
            json_body = historicalDataItemToInfluxJson(item)
            try:
                self.client.write_points(json_body, time_precision='ms')
            except influxdb.exceptions.InfluxDBServerError as e:
                logging.error("Item failed to write to InfluxDB (stockcode): " +
                              str(item['stockcode']))
            return item
        return item

    def spider_closed(self):
        self.client.close()

    @classmethod
    def from_crawler(cls, crawler):
        db_settings = crawler.settings.getdict("INFLUXDB_SETTINGS")
        if not db_settings:  # if no DB config is defined in the settings,
            raise NotConfigured  # raise an error
        host = db_settings['host']
        port = db_settings['port']
        user = db_settings['user']
        password = db_settings['password']
        return cls(host, user, password, port=port)

class InfluxPublisher(AbstractContextManager):
    """
    Manage the publication of data points to InfluxDB.

    Synchronous and potentially lossy method of publishing points
    via InfluxDBClient.
    """

    def __init__(self, credentials: str) -> None:
        self._client = InfluxDBClient(**read_credentials(credentials))

    def publish(self, js: measureT) -> None:
        """
        Publish a metric to the configured InfluxDB.

        Args:
            js: the data to publish (a list of measurements).

        Returns:
            Nothing.
        """
        self._client.write_points(js)

    def __enter__(self) -> 'InfluxPublisher':
        return self

    def __exit__(self, *args: Any) -> None:
        self._client.close()

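# Minimal usage sketch (illustrative): because InfluxPublisher is an
# AbstractContextManager, it composes with `with`, which guarantees close()
# on exit. The credentials path and measurement values are assumptions.
with InfluxPublisher("influx_credentials.json") as publisher:
    publisher.publish([{
        "measurement": "cpu_load",
        "tags": {"host": "server01"},
        "fields": {"value": 0.64}
    }])
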
def stats():
    client = InfluxDBClient(host=secrets.influxhost)
    client.switch_database(secrets.influxdb)
    loop = PAYMENTS
    while True:
        data = []
        tmpData = getRigData(accounts=secrets.accounts)
        if tmpData is not None:
            data = data + tmpData
        if loop == PAYMENTS:
            tmpData = getPaymentData(accounts=secrets.accounts)
            if tmpData is not None:
                data = data + tmpData
            loop = 0
        loop = loop + 1
        tmpData = getOctuneData(octuneAddresses=secrets.octuneAddresses)
        if tmpData is not None:
            data = data + tmpData
        print(data)
        try:
            client.write_points(data, time_precision='ms', protocol='line')
        except Exception as inst:
            print(inst)
        finally:
            client.close()
        time.sleep(SLEEP_TIME)

class Database:
    def __init__(self, host, port, dbName):
        self.host = host
        self.port = port
        self.client = InfluxDBClient(self.host, self.port)
        self.dbName = dbName
        try:
            self.client.create_database(dbName)
            print("Database " + dbName + " created...")
        except InfluxDBClientError:
            # If the database already exists, recreate it from scratch
            self.client.drop_database(dbName)
            self.client.create_database(dbName)

    def log(self, measurement, data):
        row = [
            {
                "measurement": measurement,
                "time": datetime.datetime.now(),
                "fields": {
                    "reading": data
                }
            }
        ]
        self.client.write_points(row, database=self.dbName)

    def close(self):
        self.client.close()

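# Usage sketch (illustrative; host, port, and names are assumptions):
# log a single reading through the wrapper, then release the connection.
db = Database(host='localhost', port=8086, dbName='sensors')
db.log('temperature', 21.5)  # writes one point with the field "reading"
db.close()
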
def create_db(self, host, port, user, password, dbname):
    client = InfluxDBClient(host, port, user, password, dbname)
    logger.debug("self = " + str(self))
    try:
        client.drop_database(dbname)
    except Exception as e:
        print("didn't drop old db")
        logger.error("Didn't drop old db: " + str(e))
        return False
    try:
        client.create_database(dbname)
    except Exception as e:
        print("db not created - {0}".format(e))
        return False
    except:
        print("unknown error opening db")
        return False
    print("db created")
    try:
        client.create_retention_policy('infinite retention', 'INF', 3,
                                       default=True)
    except Exception as e:
        print("retention policy not set - {0}".format(e))
        logger.error("retention policy not set - {0}".format(e))
        return False
    except:
        print("unknown error opening db")
        return False
    client.close()
    return True

def get_historical_data(offset_in_minutes):
    logger.info(
        f"querying influxdb to get historical data for {offset_in_minutes} minutes ago"
    )
    query = f"SELECT * FROM /{INFLUX_TOTAL_ENRGY_MEASUREMENTS}/ \
        WHERE \
        time >= now()-{offset_in_minutes+1}m \
        AND \
        time < now()-{offset_in_minutes-1}m;"
    client = InfluxDBClient(**INLUX_DB)
    result = client.query(query)
    points = result.get_points()
    # points is a generator of dicts such as
    # {'time': '2021-07-03T16:01:24.519000Z', 'item': 'l2TotalEnergy', 'value': 1250.38}
    # Based on the item naming, the second char (index=1) is the meter id:
    # 2 in the example above.
    results_dict = {int(p.get('item')[1]): p.get('value') for p in points}
    # The condition below handles the very first run, when no historical
    # data exists in InfluxDB yet.
    if not results_dict:
        total_energy_items_names = INFLUX_TOTAL_ENRGY_MEASUREMENTS.split('|')
        results_dict = {int(p[1]): 0 for p in total_energy_items_names}
    for key, value in results_dict.items():
        logger.info(f"for line '{key}' reading was {value} kWh")
    client.close()
    return results_dict

class InfluxItem(BasicSql):
    def __init__(self, kwargs):
        ip = kwargs['addr'].split(':')[0]
        port = kwargs['addr'].split(':')[1]
        self.client = InfluxDBClient(host=ip, port=port,
                                     database=kwargs['database'],
                                     username=kwargs['user'],
                                     password=kwargs['password'])
        self.jsonbody = {
            "measurement": kwargs['table'],
            "tags": {},
            "fields": {}
        }
        self.coltype = self.detect_data(kwargs['col'])

    def detect_data(self, col):
        '''
        :param col: column description dict
        :return: 1 if col = {'tags': xx, 'fields': xx};
                 2 if 'tags' and 'fields' are not identified explicitly
        '''
        a = 0
        if col.get("tags"):
            a += 1
        if col.get("fields"):
            a += 1
        if a == 2:
            self.jsonbody['tags'] = col['tags']
            self.jsonbody['fields'] = col['fields']
            return 1
        elif a == 0:
            # Numeric values default to fields; the rest become tags
            # when not explicitly defined.
            for j, k in col.items():
                if isinstance(k, (int, float)):
                    self.jsonbody['fields'][j] = k
                else:
                    self.jsonbody['tags'][j] = k
            return 2
        else:
            # a == 1: only one of tags/fields was supplied
            print('Missing one of tags/fields')

    def init_db(self):
        pass

    def insert(self, value):
        if self.coltype == 1:
            self.jsonbody['tags'] = value['tags']
            self.jsonbody['fields'] = value['fields']
        elif self.coltype == 2:
            for j in self.jsonbody['tags'].keys():
                self.jsonbody['tags'][j] = value.get(j)
            for j in self.jsonbody['fields'].keys():
                self.jsonbody['fields'][j] = value.get(j)
        self.client.write_points([self.jsonbody])

    def close(self):
        self.client.close()

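# Usage sketch (illustrative): the two `col` layouts that detect_data
# accepts. All connection values are assumptions, and BasicSql plus a
# reachable InfluxDB are assumed to exist.
explicit = InfluxItem({'addr': '127.0.0.1:8086', 'database': 'mydb',
                       'user': 'u', 'password': 'p', 'table': 'metrics',
                       'col': {'tags': {'host': ''}, 'fields': {'value': 0}}})
explicit.insert({'tags': {'host': 'node1'}, 'fields': {'value': 42}})  # coltype 1

implicit = InfluxItem({'addr': '127.0.0.1:8086', 'database': 'mydb',
                       'user': 'u', 'password': 'p', 'table': 'metrics',
                       'col': {'host': 'node1', 'value': 42}})  # numbers -> fields
implicit.insert({'host': 'node2', 'value': 7})  # coltype 2
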
def init_db():
    # Initialize the MySQL config tables
    from models.base import Meta
    from models.event_config import EventConfig
    Meta.metadata.create_all()

    # Initialize InfluxDB
    database = INFLUXDB_DATABASE_NAME
    client = InfluxDBClient(**INFLUXDB_CONFIG, database=database)
    # Create the watchdog database
    client.create_database(database)
    # Create the retention policies:
    # 1. sampled data, kept for 2 weeks (the default policy)
    client.create_retention_policy('rp_2_weeks', '2w', '1',
                                   database=database, default=True)
    # 2. aggregated data, kept for 5 weeks
    client.create_retention_policy('rp_5_weeks', '5w', '1', database=database)
    # 3. aggregated data, kept for 26 weeks
    client.create_retention_policy('rp_26_weeks', '26w', '1', database=database)
    # Create the continuous query.
    # The time field is the lower bound of each group interval; e.g. for
    # the group [2018-01-01T00:00:00Z, 2018-01-01T00:05:00Z) the generated
    # time is 2018-01-01T00:00:00Z.
    cq1 = (f'CREATE CONTINUOUS QUERY cq_5_minutes ON {database} BEGIN '
           'SELECT sum(value) AS value, last(end_time) AS end_time '
           'INTO rp_5_weeks.eventLog FROM rp_2_weeks.sampledLog '
           'GROUP BY event_key, time(5m) '
           'END')
    client.query(cq1, method='POST')
    client.close()

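# Illustrative sanity check (not from the original source; assumes the same
# INFLUXDB_CONFIG and INFLUXDB_DATABASE_NAME): list the retention policies
# and continuous queries that init_db() created.
client = InfluxDBClient(**INFLUXDB_CONFIG, database=INFLUXDB_DATABASE_NAME)
print(client.get_list_retention_policies(INFLUXDB_DATABASE_NAME))
print(client.query('SHOW CONTINUOUS QUERIES'))
client.close()
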
def run(self):
    a = 0
    client = InfluxDBClient('192.168.0.3', 8086, 'root', '123456', database='t')
    while True:
        time.sleep(0.5)
        localtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        w_json = [{
            "measurement": 'tqazx',
            "time": str(localtime),
            "tags": {
                'name': 1,
                'categories': 2
            },
            "fields": {
                'price': 1,
                'unit': 1,
                'plp': 4.9
            }
        }]
        client.write_points(w_json)
        print("hello world")
        print(client)
        a = a + 1
        if a > 1220:
            client.close()
            break

def get_session_db():
    client = InfluxDBClient(host='localhost', port=8086)
    client.switch_database('influxdb-test')
    try:
        yield client
    finally:
        client.close()

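# Usage sketch (illustrative): get_session_db is a generator that yields
# exactly once, so it can be wrapped with contextlib for `with` support;
# the finally clause guarantees client.close() on exit.
from contextlib import contextmanager

with contextmanager(get_session_db)() as client:
    print(client.get_list_measurements())  # close() runs when the block ends
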
def insertar_temperatura():
    client = InfluxDBClient(host='localhost', port=8086)
    client.switch_database('temperaturas')
    nok = True
    while nok:
        try:
            sensor = W1ThermSensor()
            temperature = sensor.get_temperature()
            nok = False
        except:
            nok = True
    fecha = time.asctime()
    print("The temperature at: {} is {} celsius".format(fecha, temperature))
    json_body = [{
        "measurement": "tempEvents",
        "tags": {
            "user": "******",
            "sensorID": "28-" + sensor.id,
            "site": "home-sitting room"
        },
        "time": fecha,
        "fields": {
            "temperatura": temperature
        }
    }]
    client.write_points(json_body)
    client.close()

def sendData(self):
    client = InfluxDBClient(**self.connectionCredentials)
    while True:
        d = self.dataPackedQueue.get()
        if d is None:
            break
        dataToSend, rp = d
        logger.debug(f"Sending {len(dataToSend)} values")
        try:
            assert (client.write_points(dataToSend, time_precision='u',
                                        retention_policy=rp, batch_size=10000))
        except:
            logger.error("Exception when saving the data", exc_info=True)
            # If the batch fails, try to send the values one by one
            for value in dataToSend:
                try:
                    assert (client.write_points([value], time_precision='u',
                                                retention_policy=rp))
                except:
                    logger.error(f"Ignoring value: {value}")
    client.close()

def query_tsdb(self, sensor_name):
    client = InfluxDBClient(cbpi.cache["config"]["influx_db_address"],
                            cbpi.cache["config"]["influx_db_port"],
                            cbpi.cache["config"]["influx_db_username"],
                            cbpi.cache["config"]["influx_db_password"],
                            cbpi.cache["config"]["influx_db_database_name"])
    query_prefix = 'select mean(value) from cbpi where time > now() - ' + \
        str(cbpi.cache["config"]["influx_db_start_relative"].__dict__["value"]) + \
        'd and \"name\" = \'' + sensor_name + '\''
    query_suffix = ' group by time(' + \
        str(cbpi.cache["config"]["influx_db_sampling_value"].__dict__["value"]) + \
        's) fill(previous)'
    if cbpi.cache["active_brew"] != "none":
        query = query_prefix + ' and brew = \'' + cbpi.cache["active_brew"] + '\'' + query_suffix
    else:
        query = query_prefix + query_suffix
    self.logger.debug("query: %s", query)
    result = client.query(query, epoch="ms")
    client.close()
    try:
        values = result.raw['series'][0]['values']
        self.logger.debug("Time series for [%s] is [%s]", sensor_name, values)
        return values
    except:
        self.logger.warning("Failed to fetch time series for [%s]", sensor_name)

class Connection():
    # Load config from config.yml
    def __init__(self, database=None):
        with open('config.yml') as f:
            config = load(f.read(), Loader=Loader)
        if 'influxdb' not in config.keys():
            raise ConfigError(
                logger.error(
                    'InfluxDB config not found in config.yml, please make sure all required InfluxDB config is included.'
                ))
        # Set default values if they are not present in the config
        self.host = config['influxdb'].get('host', 'localhost')
        self.port = config['influxdb'].get('port', 8086)
        self.username = config['influxdb'].get('username')
        self.password = config['influxdb'].get('password')
        self.database = database

    # Context manager for opening the connection
    def __enter__(self):
        if self.username:
            logger.debug(
                f'Opening database connection to InfluxDB host {self.username}@{self.host}:{self.port}.'
            )
        else:
            logger.debug(
                f'Opening database connection to InfluxDB host {self.host}:{self.port}.'
            )
        try:
            if self.username is None and self.password is None:
                self.client = InfluxDBClient(host=self.host, port=self.port,
                                             database=self.database)
            else:
                self.client = InfluxDBClient(host=self.host, port=self.port,
                                             username=self.username,
                                             password=self.password,
                                             database=self.database)
        except:
            logger.error(f'Unable to open connection to {self.host}:{self.port}')
            raise
        if self.database:
            self.client.create_database(self.database)
        return self.client

    # Close the connection
    def __exit__(self, exception_type, exception_value, traceback):
        logger.debug(f'Closing connection to {self.host}:{self.port}')
        self.client.close()

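# Usage sketch (illustrative; assumes a config.yml with an influxdb section
# exists, and the database name is an assumption): the context manager
# yields a ready InfluxDBClient and closes it on exit.
with Connection(database='metrics') as client:
    client.write_points([{
        "measurement": "heartbeat",
        "fields": {"value": 1}
    }])
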
async def publish_values():
    db = None
    while True:
        try:
            pm = PowerMaster(agent_url)
            db = InfluxDBClient(database="ups")
            active = False
            data = {}
            async for change in pm.subscribe():
                if not active:
                    active = True
                    log("connected")
                data = deep_update(data, change)
                log(data)
                measurements = create_measurements(data['status'])
                db.write_points(measurements)
        except Exception as e:
            print(type(e))
            print(e)
            try:
                db.close()
            except:
                pass
            await asyncio.sleep(3)

def setInfoInfluxDB(id, temperature, node):
    influxclient = InfluxDBClient('localhost', 8086, 'chengym', 'mylove093196', 'serverlist')
    try:
        time_array = time.localtime(time.time())
        other_way_time = time.strftime("%Y-%m-%d %H:%M:%S", time_array)
        json_body = [
            {
                "measurement": "data_collection_temperature",
                "tags": {
                    "id": id,
                    "node": node
                },
                "time": other_way_time,
                "fields": {
                    "temperature": temperature,
                    "time": other_way_time
                }
            }
        ]
        influxclient.write_points(json_body)  # write the point, creating the measurement if needed
        print("write succeeded")
        logging.info(json_body)
    except Exception as e:
        print(e)
        logging.error("error writing to influxdb")
    finally:
        influxclient.close()

def get(self, db):
    """Get Monthly Consumption"""
    client = InfluxDBClient('34.243.186.74', 8086, 'root', 'root', db)
    result = client.query(
        "SELECT INTEGRAL(value,1h)/1000 AS value_1m FROM data_monitor WHERE (time >= 1524672000000000000 AND (measure_type = 'RealP1')) GROUP BY time(30d)"
    )
    listperhour = list(result.get_points())
    somme = 0
    for i in listperhour:
        somme = somme + i["value_1m"]
    print(somme)
    date = datetime.datetime.strptime(listperhour[-1]["time"],
                                      '%Y-%m-%dT%H:%M:%SZ').date()
    print(
        datetime.datetime.strptime(i["time"], '%Y-%m-%dT%H:%M:%SZ').date(),
        datetime.datetime.strptime(i["time"], '%Y-%m-%dT%H:%M:%SZ').date())
    response = jsonify({
        "somme": math.ceil(somme * 100) / 100,
        "date": date
    })
    response.headers.add('Access-Control-Allow-Origin', '*')
    # close connection when finished
    client.close()
    return response

def get(self, db):
    """Returns Real-Time value"""
    client = InfluxDBClient('34.243.186.74', 8086, 'root', 'root', db)
    result = client.query(
        "SELECT LAST(value) FROM data_monitor WHERE (measure_type = 'RealP1')"
    )
    lastrealp1 = list(result.get_points())
    result = client.query(
        "SELECT LAST(value) FROM data_monitor WHERE (measure_type = 'RealP2')"
    )
    lastrealp2 = list(result.get_points())
    result = client.query(
        "SELECT LAST(value) FROM data_monitor WHERE (measure_type = 'RealP3')"
    )
    lastrealp3 = list(result.get_points())
    last = lastrealp1 + lastrealp2 + lastrealp3
    somme = sum(item['last'] for item in last)
    t = lastrealp1[0]['time']
    # t = (datetime.datetime.strptime(t, '%Y-%m-%dT%H:%M:%SZ')).time()
    t = datetime.datetime.strptime(t, '%Y-%m-%dT%H:%M:%SZ')
    output = {"time": str(t), "value": abs(somme)}
    response = jsonify(output)
    response.headers.add('Access-Control-Allow-Origin', '*')
    # close connection when finished
    client.close()
    return response

def beat(prev_hour):
    client = None
    try:
        client = InfluxDBClient('influxdb-service', 28086, database='device1')
        lst = [
            int(p['value']) for p in client.query(
                'show tag values with key=hour').get_points()
        ]
        lst.sort(reverse=True)
        if not lst:
            logging.info('there are no tag values with key hour')
            return
        if lst[0] == prev_hour:
            hours = lst[:1]
        else:
            hours = lst[:2]
            prev_hour = lst[0]
        inserts = []
        for h in hours:
            logging.info('get series on {}'.format(h))
            for s in client.get_list_series(database='device1',
                                            measurement='device_value',
                                            tags={'hour': h}):
                d = dict(t.split('=') for t in s.split(',')[1:])
                d['table'] = 'device_1_{}'.format(d['deviceIdentifier'])
                inserts.append(d)
        logging.info('insert {} records'.format(len(inserts)))
        return inserts, prev_hour
    except Exception as e:
        raise e
    finally:
        logging.info("close influxdb client connection")
        # Guard against the case where the client was never created
        if client is not None:
            client.close()

def main():
    # Get arguments
    args = get_args()
    # Load the configuration file
    config = load_yaml_file(args.config)
    # We will be running this container in the same docker-compose
    # configuration as InfluxDB. To ensure we provide enough time for
    # InfluxDB to start, we wait 10 seconds.
    time.sleep(10)
    # Make a connection to the InfluxDB database and
    # create a new database if it doesn't exist
    influx_client = InfluxDBClient(host=config['InfluxDb']['Host'],
                                   port=config['InfluxDb']['Port'])
    influx_client.create_database(config['InfluxDb']['Database'])
    influx_client.switch_database(config['InfluxDb']['Database'])
    # Create a scheduler and run the poller every minute, on the minute
    scheduler = AsyncIOScheduler()
    scheduler.add_job(poll, 'cron', minute='*',
                      args=(influx_client, config['Switch'],
                            config['InterfaceDesc']))
    scheduler.start()
    # Execution will block here until Ctrl+C is pressed.
    try:
        asyncio.get_event_loop().run_forever()
    except (KeyboardInterrupt, SystemExit):
        pass
    influx_client.close()

def performReadWithConditionTest(self, threadId):
    client = None
    threadStartTime = time.time()
    requestStartTime = None
    requestEndTime = None
    totalResponseDuration = None
    try:
        requestStartTime = time.time()
        client = InfluxDBClient(host=self.influxDbConfiguration.host,
                                port=self.influxDbConfiguration.port,
                                username=self.influxDbConfiguration.username,
                                password=self.influxDbConfiguration.password,
                                database=self.influxDbConfiguration.databaseName)
        client.switch_database(self.influxDbConfiguration.databaseName)
        client.query(random.choice(self.influxDbConfiguration.readWithConditionQuery))
        requestEndTime = time.time()
        isSuccess = True
    except Exception:
        requestEndTime = time.time()
        isSuccess = False
        traceback.print_exc()
    finally:
        if client is not None:
            client.close()
    totalResponseDuration = requestEndTime - requestStartTime
    threadEndTime = time.time()
    return TestResult(threadId, threadStartTime, threadEndTime,
                      requestStartTime, requestEndTime,
                      totalResponseDuration, isSuccess)

def handle(req):
    """Handle a request to the function.

    Args:
        req (str): request body
    """
    # Parse the NodeMCU data packet into JSON
    r = json.loads(req)
    influx_host = os.getenv("influx_host")
    influx_port = os.getenv("influx_port")
    influx_db = os.getenv("influx_db")
    influx_user = get_file("/var/openfaas/secrets/influx-user")
    influx_pass = get_file("/var/openfaas/secrets/influx-pass")
    client = InfluxDBClient(influx_host, influx_port, influx_user,
                            influx_pass, influx_db)
    try:
        client.create_database(influx_db)
    except:
        print("Database {} may already exist".format(influx_db))
    points = make_points(r)
    res = client.write_points(points)
    client.close()
    return json.dumps(res)

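# A minimal sketch of the get_file helper assumed above (the original
# implementation is not shown): it reads an OpenFaaS secret from disk and
# strips the trailing newline.
def get_file(path):
    with open(path, "r") as f:
        return f.read().strip()
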
def run(self):
    global xh
    global PZAI
    d = {"a": "1", "b": "2", "c": "33333"}
    a = 0
    client = InfluxDBClient('10.24.10.102', 8086, 'root', '123456', database='t1')
    while True:
        start = time.perf_counter()  # time.clock() was removed in Python 3.8
        time.sleep(0.1)
        localtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        w_json = [{
            "measurement": 't16',
            "time": a,
            "tags": {
                'name': 1,
                'categories': 2
            },
            "fields": ssss  # ssss and ts below are module-level names defined elsewhere
        }]
        client.write_points(w_json)
        end = time.perf_counter()
        print(str(end - start) + " seconds")
        a = a + 1
        ui.pc.setText(str(a))
        if ts == 0:
            client.close()
            break

def update_influx_database(measurement, value, tank_name):
    # log.debug(f'update_influx_database called with (Measurement: {measurement}, Value {value}, Tank Name: {tank_name})')
    client = InfluxDBClient(system_info.influx_host, system_info.influx_port,
                            system_info.influx_user, system_info.influx_password,
                            system_info.influx_dbname, timeout=2)
    json_body = [
        {
            "measurement": measurement,
            "tags": {
                "tank": tank_name
            },
            "fields": {
                "value": value
            }
        }
    ]
    try:
        client.write_points(json_body)
        client.close()
    except (exceptions.InfluxDBClientError, exceptions.InfluxDBServerError) as e:
        pass
        # log.warning(f'Failed to Update record in Influx database: {e}')
    if system_info.redundant_influx:
        client2 = InfluxDBClient(system_info.influx2_host, system_info.influx2_port,
                                 system_info.influx2_user, system_info.influx2_password,
                                 system_info.influx2_dbname, timeout=2)
        try:
            client2.write_points(json_body)
            client2.close()
        except (exceptions.InfluxDBClientError, exceptions.InfluxDBServerError) as e:
            pass

def create_influx():
    client = InfluxDBClient(host=config.influx.host,
                            port=config.influx.port,
                            username=config.influx.user,
                            password=config.influx.password)
    client.create_database(config.influx.name)
    client.close()

class InfluxDbPipeline(object):
    def __init__(self, influx_uri, influx_port, influx_db):
        self.influx_uri = influx_uri
        self.influx_port = influx_port
        self.influx_db = influx_db

    @classmethod
    def from_crawler(cls, crawler):
        return cls(influx_uri=os.getenv('INFLUX_URL',
                                        crawler.settings.get('INFLUX_URL')),
                   influx_port=os.getenv('INFLUX_PORT',
                                         crawler.settings.get('INFLUX_PORT')),
                   influx_db=os.getenv('INFLUX_DATABASE',
                                       crawler.settings.get('INFLUX_DATABASE')))

    def open_spider(self, spider):
        self.client = InfluxDBClient(host=self.influx_uri, port=self.influx_port)
        self.client.create_database(self.influx_db)
        self.client.switch_database(self.influx_db)

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        if item.get('points'):
            self.client.write_points(item['points'])
        return item

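# Illustrative settings.py fragment showing how this pipeline would be
# enabled in a Scrapy project; the module path and all values are
# assumptions, not part of the original source.
ITEM_PIPELINES = {
    'myproject.pipelines.InfluxDbPipeline': 300,
}
INFLUX_URL = 'localhost'
INFLUX_PORT = 8086
INFLUX_DATABASE = 'scrapy_metrics'
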
def _send_to_influxdb(self, data):
    """Helper to package and send to influxdb"""
    user = self._config['InfluxDB'].get('User', '')
    password = self._config['InfluxDB'].get('Password', '')
    dbname = self._config['InfluxDB'].get('DBname', '')
    host = self._config['InfluxDB'].get('Host', '')
    port = 8086
    json_body = [{
        "measurement": "gardenpi",
        "time": datetime.utcnow().isoformat(),
    }]
    json_body[0]['fields'] = data
    try:
        client = InfluxDBClient(host, port, user, password, dbname)
        client.write_points(json_body)
        client.close()
        # DEBUG print(json_body)
    except requests.exceptions.ConnectionError as err:
        logging.warning("Unable to post to InfluxDB")
        logging.warning(err)

def run(self):
    try:
        with open('/data/data.json') as data_file:
            data = json.load(data_file)
        for action in data:
            ticker = yf.Ticker(action['sigle'])
            history = ticker.history()
            last_quote = (history.tail(1)['Close'].iloc[0])
            json_body = [{
                "measurement": "cours",
                "tags": {
                    "nom": action['nom']
                },
                "fields": {
                    "price": last_quote
                }
            }, {
                "measurement": "patrimoine",
                "tags": {
                    "nom": action['nom'],
                },
                "fields": {
                    "quantite": action['patrimoine']['quantite'],
                    "prix_revient": action['patrimoine']['prix_revient']
                }
            }]
            influxdbClient = InfluxDBClient(host=self.influxHost,
                                            port=self.influxPort,
                                            database=self.influxDatabase)
            influxdbClient.write_points(json_body)
            influxdbClient.close()
    except Exception as e:
        print(e)

def get_xen_dom_details(self):
    # Heartbeat: 1-minute stats collection.
    # This is exposed as a custom Django command that is executed on
    # server start (vital/management/commands).
    server_configs = config.items('Servers')
    user = VLAB_User.objects.get(first_name='Cron', last_name='User')
    for key, server_url in server_configs:
        server = Xen_Server.objects.get(name=key)
        try:
            dom_detail_arr = []
            vms = XenServer(key, server_url).get_dom_details(user)
            for vm in vms:
                if 'Domain' not in vm['name'] and vm['name'].count('_') == 2:
                    # Parse the data and format it into a JSON document
                    # logger.debug('Dom Details : {}'.format(vm))
                    vm_details = vm['name'].split('_')
                    student = VLAB_User.objects.get(id=vm_details[0])
                    student_name = '{} {}'.format(student.first_name, student.last_name)
                    if 'b' in vm['state']:
                        vm_state = 'Blocked'
                    elif 'r' in vm['state']:
                        vm_state = 'Running'
                    elif 'p' in vm['state']:
                        vm_state = 'Paused'
                    else:
                        vm_state = 'Unknown'
                    tags = {}
                    tags['host'] = server.name
                    tags['student'] = student_name
                    tags['course'] = Course.objects.get(id=vm_details[1]).name
                    tags['vm_name'] = Virtual_Machine.objects.get(id=vm_details[2]).name
                    tags['state'] = vm_state
                    fields = {}
                    fields['cpu_secs'] = int(vm['cpu_secs'])  # was long(), a Python 2 builtin
                    fields['cpu_per'] = float(vm['cpu_per'])
                    fields['memory'] = int(vm['mem'])  # was long()
                    fields['mem_per'] = float(vm['mem_per'])
                    fields['vcpus'] = int(vm['vcpus'])
                    fields['networks'] = int(vm['nets'])
                    timestr = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
                    dom_detail = {}
                    dom_detail['measurement'] = 'vm_details'
                    dom_detail['tags'] = tags
                    dom_detail['time'] = timestr
                    dom_detail['fields'] = fields
                    dom_detail_arr.append(dom_detail)
            if dom_detail_arr:
                c = InfluxDBClient(host='localhost', port=8086)
                c.switch_database('xen_dom_stats')
                c.write_points(dom_detail_arr)
                c.close()
        except Exception as e:
            logger.error(key + ' ' + str(e))

class InfluxDB(object):
    def __init__(self):
        """Establish a connection to the InfluxDB."""
        self.connect()

    def connect(self):
        self.client = InfluxDBClient(host='127.0.0.1', port=8086,
                                     username='******', password='******',
                                     database='mydb')

    def send_data(self, json_body):
        self.client.write_points(json_body)

    def close(self):
        self.client.close()

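# Usage sketch (illustrative; measurement and field names are assumptions):
# write one point through the wrapper, then release the connection.
db = InfluxDB()
db.send_data([{
    "measurement": "reading",
    "fields": {"value": 0.5}
}])
db.close()
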
def send_stats_to_influxdb(self, server):
    """
    Send stats to InfluxDB for Grafana visualization.

    :param server: server instance with all the values from the Xen machines
    """
    timestr = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
    json_body = [
        {
            "measurement": "used_memory",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": server.used_memory}
        },
        {
            "measurement": "no_of_students",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": server.no_of_students}
        },
        {
            "measurement": "no_of_courses",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": server.no_of_courses}
        },
        {
            "measurement": "no_of_vms",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": server.no_of_vms}
        },
        {
            "measurement": "utilization",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": round(server.utilization, 5)}
        },
        {
            "measurement": "status",
            "tags": {"host": server.name},
            "time": timestr,
            "fields": {"value": server.status}
        }
    ]
    c = InfluxDBClient(host='localhost', port=8086)
    c.switch_database('xen_stats')
    c.write_points(json_body)
    c.close()
