def update_pytomo_rrd(self):
    '''Insert data from the list of tuples (timestamp, parameter1, ...)
    to the rrd.

    Returns 1 when there is no data to insert, otherwise None.
    '''
    if not self.has_values:
        config_pytomo.LOG.warn('RRD data update aborted')
        return 1
    # insert into rrd all the values for the extracted parameters to plot
    # data[][TIMESTAMP_POSITION] is the timestamp
    # data[][:TIMESTAMP_POSITION] represents the parameters to plot
    for row in self.data:
        # transform timestamp to epoch in local time
        # TODO: check problems related to timezone
        timestamp = row[TIMESTAMP_POSITION]
        parameter_values = row[:TIMESTAMP_POSITION]
        # Build the format arguments directly: epoch timestamp first, then
        # the parameter values with None replaced by rrd unknown markers.
        # (The original rebuilt an identity lambda on every iteration just
        # to produce this tuple.)
        update_args = ((lib_database.time_to_epoch(timestamp),)
                       + tuple(format_null_values(*parameter_values)))
        try:
            rrdtool.update(self.rrd_file,
                           update_data_types(parameter_values) % update_args)
        except rrdtool.error as mes:  # py2 'except x, y' is a py3 SyntaxError
            config_pytomo.LOG.debug('Could not update the rrd with error'
                                    ' %s' % mes)
            continue
        # count the unknown (None) samples per parameter position
        for index, parameter in enumerate(parameter_values):
            if parameter is None:
                self.unknown_values[index] += 1
def reverse_populate(node, conf, archive):
    """Rebuild an RRD *archive* from the database rows stored for *node*.

    Aborts with exit(1) when the graph has no database model or when the
    archive file already exists.
    """
    if not hasattr(conf, 'db_model'):
        print("ERROR: Specified graph is not database-backed!")
        exit(1)
    if os.path.isfile(archive):
        print("ERROR: RRD exists for given graph. Please remove it before proceeding.")
        exit(1)
    items = conf.db_model.objects.filter(node=node).order_by("timestamp")
    # start the rrd slightly before the first sample
    RRA.create(conf, archive,
               start=int(time.mktime(items[0].timestamp.timetuple())) - 10)
    for item in items:
        values = []
        for x in conf.sources:
            v = getattr(item, x.name)
            # Check for None BEFORE str(): str(None) is the string "None",
            # never None, so the original 'U' fallback could never trigger.
            values.append(str(v) if v is not None else "U")
        rrdtool.update(
            archive,
            "%d:%s" % (int(time.mktime(item.timestamp.timetuple())),
                       ":".join(values)))
def create_rrd(sensor, initial_value=0):
    """Create the counter/consumption rrd for *sensor*.

    1 trigger cycle matches consumption of 1/revs_per_kWh.
    Counter is GAUGE (kWh); consumption is ABSOLUTE (W).
    Retention: 1 value/minute for 3 days, 1/day for 30 days,
    1/week for 10 years. Consolidation LAST for the counter,
    AVERAGE for consumption.
    """
    count_rrd = count_rrd_template + str(sensor) + ".rrd"
    print('Creating RRD: ' + count_rrd)  # py3: print statement -> function
    try:
        rrdtool.create(count_rrd,
                       '--no-overwrite',
                       '--step', '60',
                       'DS:counter:GAUGE:86400:0:1000000',
                       'DS:consum:ABSOLUTE:86400:0:1000000',
                       'RRA:LAST:0.5:1:4320',
                       'RRA:AVERAGE:0.5:1:4320',
                       'RRA:LAST:0.5:1440:30',
                       'RRA:AVERAGE:0.5:1440:30',
                       'RRA:LAST:0.5:10080:520',
                       'RRA:AVERAGE:0.5:10080:520')
    except Exception as e:
        print('Error ' + str(e))
    if initial_value != 0:
        # seed the database with the meter's current reading
        update = "N:%.2f:%.0f" % (initial_value, 0)
        rrdtool.update(count_rrd, update)
def set_polling(self, obj, value):
    """
    Stores the latest value of the metric for the given object.
    """
    cache.set(self._cache_key(obj), value, 7 * 86400)
    if not self.rrd_enabled:
        return
    filepath = self.rrd_path(obj)
    if not os.path.exists(filepath):
        heartbeat = getattr(settings, 'TIMEGRAPH_HEARTBEAT', 300)
        dirpath = os.path.dirname(filepath)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        # AVERAGE then MAX archives at 1/6/24/288-step resolution,
        # 600 rows each (up to 600 days of history)
        archives = ['RRA:%s:0.5:%d:600' % (cf, steps)
                    for cf in ('AVERAGE', 'MAX')
                    for steps in (1, 6, 24, 288)]
        rrdtool.create(filepath,
                       'DS:%s:GAUGE:%s:U:U' % (self.id, heartbeat),
                       *archives)
    # As the rrdupdate manpage says, the letter 'N' sets the update
    # time to the current time.
    rrdtool.update(filepath, str("N:%s" % value))
def update(self):
    """Update the historical data."""
    # Raw strings: '\s'/'\d' in plain literals are invalid escapes
    # (DeprecationWarning today, SyntaxError in future Python versions).
    cpu_line_re = re.compile(r"cpu\s+(\d+)\s+(\d+)\s+(\d+)")
    # 'with' closes the file even when StatsError is raised below
    # (the original leaked the handle on the error path).
    with open(DATA_SOURCE, "r") as f:
        line = f.readline()
        match = cpu_line_re.match(line)
        if not match:
            raise StatsError("cannot parse " + DATA_SOURCE)
        # All these values represent an accumulated time since boot.
        # The unit is jiffies (1/100 seconds).
        user, nice, system = (int(v) for v in match.groups())
        percpu_re = re.compile(r"cpu\d*")
        self.cpu_count = 0
        for line in f:
            if percpu_re.match(line):
                self.cpu_count += 1
    rrdtool.update(self.database,
                   "--template", "user:nice:system",
                   "N:%d:%d:%d" % (user, nice, system))
def run(self):
    """Main sensor loop: sample BME680/TSL2561 and push values to the rrd."""
    while self._running:
        self.bme680.get_sensor_data()
        self.values['temp'] = self.bme680.data.temperature
        self.values['tempcpu'] = self.cpu.read_temperature()
        self.values['humi'] = self.bme680.data.humidity
        self.values['airpressure'] = self.bme680.data.pressure
        self.values['lightness'] = self.tsl2561.lux()
        # air_quality_score may be None (e.g. during sensor burn-in);
        # 'is not None' instead of '!= None' (PEP 8)
        aqs = self.bme680.data.air_quality_score
        self.values['airquality'] = aqs if aqs is not None else 0
        readings = [self.values[key] for key in
                    ('temp', 'tempcpu', 'humi', 'airpressure',
                     'lightness', 'airquality')]
        # four trailing zero fields pad the template's unused data sources
        rrd_data = ("N:" + ":".join("{:.2f}".format(v) for v in readings)
                    + ":0:0:0:0")
        Log(rrd_data)
        rrdtool.update(RRDFILE, "--template", self.rrd_template, rrd_data)
        for _ in range(50):  # interruptible sleep
            if self._running:
                time.sleep(1)
                # brightness control needs higher frequency
                self.values['lightness'] = self.tsl2561.lux()
            else:
                break
def rrd_update(rrdfile, name, value, slope):
    """Create (if needed) and update a single-DS rrd file.

    Metrics with 'zero' slope are intentionally not recorded.
    """
    # fix annoying unicode issues
    rrdfile = str(rrdfile)
    if slope == 'zero':
        # for now don't care about invariants
        # (the original also assigned dstype='ABSOLUTE' here, but that
        # assignment was dead code before the early return)
        return
    dstype = 'GAUGE'  # 'both' and unknown slopes stay GAUGE
    if slope == 'positive':
        dstype = 'COUNTER'
    token = 'DS:' + name + ':' + dstype + ':60:U:U'
    if not os.path.exists(rrdfile):
        logging.info("Creating %s\n", rrdfile)
        rrdtool.create(
            rrdfile,
            '--step=20',
            token,
            # 1 point at 20s, 900 of them: 300 minutes = 5 hours
            'RRA:AVERAGE:0.5:1:900',
            # 3 points @ 20s = 60s = 1m, 1800 of them: 30 hours
            'RRA:AVERAGE:0.5:3:1800')
    svalue = str(value)
    logging.debug("Updating '%s' with value of '%s'", rrdfile, svalue)
    rrdtool.update(rrdfile, 'N:' + svalue)
def server_resource():
    """Poll SNMP resource stats for every active server and push to rrd."""
    # OID map is loop-invariant — build it once, not per server
    oid_map = (('system_load', ".1.3.6.1.4.1.2021.10.1.3.2"),
               ("idle_cpu_time", ".1.3.6.1.4.1.2021.11.11.0"),
               ("total_memory", ".1.3.6.1.4.1.2021.4.5.0"),
               ("total_memory_free", ".1.3.6.1.4.1.2021.4.6.0"))
    oids = " ".join(OrderedDict(oid_map).values())
    results = []
    for server in Server.query.all():
        if server.status.status:
            # SECURITY: server.ip is interpolated into a shell command line.
            # If server records can be attacker-influenced this allows shell
            # injection — prefer subprocess.run([...], shell=False).
            cmd = "snmpget -v2c -c public {0} {1} | awk {2}".format(
                server.ip, oids, "'{print $4}'")
            results.append((server, get_server_resource(cmd)))
    for server, r in results:
        r.wait()
        output = r.stdout.read()
        if output:
            lines = output.decode('utf-8').splitlines()
            system_load, idle_cpu_time, total_memory, total_memory_free = \
                map(float, lines)
            memory_used_pecentage = int(
                (total_memory - total_memory_free) / total_memory * 100)
            print(memory_used_pecentage)
            used_cpu_time = int(100 - idle_cpu_time)
            db_name = 'rrdtool/' + server.ip + '.rrd'
            now = datetime.now().timestamp()
            rrdtool.update(db_name, "{0}:{1}:{2}:{3}".format(
                now, memory_used_pecentage, used_cpu_time, system_load))
def update_all():
    """Collect all available temperature readings and push one rrd update.

    Sensors that return nothing are left out of the --template list, so
    the value string and the template stay index-aligned.
    """
    template = ""
    update = "N:"
    rpi_t = get_temp_rpi_internal()
    if rpi_t:
        template += "rpi:"
        update += "%f:" % rpi_t
    temper_t = get_temp_temper_usb()
    if temper_t:
        template += "usbtemper:"
        update += "%f:" % temper_t
    dht11_t, dht11_h = get_temp_dht11()
    if dht11_t:
        template += "dht11temp:"
        update += "%f:" % float(dht11_t)
    if dht11_h:
        template += "dht11hum:"
        update += "%f:" % float(dht11_h)
    out_temp = get_temp_outside()
    if out_temp:
        template += "outside:"
        update += "%f:" % out_temp
    # drop the trailing ':' from both strings
    update = update[:-1]
    template = template[:-1]
    if args.verbose > 1:
        # py3: print statement -> print() function
        print("DEBUG: rrdtool update %s --template %s %s"
              % (databaseFile, template, update))
    rrdtool.update(databaseFile, "--template", template, update)
def rrd_update(rrdfile, name, value, slope):
    """Record *value* into *rrdfile*, creating the database on first use."""
    rrdfile = str(rrdfile)  # avoid unicode path issues
    if slope == 'zero':
        # metrics with zero slope are deliberately skipped
        # (invariants are not tracked for now)
        dstype = 'ABSOLUTE'
        return
    ds_by_slope = {'both': 'GAUGE', 'positive': 'COUNTER'}
    dstype = ds_by_slope.get(slope, 'GAUGE')
    ds_spec = 'DS:%s:%s:60:U:U' % (name, dstype)
    if not os.path.exists(rrdfile):
        logging.info("Creating %s\n", rrdfile)
        rrdtool.create(rrdfile,
                       '--step=20',
                       ds_spec,
                       'RRA:AVERAGE:0.5:1:900',   # 20s points for 5 hours
                       'RRA:AVERAGE:0.5:3:1800')  # 1-minute points for 30 hours
    text_value = str(value)
    logging.debug("Updating '%s' with value of '%s'", rrdfile, text_value)
    rrdtool.update(rrdfile, 'N:' + text_value)
def updateInputData(self, inputstr):
    """Parse "port:value;port:value;..." input and record the new state."""
    data = inputstr.split(";")
    for port in data:
        port = port.split(":")
        if len(port) < 2:
            print("Invalid data:" + str(data))
            continue
        portid = int(port[0])
        portvalue = port[1]
        if self.iomapping.isInputPortDigital(portid):
            self.digitalinputstate[portid] = int(portvalue)
        else:
            self.analoginputstate[portid] = float(portvalue)
    if len(self.rrdpath) > 0:
        # Hold the lock with try/finally: the original never released it
        # when rrdtool.update raised, deadlocking every later update.
        self.rrdlock.acquire()
        try:
            updates = ":".join(str(self.analoginputstate[x.id])
                               for x in self.iomapping.analoginputs)
            rrdtool.update(self.rrdpath, "N:" + updates)
        finally:
            self.rrdlock.release()
    print("Received new digital and analog input state: %s %s\n"
          % (str(self.digitalinputstate), str(self.analoginputstate)))
    self.inputs_last_update = time.time()
def _generate_rrd(self):
    '''Create a test rrd filled with one day of random traffic counters.

    Returns the absolute path of the generated file.
    '''
    stime = int(time.time()) - 1 * 86400  # start one day ago
    dpoints = 288  # one point per 300s step
    fname = os.path.join(os.path.abspath(os.path.dirname('./')), 'test.rrd')
    rrdtool.create('test.rrd',
                   '--start', str(stime),
                   "DS:input:COUNTER:600:U:U",
                   "DS:output:COUNTER:600:U:U",
                   "RRA:AVERAGE:0.5:1:600",
                   "RRA:AVERAGE:0.5:6:700",
                   "RRA:AVERAGE:0.5:24:775",
                   "RRA:AVERAGE:0.5:288:797",
                   "RRA:MAX:0.5:1:600",
                   "RRA:MAX:0.5:6:700",
                   "RRA:MAX:0.5:24:775",
                   "RRA:MAX:0.5:444:797")
    ctime = stime
    # Renamed: 'input' shadowed the builtin. xrange -> range (py3 NameError).
    # '//' integer division: on py3, '/' produced floats which
    # random.randrange rejects.
    in_octets = 0
    out_octets = 0
    for _ in range(dpoints):
        in_octets += random.randrange(
            self.bandwidth // 2,
            self.bandwidth + self.bandwidth * 2) * 100
        out_octets += random.randrange(
            self.bandwidth // 2,
            self.bandwidth + self.bandwidth * 2) * 100
        ctime += 300
        rrdtool.update(fname, '%d:%d:%d' % (ctime, in_octets, out_octets))
    return os.path.join(os.path.abspath(os.path.dirname('./')), "test.rrd")
def update_rrd_simple(self, rrdfile, count):
    """Create *rrdfile* on first use, otherwise record *count* into it."""
    timestamp = int(time.time())
    if not os.path.isfile(rrdfile):
        # os.path.isfile replaces the original "try: open(rrdfile)" probe,
        # which leaked an open file handle whenever the file existed
        logger.error(": Creating %s.." % (rrdfile))
        rrdtool.create(rrdfile, '-b', str(timestamp), '-s300',
                       'DS:ds0:GAUGE:600:0:1000000',
                       'RRA:AVERAGE:0.5:1:800',
                       'RRA:AVERAGE:0.5:6:800',
                       'RRA:AVERAGE:0.5:24:800',
                       'RRA:AVERAGE:0.5:288:800',
                       'RRA:MAX:0.5:1:800',
                       'RRA:MAX:0.5:6:800',
                       'RRA:MAX:0.5:24:800',
                       'RRA:MAX:0.5:288:800')
    else:
        logger.info("Updating %s with value (Count=%s).."
                    % (rrdfile, str(count)))
        try:
            rrdtool.update(rrdfile, str(timestamp) + ":" + str(count))
        except Exception as e:  # py2 'except x, e' is a py3 SyntaxError
            logger.error("Error updating %s: %s" % (rrdfile, e))
def write(self, rrd_db_path, data):
    """Write one sample to an rrd database through rrdcached.

    type rrd_db_path: string
    param rrd_db_path: path to rrd database directory
    type data: dictionary
    param data: dictionary {metric name:value} with data to write
    """
    rrd_db_path = str(rrd_db_path)
    if not os.path.isfile(rrd_db_path):
        self._create_db(rrd_db_path)
    data_to_write = 'N'
    for s in self.source:
        # DS spec format assumed to be '...:<name>:<TYPE>:...'
        parts = s.split(':')
        cast = {'COUNTER': int, 'GAUGE': float}[parts[2]]
        try:
            data_to_write += ':%s' % cast(data[parts[1]])
        except Exception:
            # 'U' is rrdtool's unknown-value marker; the original appended
            # the literal string 'None', which rrdtool rejects as a value
            data_to_write += ':U'
    try:
        logger.debug('%s, %s, %s' % (time.time(), rrd_db_path, data_to_write))
        rrdtool.update(rrd_db_path,
                       "--daemon", "unix:/var/run/rrdcached.sock",
                       data_to_write)
    except rrdtool.error as e:  # py2 'except x, e' is a py3 SyntaxError
        logger.error('RRDTool update error:%s, %s' % (e, rrd_db_path))
def update_database():
    """Create the weather rrd on first use and append the current sample.

    Reads the module-level ``data`` dict filled by the sensor readers.
    """
    if not os.path.isfile("{}/data.rrd".format(basepath)):
        # NOTE(review): RRA lines are repeated after each DS here, but
        # rrdtool applies every RRA to all data sources — confirm the
        # duplicated archives are intended before changing this layout.
        rrdtool.create(
            "{}/data.rrd".format(basepath),
            "--step", "60",
            "DS:air_temperature:GAUGE:240:U:U",
            "RRA:AVERAGE:0.5:1:20160",
            "RRA:MAX:0.5:1:20160",
            "RRA:MIN:0.5:1:20160",
            "DS:dewpoint:GAUGE:240:U:U",
            "RRA:AVERAGE:0.5:1:20160",
            "RRA:MAX:0.5:1:20160",
            "RRA:MIN:0.5:1:20160",
            "DS:humidity:GAUGE:240:U:U",
            "RRA:AVERAGE:0.5:1:20160",
            "RRA:MAX:0.5:1:20160",
            "RRA:MIN:0.5:1:20160",
            "DS:wind_speed:GAUGE:240:U:U",
            "RRA:AVERAGE:0.5:1:20160",
            "RRA:MAX:0.5:1:20160",
            "RRA:MIN:0.5:1:20160",
            "DS:wind_direction:GAUGE:240:U:U",
            "RRA:AVERAGE:0.5:1:20160",
            "RRA:MAX:0.5:1:20160",
            "RRA:MIN:0.5:1:20160")
    global data
    try:
        rrdtool.update(
            "{}/data.rrd".format(basepath),
            "N:{:4f}:{:4f}:{:4f}:{:4f}:{:4f}".format(
                data["air_temperature"], data["dewpoint"],
                data["humidity"], data["wind_speed"],
                data["wind_direction"]))
        print("database updated.")  # py3: print statement -> function
    except Exception as e:
        print("exception {}".format(e))
    return
def log_data():
    """Coroutine: receive reading dicts and append each sample to the rrd.

    Missing keys and rrdtool failures are reported but do not stop the
    coroutine.
    """
    while 1:
        data = (yield)
        try:
            readings = data["readings"]
            params = data["parameters"]
            sample = "N:%f:%f:%f:%f" % (readings["temp"],
                                        params["temp"],
                                        readings["humidity"],
                                        params["humidity"])
            rrdtool.update(rrd_file, sample)
        except (KeyError, rrdtool.error) as details:
            print(details)
def collect_data(datapoints):
    """Sample GPU/CPU temperature and CPU frequency into the rrd.

    Takes *datapoints* samples, pacing each iteration to the rrd step.
    """
    for i in range(1, datapoints + 1):
        # record time to get the runtime of the loop and keep timing correct
        loop_start_time = time.time()
        # CPU temperature: sysfs reports millidegrees C
        # ('with' closes the files even if parsing raises)
        with open(path_cpu_temp, 'r') as file_cpu_temp:
            cpu_temp = float(file_cpu_temp.readline().rstrip()) / 1000
        # get GPU temperature from the vendor tool's output
        proc_get_gpu_temp = subprocess.Popen(
            [path_gpu_temp, path_gpu_temp_arg], stdout=subprocess.PIPE)
        proc_get_gpu_temp_result = proc_get_gpu_temp.stdout.read()
        print(proc_get_gpu_temp_result)  # py3: print() function
        gpu_temp = regex_gpu_temp.search(proc_get_gpu_temp_result).group(1)
        # get CPU frequency
        with open(path_cpu_freq, 'r') as file_cpu_freq:
            cpu_freq = int(file_cpu_freq.readline().rstrip())
        print('gpu_temp: %s\ncpu_temp: %s\ncpu_freq: %s'
              % (gpu_temp, cpu_temp, cpu_freq))
        rrdtool.update(path_rrd, 'N:%s:%s:%s' % (gpu_temp, cpu_temp, cpu_freq))
        # sleep the remainder of the rrd step interval
        time.sleep(rrd_res - (time.time() - loop_start_time))
def actualizarBase(nombrebase, comunidad, nombrehost):
    """Poll free RAM via SNMP once per second and record it in the rrd.

    Also dumps the rrd to XML after every update. Runs forever.
    """
    OIDparaRAM = "1.3.6.1.4.1.2021.4.6.0"  # UCD-SNMP memAvailReal
    carga_CPU = 0
    print(OIDparaRAM)
    while 1:
        # query the MIB node (community, hostname, OID)
        carga_CPU = int(consultaSNMP(comunidad, nombrehost, OIDparaRAM))
        valor = "N:" + str(carga_CPU)
        print(valor)
        baseRRD = nombrebase + ".rrd"
        baseXML = nombrebase + ".xml"
        rrdtool.update(baseRRD, valor)
        rrdtool.dump(baseRRD, baseXML)
        time.sleep(1)
        # NOTE: the original then did "if ret: print(rrdtool.error());
        # time.sleep(300)" — but 'ret' was never assigned anywhere, so the
        # first iteration raised NameError. That dead block is removed.
def updater_rrd(rrdfile_name, value):
    """ updater_rrd: update your existing rrd with the given values
    Args:
        rrdfile_name (string): it is the name of your file/rrd
        value (array as a list): the values for the rrd
    Returns:
        update_status_msg: string that says if the update was successful or failed
    """
    # Scalar input: single update, return immediately.
    if not isinstance(value, list):
        try:
            rrdtool.update(rrdfile_name, value)
            return f"success: {rrdfile_name}: was updated successfully"
        except Exception as e:
            return f"error: rrd update error: {sys.exc_info()[1]} \n{e}"
    # List input: apply every entry; the last outcome wins the message.
    update_status_msg = ""
    counter = 0
    for entry in value:
        try:
            rrdtool.update(rrdfile_name, entry)
            update_status_msg = f"success: {rrdfile_name} was updated successfully"
            counter = counter + 1
        except Exception as e:
            update_status_msg = f"error: rrd update error: {sys.exc_info()[1]}, \n{e}"
    return update_status_msg
def write_rrd(cust, *argv):
    """Append the two flow counters in *argv* to the customer billing rrd."""
    rrdfile = rrdpath + cust + "_flow/billing.rrd"
    try:
        # py3: print statement -> print() function
        print('rrdtool update %s:%s:%s' % (rrdfile, argv[0], argv[1]))
        rrdtool.update(rrdfile, 'N:%s:%s' % (argv[0], argv[1]))
    except Exception:  # narrowed from bare except (don't trap SystemExit)
        print('update failed of ' + rrdfile)
def save(self):
    """Record the current temperature/pressure sample and, on a
    significant change, send a notification mail.

    Relies on instance state: actTemp/actPress (current readings),
    min/max/last variants of both, and myLogger.
    """
    # write values to round robin DB
    rrdtool.update('jrWetter.rrd', 'N:%s:%s' % (self.actTemp, self.actPress))
    # print cmd
    # rrdtool.update(cmd)
    # save only if diff greater than delta
    deltaTemp = 0.5
    # track running min/max of the temperature
    if (self.actTemp < self.minTemp):
        self.minTemp = self.actTemp
    if (self.actTemp > self.maxTemp):
        self.maxTemp = self.actTemp
    # temperature moved more than deltaTemp since the last accepted value?
    if (self.actTemp < self.lastTemp - deltaTemp) or (self.actTemp > self.lastTemp + deltaTemp):
        # eliminate jitter (diff > 10 degrees)
        if abs(self.actTemp - self.lastTemp) < 10:
            self.myLogger.info('TempChange: ' + str(self.actTemp) + ' Druck: ' + str(self.actPress))
            # send Mail to Robert
            myMail = JrMail()
            myMail.sendTempMail(self.actTemp, self.minTemp, self.maxTemp)
        else:
            self.myLogger.debug(
                'Temperatur jitter: actTemp: ' + str(self.actTemp) + ' lastTemp: ' + str(self.lastTemp))
        self.lastTemp = self.actTemp
    # same pattern for pressure, with a 1 hPa delta and no jitter filter
    deltaPress = 1
    if (self.actPress < self.minPress):
        self.minPress = self.actPress
    if (self.actPress > self.maxPress):
        self.maxPress = self.actPress
    if (self.actPress < self.lastPress - deltaPress) or (self.actPress > self.lastPress + deltaPress):
        self.myLogger.info('Temp: ' + str(self.actTemp) + ' DruckChange: ' + str(self.actPress))
        self.lastPress = self.actPress
        myMail = JrMail()
        myMail.sendPressMail(self.actPress, self.minPress, self.maxPress)
def update(self, value):
    """Store *value* at the current timestamp.

    Returns True on success, False on failure. (The original returned
    None on success and False on failure — both falsy, so callers could
    not distinguish the outcomes.)
    """
    try:
        rrdtool.update(self.filename,
                       str(int(time.time())) + ":" + str(value))
        return True
    except Exception:  # narrowed from bare except
        print("Chyba pridavania do rrd")
        return False
def generarGraficas(interfaz, comunidad, host):
    """Poll interface in/out octet counters via SNMP and log them to the
    rrd (plus an XML dump) once per second. Runs forever.
    """
    while 1:
        total_input_traffic = int(parseResultAfterEquals(
            snmpGet(comunidad, host, OID_INPUT_TRAFFIC + str(interfaz))))
        total_output_traffic = int(parseResultAfterEquals(
            snmpGet(comunidad, host, OID_OUTPUT_TRAFFIC + str(interfaz))))
        valor = ("N:" + str(total_input_traffic) + ':'
                 + str(total_output_traffic))
        print(valor)  # py3: print statement -> function
        rrdtool.update('practica1.rrd', valor)
        rrdtool.dump('practica1.rrd', 'practica1.xml')
        time.sleep(1)
        # NOTE: the original ended with "if ret: print rrdtool.error();
        # time.sleep(300)" — 'ret' was never defined (NameError on the
        # first pass), so that dead block is removed.
def update(file_path, data, sock_path):
    """Push *data* to *file_path* through the rrdcached daemon listening
    on the unix socket *sock_path*; failures are logged, not raised."""
    LOG.debug('%s, %s, %s' % (time.time(), file_path, data))
    daemon_address = 'unix:%s' % sock_path
    try:
        rrdtool.update(file_path, '--daemon', daemon_address, data)
    except rrdtool.error:
        LOG.error('%s rrdtool update error: %s'
                  % (file_path, helper.exc_info()))
def main():
    """Read WDE1 sensor frames from the serial line and log them to rrd.

    A valid frame has 25 ';'-separated fields, starts with '$1' and ends
    with '0'. Empty fields become 'U' (rrd unknown); decimal commas
    become points.
    """
    logging.info('recsensors started')
    # open serial line
    ser = serial.Serial(port, 9600)
    if not ser.isOpen():
        logging.error("Unable to open serial port %s" % port)
        sys.exit(1)
    while True:  # idiomatic form of 'while(1==1)'
        line = ser.readline().strip()
        data = line.split(';')
        if (len(data) == 25 and data[0] == '$1' and data[24] == '0'):
            for i, val in enumerate(data):
                data[i] = ('U' if val == '' else val.replace(',', '.'))
            Taussen = data[3]
            Tkeller = data[4]
            RHaussen = data[11]
            RHkeller = data[12]
            if (Taussen == 'U') or (RHaussen == 'U'):
                # logging.warn() is deprecated -> warning()
                logging.warning("Aussen: no valid sensor reading")
            else:
                # absolute humidity and dew point from rel. humidity + temp
                AHaussen = str(feuchte.AF(float(RHaussen), float(Taussen)))
                DPaussen = str(feuchte.TD(float(RHaussen), float(Taussen)))
                logging.debug("Aussen: T %s°C, RH %s%%, AF %s g/m^3, TP %s°C"
                              % (Taussen, RHaussen, AHaussen, DPaussen))
                rrdtool.update(DATADIR + '/aussen.rrd',
                               'N:' + Taussen + ':' + RHaussen + ':'
                               + AHaussen + ':' + DPaussen)
            if (Tkeller == 'U') or (RHkeller == 'U'):
                logging.warning("Keller: no valid sensor reading")
            else:
                AHkeller = str(feuchte.AF(float(RHkeller), float(Tkeller)))
                DPkeller = str(feuchte.TD(float(RHkeller), float(Tkeller)))
                logging.debug("Keller: T %s°C, RH %s%%, AF %s g/m^3, TP %s°C"
                              % (Tkeller, RHkeller, AHkeller, DPkeller))
                rrdtool.update(DATADIR + '/keller.rrd',
                               'N:' + Tkeller + ':' + RHkeller + ':'
                               + AHkeller + ':' + DPkeller)
def updateRRD(agent, oids):
    """Poll each configured OID for *agent* once per second into its rrd,
    dumping an XML copy after each update. Stops when the host record
    disappears.
    """
    while 1:
        x = MongoAdmin.getHost(agent)
        if x is None:  # 'is None', not '== None' (PEP 8)
            break
        value = []
        for mib in oids:
            if mib.interface:
                # per-interface OID: append the interface index
                resp = snmpGet(x["community"], x["hostaddr"],
                               (mib.oid + "." + x["if"]))
            else:
                resp = snmpGet(x["community"], x["hostaddr"], mib.oid)
            if resp == "No Such Object currently exists at this OID":
                resp = "0"  # treat missing OIDs as zero
            value.append(resp)
        row = "N:" + ":".join(value)
        rrdtool.update("rrd/" + x["rrdName"] + ".rrd", row)
        rrdtool.dump("rrd/" + x["rrdName"] + ".rrd",
                     "xml/" + x["rrdName"] + ".xml")
        time.sleep(1)
def process_query(fbit_dir, fbit_query_name):
    """Run an fbitdump query for one flow directory and push the totals to
    rrdtool and OpenTSDB.

    Old-style asyncio coroutine (uses ``yield from``); must be driven by
    an event loop.
    """
    # the directory name ends in a YYYYmmddHHMMSS stamp -> unix timestamp
    dir_date = fbit_dir[-14:]
    unix_ts = dt.datetime.strptime(dir_date, "%Y%m%d%H%M%S").strftime("%s")
    query_cmd = "{} -C {} -R {} -A -N10 -q -o'{}' '{}'".format(
        config['DEFAULT']['fbitdump_bin'],
        config['DEFAULT']['fbitdump_config'],
        fbit_dir,
        config[fbit_query_name]['query_output'],
        config[fbit_query_name]['query_filter']
    )
    sub = asyncio.create_subprocess_exec(*shlex.split(query_cmd),
                                         stdout=asyncio.subprocess.PIPE);
    proc = yield from sub
    # first stdout line carries the ':'-separated totals (pkt : byt : fl)
    data = yield from proc.stdout.readline()
    line = data.decode('ascii').rstrip()
    #print(fbit_query.get('name'), line)
    if line:
        pkt, byt, fl = [x.strip() for x in line.split(':')]
        # rrd field order is flows, packets, bytes
        rrdtool.update("{}.rrd".format(fbit_query_name),
                       "{}:{}:{}:{}".format(unix_ts, fl, pkt, byt))
        # opentsdb stuff: one POST per metric
        tsdb_url = "http://localhost:4242/api/put/"
        tsdb_data_fl = {"metric": "ipfix.ipv6."+fbit_query_name+".flows",
                        "timestamp": unix_ts, "value": fl,
                        "tags": {"exporter": "invea-10g"}}
        requests.post(tsdb_url, data=json.dumps(tsdb_data_fl))
        tsdb_data_pkt = {"metric": "ipfix.ipv6."+fbit_query_name+".packets",
                         "timestamp": unix_ts, "value": pkt,
                         "tags": {"exporter": "invea-10g"}}
        requests.post(tsdb_url, data=json.dumps(tsdb_data_pkt))
        tsdb_data_byt = {"metric": "ipfix.ipv6."+fbit_query_name+".bytes",
                         "timestamp": unix_ts, "value": byt,
                         "tags": {"exporter": "invea-10g"}}
        requests.post(tsdb_url, data=json.dumps(tsdb_data_byt))
    yield from proc.wait()
def processlog():
    """Aggregate per-user rss/vsz/%cpu from ps and record one rrd per user."""
    pcpu = {}
    rss = {}
    vsz = {}
    file = os.popen("ps -e -o user,rss,vsz,pcpu")
    # read header
    line = file.readline()
    # reading all lines
    line = file.readline()
    while line:
        cols = line.split()
        if cols[0] == "121":
            cols[0] = "tor"  # map uid 121 back to the tor daemon
        try:
            rss[cols[0]] += int(cols[1])
            vsz[cols[0]] += int(cols[2])
            pcpu[cols[0]] += float(cols[3])
        except KeyError:  # first sample for this user
            rss[cols[0]] = int(cols[1])
            vsz[cols[0]] = int(cols[2])
            pcpu[cols[0]] = float(cols[3])
        line = file.readline()
    file.close()
    # NOTE(review): the original source was corrupted at this point
    # ('print "User: "******"N:" + ...'); reconstructed as one rrd
    # update per aggregated user — confirm against upstream history.
    for user in rss:
        data = ("N:" + str(rss[user] * 1024) + ":" + str(vsz[user] * 1024)
                + ":" + str(pcpu[user]))
        print(user, data)
        rrdtool.update('rrd/user-' + user + '.rrd', data)
def update(self, rrd_name, value):
    """Record *value* into *rrd_name* at the current time.

    The original ignored both arguments (hard-coded "test.rrd" and
    "N:21") and passed a Python list to rrdtool.update(), which expects
    individual string arguments.
    """
    rrdtool.update(rrd_name, "N:" + str(value))
def beanlog():
    """Record openvz beancounter held/fail values into the rrd."""
    if not os.path.exists("/proc/user_beancounters"):
        return  # we are not on an openvz system
    bean_check()
    # Read user_beancounters data
    file = os.popen("sudo /root/bin/bean.sh")
    #file = open('user_beancounters', 'r')
    line = file.readline()
    print(line, end='')  # py2 'print line,' -> suppress the newline
    held = {}
    fail = {}
    while line:
        line = file.readline()
        m = reline.match(line)
        if m:
            held[m.group(1)] = m.group(2)
            fail[m.group(1)] = m.group(3)
        # else:
        #     print("None: " + line)
    file.close()
    # Write the data: N:<held>:<fail> per tracked parameter
    data = "N"
    for param in parameters:
        data = data + ":" + held[param]
        data = data + ":" + fail[param]
    rrdtool.update('rrd/beancounters.rrd', data)
def update_graph_database(rrd_dir, type, n_source, n_binary):
    """Record source/binary counts for *type*, creating the rrd on the
    first failed update. A missing rrdtool binding (NameError) is a no-op."""
    if not rrd_dir:
        return
    rrd_file = os.path.join(rrd_dir, type.lower() + '.rrd')
    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
    try:
        rrdtool.update(*update)
    except rrdtool.error:
        # first update failed — assume the file is missing and create it
        create = [rrd_file,
                  "--step 300",
                  "--start 0",
                  "DS:ds0:GAUGE:7200:0:1000",
                  "DS:ds1:GAUGE:7200:0:1000",
                  "RRA:AVERAGE:0.5:1:599",
                  "RRA:AVERAGE:0.5:6:700",
                  "RRA:AVERAGE:0.5:24:775",
                  "RRA:AVERAGE:0.5:288:795",
                  "RRA:MAX:0.5:1:600",
                  "RRA:MAX:0.5:6:700",
                  "RRA:MAX:0.5:24:775",
                  "RRA:MAX:0.5:288:795"]
        try:
            rrdtool.create(*create)
            rrdtool.update(*update)
        except rrdtool.error as e:
            print(
                ('warning: queue_report: rrdtool error, skipping %s.rrd: %s'
                 % (type, e)))
    except NameError:
        pass
def updateDatabase(self, grid_power, solar_power):
    '''update rrd database with values'''
    # sample format: N:<grid>:<solar>
    sample = "N:%s:%s" % (grid_power, solar_power)
    rrdtool.update(self.rrd_power, sample)
def snmpResult2RRD3(host):
    """Write one rrd update per interface from the host's SNMP results.

    Interface names come from an optional .override ini file, falling
    back to host.ifnamesDict. Missing table indexes are stored as 'U'.
    """
    import functools  # local import: only needed for cmp_to_key below
    dOid_to_Res = dict()
    for i, o in enumerate(host.oids):
        dOid_to_Res[o] = host.results[i]
    # sort oids to guarantee proper o.alias's order
    # (py3 fix: dict.keys() is a view without .sort(), and list.sort()
    # no longer accepts a cmp function — use sorted() + cmp_to_key)
    lOids = sorted(dOid_to_Res.keys(),
                   key=functools.cmp_to_key(commonFuncs.sortOids))
    inioverride = '%s/%s.override' % (host.hostdir, host.ifacesfile)
    if os.access(inioverride, os.F_OK):
        ifnamesOverride = ConfigParser.ConfigParser()
        ifnamesOverride.read(inioverride)
        ifnamesPairs = [(int(index), value)
                        for index, value in ifnamesOverride.items('ifnames')]
    else:
        ifnamesPairs = host.ifnamesDict.items()
    for index, ifname in ifnamesPairs:
        lDsValues = []
        for o in lOids:
            if dOid_to_Res[o].get(index):
                value = dOid_to_Res[o][index]
            else:
                # Arggh! Some HC tables may not contain some indexes.
                value = 'U'
            lDsValues.append(value)
        # try to update RRD base
        updstr = 'N:%s' % ':'.join([str(v) for v in lDsValues])
        rrdbase = '%s/%s.rrd' % (host.hostdir,
                                 commonFuncs.cleanIfName(ifname))
        try:
            rrdtool.update(rrdbase, updstr)
        except Exception as why:  # py2 'except x, y' is a py3 SyntaxError
            print("Error in update (%s, %s) - %s"
                  % (host.hostname, index, why))
def update_host_rrd(host_ip, m_type=None):
    """Create or update the per-host rrd with current resource usage.

    m_type None selects the full host metric set (incl. temperature and
    power); any other value selects the reduced six-metric set.
    Errors are logged, never raised.
    """
    try:
        rrd_file = get_rrd_file(host_ip.replace(".", "_"))
        rrd_logger.info(rrd_file)
        timestamp_now = time.time()
        rrd_logger.info(timestamp_now)
        if not os.path.exists(rrd_file):
            rrd_logger.warn("RRD file (%s) does not exists" % (rrd_file))
            rrd_logger.warn("Creating new RRD file")
            if m_type is None:
                create_rrd(rrd_file, "host")
            else:
                create_rrd(rrd_file)
        else:
            rrd_logger.info("updating RRD file")
            if m_type is None:
                rrd_logger.debug("host_ip is" + str(host_ip))
                host_stats = get_host_resources_usage(host_ip)
                output = rrdtool.update(
                    rrd_file,
                    "%s:%s:%s:%s:%s:%s:%s:%s:%s" % (
                        timestamp_now, host_stats['cpu'],
                        host_stats['ram'], host_stats['dr'],
                        host_stats['dw'], host_stats['tx'],
                        host_stats['rx'], host_stats['tmp'],
                        host_stats['pwr']))
                rrd_logger.debug("update status" + str(output))
            else:
                host_stats = get_host_resources_usage(host_ip, m_type)
                rrdtool.update(
                    rrd_file,
                    "%s:%s:%s:%s:%s:%s:%s" % (
                        timestamp_now, host_stats['cpu'],
                        host_stats['ram'], host_stats['dr'],
                        host_stats['dw'], host_stats['tx'],
                        host_stats['rx']))
    except Exception as e:  # py2 'except Exception, e' is a py3 SyntaxError
        rrd_logger.debug(
            "Error occured while creating/updating rrd for host: %s"
            % (host_ip))
        rrd_logger.debug(e)
def update(data_dir):
    """Append eth0 download/upload byte counters to net.rrd, creating the
    database first if it does not exist yet.
    """
    path = os.path.join(data_dir, "net.rrd")
    if not os.path.exists(path):
        create(data_dir)
    # 'with' guarantees the handle is closed
    with open("/proc/net/dev") as fp:
        lines = fp.readlines()
    eth = None
    for line in lines:
        if re.search("eth0", line):
            eth = line
            break
    if eth is None:
        # Original raised NameError here when no eth0 line was present;
        # skip the update instead.
        return
    (interface, values) = eth.split(":")
    values = values.split()
    download = values[0]  # /proc/net/dev: rx bytes is field 0
    upload = values[8]    # tx bytes is field 8
    values = ":".join((download, upload))
    rrdtool.update(path, "N:" + values)
def rrd2Metrics(sensortype, sensorid, metric1, metric2, rrd_root):
    """Store one two-metric sample, creating directories and the per-sensor
    rrd on demand (one directory per sensor type, one rrd per sensor)."""
    ds_names = ("metric1", "metric2")
    rrd_path = rrd_root + "/" + sensortype + "/"
    # ensure the rrd root and the per-type directory both exist
    for directory in (rrd_root, rrd_path):
        if not os.path.exists(directory):
            os.makedirs(directory)
    rrdfile = str(rrd_path + sensorid + ".rrd")
    if not os.path.exists(rrdfile):
        # legend depends on the sensor type;
        # 52: temperature and humidity sensor
        if sensortype == '52':
            ds_names = ("Temperature", "Humidity")
        rrdtool.create(rrdfile,
                       '--step', '30', '--start', '0',
                       'DS:%s:GAUGE:120:U:U' % (ds_names[0]),
                       'DS:%s:GAUGE:120:U:U' % (ds_names[1]),
                       'RRA:AVERAGE:0.5:1:1051200',
                       'RRA:AVERAGE:0.5:10:210240')
    # append the new sample
    rrdtool.update('%s' % (rrdfile), 'N:%s:%s' % (metric1, metric2))
def mySocket():
    """Accept one client and stream its power samples into per-channel
    rrds, drawing a bar gauge per channel on the console."""
    s.listen(1)
    c, addr = s.accept()
    print("Socket Up and running with a connection from", addr)
    while True:
        rcvdData = c.recv(120).decode()
        if 'C' not in rcvdData:
            # no channel marker -> peer is done
            c.close()
            time.sleep(0.2)
            break
        power = rcvdData
        time.sleep(0.3)
        os.system('clear')
        print(power)
        # payload alternates labels and values; keep the values only
        entry = re.split(':', power)[1::2]
        for i, reading in enumerate(entry):
            rrdtool.update(f"powerCapture{i}.rrd", f'N:{reading}')
            print(f'Wrote N:{reading} to powerCapture{i}.rrd')
        print(entry)
        entry = [float(v) for v in entry]
        for i, watts in enumerate(entry):
            # scale to an 80-char bar against a 3680 W full range
            filled = int(float((80 * watts) / 3680))
            print('')
            print('[' + '#' * filled + ' ' * (80 - filled), end='')
            print(f'] {watts} in W (CH{i})')
def overall_min_max_avg(prefix, start_time, end_time, intervals):
    """Compute per-interval avg/min/max from <prefix>_overall.rrd.

    Fills <prefix>_intervals.rrd with per-second values and returns
    (peak_interval_id, min_graph_interval, results_list), where
    results_list[0] holds the stats of the peak (highest-average)
    interval.
    """
    max_average = 0.0
    min_graph_interval = 60
    length = int(end_time) - int(start_time)
    # (iavg, imin, imax, istart, iend)
    # first will be overwritten with peak when known
    results_list = [(0.0, 0.0, 0.0, 0, 0)]
    # Pre-initialize the peak trackers: the original raised NameError at
    # the return when every interval was skipped as bogus.
    peak_interval_id = None
    peak_interval_start = peak_interval_end = 0
    max_minimum = max_maximum = 0.0
    rrdtool.create(prefix + "_intervals.rrd",
                   '--step', '1',
                   '--start', str(int(start_time) - 1),
                   'DS:avg:GAUGE:1:U:U',
                   'RRA:AVERAGE:0.5:1:%d' % int(length),
                   'DS:min:GAUGE:1:U:U',
                   'RRA:AVERAGE:0.5:1:%d' % int(length),
                   'DS:max:GAUGE:1:U:U',
                   'RRA:AVERAGE:0.5:1:%d' % int(length))
    for id, interval in enumerate(intervals, start=1):
        # something to customize the x-axis labling
        graph_interval = interval[1] - interval[0]
        if (graph_interval > 0 and graph_interval < min_graph_interval):
            min_graph_interval = graph_interval
        start = interval[0] + 1
        # take care if there was a long delay between when we started
        # netperf and when we started getting results out of it.
        if (start < start_time):
            start = int(start_time + 1)
        end = interval[1] - 1
        # if we have a bogus interval, skip it
        if (start >= end):
            continue
        # we have no interest in the size of the graph (the first two
        # items in the list) so slice just the part of interest
        result = rrdtool.graph('/dev/null',
                               '--start', str(start),
                               '--end', str(end),
                               'DEF:foo=%s_overall.rrd:mbps:AVERAGE' % prefix,
                               'VDEF:avg=foo,AVERAGE',
                               'VDEF:min=foo,MINIMUM',
                               'VDEF:max=foo,MAXIMUM',
                               'PRINT:avg:"%6.2lf"',
                               'PRINT:min:"%6.2lf"',
                               'PRINT:max:"%6.2lf"')[2]
        iavg = float(result[0].strip('"'))
        imin = float(result[1].strip('"'))
        imax = float(result[2].strip('"'))
        results_list.append((iavg, imin, imax, start, end))
        # py3: xrange -> range; 'ts' avoids shadowing the time module
        for ts in range(start, end + 1):
            rrdtool.update(prefix + "_intervals.rrd",
                           '%d:%f:%f:%f' % (ts, iavg, imin, imax))
        if iavg > max_average:
            peak_interval_id = id
            peak_interval_start = start
            peak_interval_end = end
            max_average = iavg
            max_minimum = imin
            max_maximum = imax
            results_list[0] = (max_average, max_minimum, max_maximum,
                               peak_interval_start, peak_interval_end)
    return peak_interval_id, min_graph_interval, results_list
def update_rrd(self, initial=False):
    """Sample /proc stats for self.pid and record cpu%, rss and io chars.

    Call once with initial=True to seed the *_last counters without
    emitting a sample.
    """
    # 'with' closes the /proc handles even if parsing raises
    with open("/proc/%d/stat" % self.pid) as fpstat:
        pstat = fpstat.readline().split()
    with open("/proc/stat") as ftstat:
        tstat = ftstat.readline().split()
    with open("/proc/%d/io" % self.pid) as fio:
        io = dict((n, int(v)) for n, v in
                  (a.split(': ') for a in fio.readlines()))
    utime_now = int(pstat[13])
    stime_now = int(pstat[14])
    time_total_now = sum([int(a) for a in tstat[1:]])
    # we are interested in a "per core" value, hence divide by the
    # number of CPUs (the original comment said "multiply" — the code
    # has always divided)
    time_total_now /= multiprocessing.cpu_count()
    if not initial and (time_total_now - self.time_total_last) > 0:
        delta_total = time_total_now - self.time_total_last
        user_util = (utime_now - self.utime_last) / delta_total
        sys_util = (stime_now - self.stime_last) / delta_total
        rss = int(pstat[23]) * PAGE_SIZE
        data = ((user_util + sys_util) * 100, rss,
                int(io['rchar']), int(io['wchar']))
        print("New data", data)
        rrdtool.update("proc_%s.rrd" % self.graph_name,
                       "N:%f:%d:%d:%d" % data)
    self.utime_last = utime_now
    self.stime_last = stime_now
    self.time_total_last = time_total_now
def update(database):
    """Collect the current state of every configured sensor for one rrd
    database, store the sample, then schedule the next run."""
    rrd_filename = hass.config.path(rrd_dir, database[CONF_NAME]) + ".rrd"

    def resolve_value(data_source, sensor_id):
        # Returns the sensor state as a string; "NaN" when the sensor is
        # missing or its value cannot be converted.
        try:
            state = hass.states.get(sensor_id)
            if state is None:
                _LOGGER.debug(
                    "[%s] Skipping sensor %s, because value is unknown.",
                    rrd_filename,
                    sensor_id,
                )
                raise Exception("Sensor has no value or not exists.")
            value = state.state
            # COUNTER and DERIVE sources must be integers
            if data_source[CONF_CF] in ["COUNTER", "DERIVE"]:
                value = round(float(value))
        except Exception:
            _LOGGER.info(
                "[%s] sensor %s value will be stored as NaN.",
                rrd_filename,
                sensor_id,
            )
            value = "NaN"
        return str(value)

    # Name/value pairs, index-aligned, for the rrdtool template and data
    ds_names = [ds[CONF_NAME] for ds in database[CONF_DS]]
    ds_values = [resolve_value(ds, ds[CONF_SENSOR])
                 for ds in database[CONF_DS]]

    # Save to RRD file
    try:
        template = ":".join(ds_names)
        timestamp = int(time.time())
        values_string = ":".join(ds_values)
        rrdtool.update(rrd_filename, f"-t{template}",
                       f"{timestamp}:{values_string}")
        _LOGGER.debug(
            "%s data added. ds=%s, values=%s:%s",
            rrd_filename,
            template,
            timestamp,
            values_string,
        )
    except rrdtool.OperationalError as exc:
        _LOGGER.error(exc)

    # Schedule next update
    schedule_next_update(database)
def main(): # Check command args parser = argparse.ArgumentParser(description='Program to read the water meter using a reflective light sensor.') parser.add_argument('-c', '--create', action='store_true', default=False, help='Create rrd database if necessary') args = parser.parse_args() if args.create: create_rrd() # Open serial line ser = serial.Serial(port, 9600) if not ser.isOpen(): print "Unable to open serial port %s" % port sys.exit(1) trigger_state = 0 counter = last_rrd_count() print "restoring counter to %f" % counter while(1==1): # Read line from arduino and convert to trigger value line = ser.readline() line = line.strip() old_state = trigger_state if line == '1': trigger_state = 1 elif line == '0': trigger_state = 0 if old_state == 1 and trigger_state == 0: # trigger active -> update count rrd counter += trigger_step update = "N:%.3f:%.3f" % (counter, trigger_step) #print update rrdtool.update(count_rrd, update)
def updateChannelRRD(dataDir, name, values, lastTS={}):
    """Append (timestamp, value) samples for channel *name* to its RRD,
    creating the database on first use.

    NOTE(review): the mutable default ``lastTS={}`` appears deliberate —
    it persists across calls as a per-channel cache of the last stored
    timestamp, used to drop sub-second duplicate updates.  Do not "fix"
    it without supplying an equivalent cache.
    """
    filename = os.path.join(dataDir, '%s.rrd' % name)
    if os.path.isfile(filename):
        updates = []
        for ts, v in values:
            ts = int(ts + 0.5)  # round to the nearest whole second
            # Throw out updates that are less than a second apart.
            if ts != lastTS.get(name, 0):
                lastTS[name] = ts
                updates.append('%s:%s' % (ts, v))
        print updates
        rrdtool.update(filename, *updates)
    else:
        # First sample for this channel: create the RRD with AVERAGE,
        # MIN and MAX archives at several resolutions (1 s raw up to
        # multi-year consolidation), 1 s base step, 120 s heartbeat.
        rra = []
        for cf in 'AVERAGE', 'MIN', 'MAX':
            rra.extend([
                "RRA:%s:0.99:1:172800" % cf,
                "RRA:%s:0.99:60:2880" % cf,
                "RRA:%s:0.5:420:2880" % cf,
                "RRA:%s:0.5:1860:2880" % cf,
                "RRA:%s:0.5:21900:2880" % cf,
            ])
        rrdtool.create(filename,
                       "DS:value:GAUGE:120:U:U",
                       "-s 1",
                       *rra)
def accept_temperature():
    """Temperature receiving backend.

    Stores the maximum CPU and HDD temperatures from the JSON request
    body into a per-host RRD, creating the database on first contact.
    """
    ip_address = request.environ.get("REMOTE_ADDR")
    host_name = request.environ.get("REMOTE_HOST")
    if host_name is None:
        # Fall back to a reverse DNS lookup when the proxy did not
        # supply REMOTE_HOST.
        host_name = socket.gethostbyaddr(ip_address)[0]
    rrdname = "./rrd/%s.rrd" % host_name
    if not os.path.exists(rrdname):
        # 10-minute step, AVERAGE/MIN/MAX archives; DS range -273..5000.
        rrdtool.create(rrdname, '--start', 'now', '--step', '600',
                       'DS:ds0:GAUGE:1200:-273:5000',
                       'DS:ds1:GAUGE:1200:-273:5000',
                       'RRA:AVERAGE:0.5:1:1200',
                       'RRA:AVERAGE:0.5:6:1200',
                       'RRA:AVERAGE:0.5:24:1200',
                       'RRA:MIN:0.5:1:1200',
                       'RRA:MIN:0.5:6:1200',
                       'RRA:MIN:0.5:24:1200',
                       'RRA:MAX:0.5:1:1200',
                       'RRA:MAX:0.5:6:1200',
                       'RRA:MAX:0.5:24:1200')
    # fix: the original seeded the maxima with float('-inf'); with an
    # empty reading list '-inf' ended up in the update string, which
    # rrdtool rejects (below the DS minimum of -273) and the whole
    # update was lost.  Use rrdtool's 'U' (unknown) marker instead.
    hdd_temps = request.json['hdd']
    max_hdd = 'U'
    if hdd_temps:
        max_hdd = max([float(x) for x in hdd_temps])
    cpu_temps = request.json['cpu']
    max_cpu = 'U'
    if cpu_temps:
        max_cpu = max([float(x) for x in cpu_temps])
    rrdtool.update(rrdname, 'N:%s:%s' % (max_cpu, max_hdd))
    return dict()
def update_graph_database(rrd_dir, type, n_source, n_binary):
    """Record the current source/binary queue sizes in <type>.rrd,
    creating the RRD on first use.

    Does nothing when rrd_dir is unset; a NameError (rrdtool module
    never imported) is deliberately swallowed — graphing is optional.
    """
    if not rrd_dir:
        return

    rrd_file = os.path.join(rrd_dir, type.lower() + '.rrd')
    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]

    try:
        rrdtool.update(*update)
    except rrdtool.error:
        # Update failed -> assume the RRD does not exist yet.
        # fix: strip() each argument line — the original's triple-quoted
        # list kept the source indentation on every inner line, so
        # rrdtool received arguments like "    --step 300".
        create = [rrd_file] + [line.strip() for line in """
        --step 300
        --start 0
        DS:ds0:GAUGE:7200:0:1000
        DS:ds1:GAUGE:7200:0:1000
        RRA:AVERAGE:0.5:1:599
        RRA:AVERAGE:0.5:6:700
        RRA:AVERAGE:0.5:24:775
        RRA:AVERAGE:0.5:288:795
        RRA:MAX:0.5:1:600
        RRA:MAX:0.5:6:700
        RRA:MAX:0.5:24:775
        RRA:MAX:0.5:288:795
        """.strip().split("\n")]
        try:
            rrdtool.create(*create)
            rrdtool.update(*update)
        except rrdtool.error as e:
            print(('warning: queue_report: rrdtool error, skipping %s.rrd: %s'
                   % (type, e)))
    except NameError:
        pass
def touch_rrd(item):
    """Create an RRD per (ip, check-type, target) if missing, then
    append the new measurement.

    Fixes: the dict value from .items() was bound to `_target` but
    ignored, with the same list re-fetched via item['result'][_type];
    the per-iteration ip lookup and directory check are hoisted out of
    the loop (the directory only needs to be ensured once).
    """
    ip = item['ip']
    # One directory per monitored ip.
    if not os.path.isdir('./%s' % ip):
        os.mkdir('./%s' % ip)
    for _type, results in item['result'].items():
        for result in results:
            # example: ['166.111.8.28','30','ms']
            rrd_filename = './%s/%s_%s.rrd' % (ip, _type, format_url(result[0]))
            if not os.path.exists(rrd_filename):
                # 15-minute step, heartbeat 2000 s, AVERAGE + MAX RRAs.
                rrdtool.create(rrd_filename,
                               '--step', '900',
                               '--start', '-8640000',
                               "DS:result:GAUGE:2000:U:U",
                               "RRA:AVERAGE:0.5:1:600",
                               "RRA:AVERAGE:0.5:24:775",
                               "RRA:MAX:0.5:1:600",
                               "RRA:MAX:0.5:24:775",
                               )
            # then update it
            rrdtool.update(rrd_filename, "%d:%s" % (item['time'], result[1]))
def update_rrd_simple(self, rrdfile, count): timestamp = int(time.time()) try: open(rrdfile) except IOError: print __name__, ": Creating %s.." % (rrdfile) rrdtool.create(rrdfile, '-b', str(timestamp-1), '-s300', 'DS:ds0:GAUGE:600:0:1000000', 'RRA:HWPREDICT:1440:0.1:0.0035:288', 'RRA:AVERAGE:0.5:1:800', 'RRA:AVERAGE:0.5:6:800', 'RRA:AVERAGE:0.5:24:800', 'RRA:AVERAGE:0.5:288:800', 'RRA:MAX:0.5:1:800', 'RRA:MAX:0.5:6:800', 'RRA:MAX:0.5:24:800', 'RRA:MAX:0.5:288:800') else: print __name__, ": Updating %s with value (Count=%s).." \ % (rrdfile, count) try: rrdtool.update(rrdfile, str(timestamp) + ":" + \ str(count)) except Exception, e: print "Error updating %s: %s" % (rrdfile, e)
def rrd2Metrics (sensortype, sensorid, metric1, metric2, rrd_root):
    """Store a pair of metrics for one sensor in
    <rrd_root>/<sensortype>/<sensorid>.rrd, creating the directory tree
    and the RRD itself on first use."""
    # Generic data-source labels; known sensor types get descriptive
    # names at creation time.
    ds_first, ds_second = "metric1", "metric2"

    # Make sure the rrd root directory exists.
    if not os.path.exists(rrd_root):
        os.makedirs(rrd_root)

    # One directory per sensor type, one rrd per sensor.
    rrd_path = rrd_root + "/" + sensortype + "/"
    if not os.path.exists(rrd_path):
        os.makedirs(rrd_path)

    rrdfile = str(rrd_path + sensorid + ".rrd")

    if not os.path.exists(rrdfile):
        # 52: temperature-and-humidity sensor
        if sensortype == '52':
            ds_first, ds_second = "Temperature", "Humidity"
        # 30 s step, 120 s heartbeat; raw plus 10-sample averages.
        rrdtool.create(rrdfile,
                       '--step', '30',
                       '--start', '0',
                       'DS:%s:GAUGE:120:U:U' % (ds_first),
                       'DS:%s:GAUGE:120:U:U' % (ds_second),
                       'RRA:AVERAGE:0.5:1:1051200',
                       'RRA:AVERAGE:0.5:10:210240')

    # Append the new sample pair.
    rrdtool.update(rrdfile, 'N:%s:%s' % (metric1, metric2))
def updateDatabase(self, updates):
    """Write the latest memory/disk/cpu sample into the RRD, regenerate
    the CPU graph for the recent time frame and return the resulting
    notification level.

    NOTE: mutates *updates* in place, replacing each value with its
    string form (UNKNOWN for missing values) — callers may rely on it.
    """
    updateString = rrdConstants.NOW
    for key, value in updates.items():
        # fix: compare with None by identity — "!= None" is unidiomatic
        # and misbehaves for objects overriding __eq__.
        updates[key] = str(value) if value is not None else rrdConstants.UNKNOWN

    # Values must appear in DS declaration order.
    updateString += (':' + updates[rrdConstants.DS_MEMORY])
    updateString += (':' + updates[rrdConstants.DS_DISK])
    updateString += (':' + updates[rrdConstants.DS_CPU])

    rrdtool.update(self.fileName, updateString)

    # Graph window: [last - TIME_FRAME, last].
    end = rrdtool.last(self.fileName)
    begin, end = str(end - rrdConstants.TIME_FRAME), str(end)

    #lastMem = float(rrdGraphs.makeMemoryGraph(self.path, begin, end))
    #lastDisk = float(rrdGraphs.makeDiskGraph(self.path, begin, end))
    lastCpu = float(rrdGraphs.makeCPUGraph(self.path, begin, end))

    #lastMem = lastMem if not math.isnan(lastMem) else 0
    #lastDisk = lastDisk if not math.isnan(lastDisk) else 0
    lastCpu = lastCpu if not math.isnan(lastCpu) else 0

    return self.pickNotificationLevel({
        #rrdConstants.DS_MEMORY : lastMem,
        #rrdConstants.DS_DISK : lastDisk,
        rrdConstants.DS_CPU: lastCpu
    })
def rrdupdate(owdata): if config.rrdenable: stime = int(time.mktime(time.localtime())) path = config.rrdpath step = 300 hb = 3600 xff = 0.5 HOUR = 3600 YEAR = 31536000 steps1 = 1 rows1 = YEAR // step for sensor in owdata: (value, timestamp) = owdata[sensor] if value == config.owfail: continue rrdfile = '%s/%s.rrd' % (path, sensor.upper()) if not os.path.isfile(rrdfile): try: rrdtool.create(rrdfile, '--step', '%d' % step, 'DS:data:GAUGE:%d:U:U' % hb, 'RRA:AVERAGE:%d:%d:%d' % (xff, steps1, rows1)) except rrdtool.error, e: logger.warning(e) logger.debug("RRD %s created" % sensor) info = rrdtool.info(rrdfile) if ((stime - info['last_update']) > step): try: rrdtool.update(rrdfile,'%s:%s' % (timestamp, value)) except rrdtool.error, e: logger.warning(e) logger.debug("RRD %s updated" % sensor)
def updateRrrd(powerconsumption):
    """Append the current power reading (watts) to fritz.rrd, creating
    the database (5-minute step GAUGE, range 0-150 W) on first use.

    Fix: the original wrapped the body in ``try: ... except: raise`` —
    a no-op bare-except that only obscured intent.  Exceptions
    (including ImportError for a missing rrdtool) still propagate
    unchanged.
    """
    import rrdtool  # local import keeps rrdtool an optional dependency
    if not os.path.exists("fritz.rrd"):
        rrdtool.create("fritz.rrd", "--step", "300",
                       "DS:power:GAUGE:900:0:150",
                       'RRA:AVERAGE:0.5:12:3600',
                       'RRA:AVERAGE:0.5:288:3600',
                       'RRA:AVERAGE:0.5:1:3600')
    rrdtool.update("fritz.rrd", "N:%f" % powerconsumption)
def updateDatabase(self, updates):
    """Append the current in/out bandwidth sample to this RRD.

    *updates* maps the DS_INBW / DS_OUTBW constants to their latest
    values; they are emitted in DS declaration order.
    """
    fields = [
        appConstants.RRD_NOW,
        str(updates[appConstants.DS_INBW]),
        str(updates[appConstants.DS_OUTBW]),
    ]
    rrdtool.update(self.fileName, ':'.join(fields))
def main(): """main part""" temphumi = DHT22_AM2302(19) # BCM 19 = PIN 35 temp_cpu = CPU() measurements = { DS_TEMP1: Measurements(3), DS_TEMPCPU: Measurements(3), DS_TEMP2: Measurements(3), DS_HUMI: Measurements(3), } rrd_template = DS_TEMP1 + ":" + DS_TEMPCPU + ":" + DS_TEMP2 + ":" + DS_HUMI while True: _temp, _humi = temphumi.read() measurements[DS_TEMP1].append(_temp) measurements[DS_HUMI].append(_humi) measurements[DS_TEMPCPU].append(temp_cpu.read()) measurements[DS_TEMP2].append(0) # empty, for later useage rrd_data = ( "N:{:.2f}".format(measurements[DS_TEMP1].last()) + ":{:.2f}".format(measurements[DS_TEMPCPU].last()) + ":{:.2f}".format(measurements[DS_TEMP2].last()) + ":{:.2f}".format(measurements[DS_HUMI].last()) ) print strftime("%H:%M:%S", localtime()), rrd_data rrdtool.update(RRDFILE, "--template", rrd_template, rrd_data) sleep(35)
def plugin(srv, item):
    """mqttwarn service: forward the incoming payload to rrdtool.

    A purely numeric payload is stored as "N:<value>"; any other
    payload is split into words and appended to the target's addrs
    list, so arbitrary rrdtool update arguments pass through.
    Returns True on success, False when the rrdtool call failed.
    """
    srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__,
                      item.service, item.target)

    # If the incoming payload has been transformed, use that,
    # else the original payload
    text = item.message

    try:
        # addrs is a list[] associated with a particular target.
        # it can contain an arbitrary amount of entries that are just
        # passed along to rrdtool
        # mofified by otfdr @ github to accept abitray arguments with
        # the payload and to not always add the 'N' in front
        # 2017-06-05 - fix/enhancement for https://github.com/jpmens/mqttwarn/issues/248
        # fix: raw string — "^\d+$" is an invalid escape sequence in a
        # plain literal (DeprecationWarning, later SyntaxWarning).
        if re.match(r"^\d+$", text):
            rrdtool.update(item.addrs, "N:" + text)
        else:
            rrdtool.update(item.addrs + text.split())
    except Exception as e:
        # fix: include the underlying error so failures are diagnosable
        # (the original logged a bare "Cannot call rrdtool").
        srv.logging.warning("Cannot call rrdtool: %s", e)
        return False

    return True
def start (self):
    """Receive particulate-sensor readings over UDP forever; once every
    configured sensor has reported, combine the samples into a single
    rrdtool update.

    Each payload is "<source>,<template>:N:<values>", e.g.
    data['particulates_2'] = "2_pm25:2_pm10:N:11.1:5.5"
    """
    while True:
        payload = self.udp.receive()
        Log(f"RRD Data received: {payload}")
        (source, values) = payload.split(',')
        self.data[source] = values

        data_complete = True
        rrd_template = ""
        rrd_data = "N:"
        for p in self.Particulates:
            if not self.data[p]:
                # This sensor has not reported yet.
                data_complete = False
            else:
                try:
                    rrd_template += self.data[p].split(":N:")[0] + ":"
                    rrd_data += self.data[p].split(":N:")[1] + ":"
                except IndexError:
                    Log("Wrong data format: {0[0]} {0[1]}".format(sys.exc_info()))
                    # fix: the original logged `data[p]` — an undefined
                    # name that raised NameError inside this handler,
                    # masking the real formatting problem.
                    Log(f"data[p]: {self.data[p]}")
                    data_complete = False
        if data_complete:
            rrd_template = rrd_template.rstrip(":")
            rrd_data = rrd_data.rstrip(":")
            try:
                # Log(f"Updating rrd: {rrd_template}, {rrd_data}")
                import rrdtool
                rrdtool.update(RRDFILE, "--template", rrd_template, rrd_data)
            except rrdtool.OperationalError:
                Log("Cannot update rrd database: {0[0]} {0[1]}".format(sys.exc_info()))
def update(self):
    """Update the historical data.

    Parses /proc/meminfo-style data from DATA_SOURCE (all figures in
    KBytes), derives used memory (total - free - buffers - cached) and
    used swap, records the totals on self.memory / self.swap and pushes
    the byte values into the RRD.

    Raises StatsError when DATA_SOURCE cannot be parsed.
    """
    # fix: raw strings — "\s"/"\d" in plain literals are invalid escape
    # sequences (DeprecationWarning, later SyntaxWarning).
    regexp = re.compile(r"MemTotal:\s+(\d+)"
                        r".+\sMemFree:\s+(\d+)"
                        r".+\sBuffers:\s+(\d+)"
                        r".+\sCached:\s+(\d+)"
                        r".+\sSwapTotal:\s+(\d+)"
                        r".+\sSwapFree:\s+(\d+)")

    # fix: context manager — the original closed the handle manually
    # and leaked it if iteration raised.
    with open(DATA_SOURCE, "r") as f:
        lines = [line.strip() for line in f]
    text = " ".join(lines)

    match = regexp.search(text)
    if not match:
        raise StatsError("cannot parse " + DATA_SOURCE)

    memtotal, memfree, buffers, cached, swaptotal, swapfree = match.groups()
    memtotal, memfree, buffers, cached = int(memtotal), int(memfree), int(buffers), int(cached)
    swaptotal, swapfree = int(swaptotal), int(swapfree)

    memused = memtotal - (memfree + buffers + cached)
    swapused = swaptotal - swapfree

    self.memory = memtotal
    self.swap = swaptotal

    rrdtool.update(self.database,
                   "--template", "memused:buffers:cached:swapused",
                   "N:%d:%d:%d:%d" % (memused * 1024, buffers * 1024,
                                      cached * 1024, swapused * 1024))
def rrd1Metric (sensortype, sensorid, metric1, rrd_root):
    """Store a single metric for one sensor under
    <rrd_root>/<sensortype>/<sensorid>.rrd, creating the directory tree
    and the RRD (with a type-specific data source) on first use."""
    ds_name = "metric1"

    # Make sure the rrd root directory exists.
    if not os.path.exists(rrd_root):
        os.makedirs(rrd_root)

    # One directory per sensor type, one rrd per sensor.
    rrd_path = rrd_root + "/" + sensortype + "/"
    if not os.path.exists(rrd_path):
        os.makedirs(rrd_path)

    rrdfile = str(rrd_path + sensorid + ".rrd")

    if not os.path.exists(rrdfile):
        # 5A: instantaneous power -> descriptive DS name.
        if sensortype == '5A':
            ds_name = "Watt"
        # Default layout created for every type: GAUGE, 30 s step.
        rrdtool.create(rrdfile,
                       '--step', '30', '--start', '0',
                       'DS:%s:GAUGE:120:U:U' % (ds_name),
                       'RRA:AVERAGE:0.5:1:1051200',
                       'RRA:AVERAGE:0.5:10:210240')
        # 71: cumulative energy -> re-create as COUNTER with a 60 s
        # step (apparently overwrites the GAUGE rrd just created, as in
        # the original flow).
        if sensortype == '71':
            ds_name = "energy"
            rrdtool.create(rrdfile,
                           '--step', '60', '--start', '0',
                           'DS:%s:COUNTER:1200:0:U' % (ds_name),
                           'RRA:AVERAGE:0.5:1:525600',
                           'RRA:AVERAGE:0.5:30:175200')

    # Append the new sample.
    rrdtool.update(rrdfile, 'N:%s' % (metric1))
def updateDb(sid):
    """Fetch the down/up results for speed test *sid* from MySQL, feed
    them into the per-sid RRD, and re-render the 24-hour PNG graph.
    """
    sid = str(sid)
    # NOTE(review): credentials are hard-coded; move them to config.
    cnx = mysql.connector.connect(user='******', password='******',
                                  host='localhost', database='iisc')
    cursor = cnx.cursor()
    # fix: parameterized query — the original interpolated sid straight
    # into the SQL string, which is an injection vector.
    cursor.execute("select down,up from speedtest where sid=%s", (sid,))
    for i in cursor:
        up = str(i[1])
        down = str(i[0])
        rrdtool.update("speedtest/database/" + sid + ".rrd",
                       "--template", "up:dw",
                       "N:%s:%s" % (up, down))
    cursor.close()
    cnx.close()
    rrdtool.graph("speedtest/images/" + sid + "_1d.png",
                  "--slope-mode",
                  "--start", "-86400",
                  "--end", "now",
                  "--width", "900",
                  "--height", "200",
                  "-a", "PNG",
                  "--title", "Internet Speed (24 Hours)",
                  "--vertical-label", "Mbit",
                  "--watermark", "`date`",
                  "--lower-limit", "0",
                  "--x-grid", "MINUTE:10:HOUR:1:MINUTE:120:0:%R",
                  "--alt-y-grid", "--rigid",
                  "DEF:up=speedtest/database/" + sid + ".rrd:up:MAX",
                  "DEF:dw=speedtest/database/" + sid + ".rrd:dw:MAX",
                  "LINE1:dw#FF6600:Download",
                  "GPRINT:dw:LAST:Cur\: %5.2lf",
                  "GPRINT:dw:AVERAGE:Avg\: %5.2lf",
                  "GPRINT:dw:MAX:Max\: %5.2lf",
                  "GPRINT:dw:MIN:Min\: %5.2lf",
                  "LINE2:up#003366:Upload",
                  "GPRINT:up:LAST:Cur\: %5.2lf",
                  "GPRINT:up:AVERAGE:Avg\: %5.2lf",
                  "GPRINT:up:MAX:Max\: %5.2lf",
                  "GPRINT:up:MIN:Min\: %5.2lf")