def _create_mongodb_actions_file(destination_file, first_ts):
    """Create an RRD file that stores mongodb action statistics.

    Mirrors the file settings collectd uses for its own RRD files.

    :param destination_file: path of the RRD file to create
    :param first_ts: epoch timestamp of the first sample; the file starts
        ten seconds before it
    """
    sources = ['DS:%s:GAUGE:10:0:281474976710000' % action
               for action in ('insert', 'query', 'update', 'delete')]
    # One AVERAGE/MIN/MAX triple per resolution, finest first.
    archives = ['RRA:%s:0.1:%d:%d' % (cf, steps, rows)
                for steps, rows in ((1, 1200), (14, 1234), (100, 1209),
                                    (446, 1201), (5270, 1200))
                for cf in ('AVERAGE', 'MIN', 'MAX')]
    rrdtool.create(
        destination_file,
        '--start', '{}-10'.format(first_ts),
        '--step', '5',
        *(sources + archives))
def update(countrycity):
    """Refresh the weather RRD for one '<country>_<city>' pair and redraw
    its graphs for every configured period."""
    country, city = countrycity.split('_', 1)
    db_file = RRD_PATH + 'weather_' + country + '_' + city + '.rrd'
    # Lazily create the database the first time this location is seen.
    if not os.path.isfile(db_file):
        rrdtool.create(db_file, '-s', '300',
                       'DS:humidity:GAUGE:600:0:100',
                       'DS:temperature:GAUGE:600:U:U',
                       'RRA:AVERAGE:0.5:1:576',
                       'RRA:AVERAGE:0.5:6:672',
                       'RRA:AVERAGE:0.5:24:732',
                       'RRA:AVERAGE:0.5:144:1460')
    stats = load(country, city)
    error = rrdtool.update(db_file, '-t', 'humidity:temperature',
                           'N:' + stats['humidity'] + ':' + stats['temperature'])
    if error:
        log(__file__, 'An error occured : ' + error)
    else:
        log(__file__, 'Stats for ' + city + ' are: ' + stats['humidity'] +
            '%, ' + stats['temperature'] + ' C')
        for period in PERIODS:
            graph(country, city, 'temperature', period)
            graph(country, city, 'humidity', period)
    return
def updateRrrd(powerconsumption):
    """Append a power-consumption sample to fritz.rrd, creating the file on
    first use.

    Fix: the whole body was wrapped in ``try: ... except: raise`` — a bare
    re-raise that changes nothing but obscures the code; removed.

    :param powerconsumption: current power draw in watts (float)
    """
    import rrdtool
    if not os.path.exists("fritz.rrd"):
        # 5-minute step, 15-minute heartbeat, plausible 0-150 W range.
        rrdtool.create("fritz.rrd", "--step", "300",
                       "DS:power:GAUGE:900:0:150",
                       'RRA:AVERAGE:0.5:12:3600',
                       'RRA:AVERAGE:0.5:288:3600',
                       'RRA:AVERAGE:0.5:1:3600')
    rrdtool.update("fritz.rrd", "N:%f" % powerconsumption)
def _create_mongodb_memory_file(destination_file, first_ts):
    """Create an RRD file that stores mongodb memory usage statistics.

    Mirrors the file settings collectd uses for its own RRD files.

    :param destination_file: path of the RRD file to create
    :param first_ts: epoch timestamp of the first sample; the file starts
        ten seconds before it
    """
    limits = '0:281474976710000'
    args = [
        destination_file,
        '--start', '{}-10'.format(first_ts),
        '--step', '5',
        'DS:vsize:GAUGE:600:' + limits,
        'DS:res:GAUGE:600:' + limits,
    ]
    # One AVERAGE/MIN/MAX triple per resolution, finest first.
    for steps, rows in ((1, 1200), (14, 1234), (100, 1209),
                        (446, 1201), (5270, 1200)):
        for cf in ('AVERAGE', 'MIN', 'MAX'):
            args.append('RRA:%s:0.1:%d:%d' % (cf, steps, rows))
    rrdtool.create(*args)
def init_rrd(self, fname, m):
    """Create the RRD file backing this collector.

    One data source per entry of the module-level ``variables`` list;
    AVERAGE and MAX archives cover day, week, month and year resolutions.

    :param fname: path of the RRD file to create
    :param m: start timestamp (epoch seconds)
    :return: the start timestamp that was recorded
    """
    ds_type = 'ABSOLUTE'
    rows = xpoints / points_per_sample
    realrows = int(rows * 1.1)  # 10% padding so the full range is covered
    day_steps = int(3600 * 24 / (rrdstep * rows))
    week_steps = day_steps * 7
    month_steps = week_steps * 5
    year_steps = month_steps * 12
    # Data sources first, then the archives for both consolidations.
    params = ['DS:%s:%s:%s:0:U' % (v, ds_type, rrdstep * 2)
              for v in variables]
    params += ['RRA:%s:0.5:%s:%s' % (cf, step, realrows)
               for cf in ('AVERAGE', 'MAX')
               for step in (day_steps, week_steps, month_steps, year_steps)]
    rrdtool.create(str(fname), '--start', str(m), '--step', str(rrdstep),
                   *params)
    return m
def createRRD():
    """Parse `tc -s qdisc show` output and create one RRD per qdisc.

    Each discovered qdisc is also recorded in the module-level ``nameArray``.
    Fix: the original split out the stats and rate report lines into locals
    (``statsLine``, ``rateLine``) that were never used — dead code removed.
    """
    global nameArray
    tc_output = subprocess.check_output(
        [tcPath, "-s", "qdisc", "show", "dev", defaultDev])
    # Split the report per qdisc; the text before the first marker is empty.
    chunks = tc_output.split("qdisc")
    chunks.pop(0)
    for chunk in chunks:
        # Each qdisc is reported on three lines; only the name line is used.
        name_fields = chunk.split("\n")[0].split(" ")
        queueType = name_fields[1]
        queueName = name_fields[2].replace(':', '-')
        rootOrParent = name_fields[3]
        nameArray.append({'name': queueName,
                          'type': queueType,
                          'rootOrParent': rootOrParent})
        fullFileName = (workingPath + queueName + ".rrd").replace(":", "-")
        rrdtool.create(fullFileName, '--step', '5',
                       'DS:packetSent:COUNTER:10:U:U',
                       'DS:bytesSent:COUNTER:10:U:U',
                       'DS:dropped:COUNTER:10:U:U',
                       'DS:overlimits:COUNTER:10:U:U',
                       'DS:requeues:COUNTER:10:U:U',
                       'RRA:AVERAGE:0.5:1:600',
                       'RRA:AVERAGE:0.5:6:360')
def update_rrd_simple(self, rrdfile, count): timestamp = int(time.time()) try: open(rrdfile) except IOError: print __name__, ": Creating %s.." % (rrdfile) rrdtool.create(rrdfile, '-b', str(timestamp-1), '-s300', 'DS:ds0:GAUGE:600:0:1000000', 'RRA:HWPREDICT:1440:0.1:0.0035:288', 'RRA:AVERAGE:0.5:1:800', 'RRA:AVERAGE:0.5:6:800', 'RRA:AVERAGE:0.5:24:800', 'RRA:AVERAGE:0.5:288:800', 'RRA:MAX:0.5:1:800', 'RRA:MAX:0.5:6:800', 'RRA:MAX:0.5:24:800', 'RRA:MAX:0.5:288:800') else: print __name__, ": Updating %s with value (Count=%s).." \ % (rrdfile, count) try: rrdtool.update(rrdfile, str(timestamp) + ":" + \ str(count)) except Exception, e: print "Error updating %s: %s" % (rrdfile, e)
def update_rrd(self, rrdfile, compromise, attack): timestamp = int(time.time()) try: open(rrdfile) except IOError: logger.error("Creating %s.." % (rrdfile)) # This needs some checking, we don't need HWPREDICT here and I got some probs # on MacosX (def update_rrd_simple) so I removed aberrant behaviour detection. try: rrdtool.create(rrdfile, '-b', str(timestamp-1), '-s300', 'DS:ds0:GAUGE:600:0:1000000', 'DS:ds1:GAUGE:600:0:1000000', 'RRA:AVERAGE:0.5:1:800', 'RRA:HWPREDICT:1440:0.1:0.0035:288', 'RRA:AVERAGE:0.5:6:800', 'RRA:AVERAGE:0.5:24:800', 'RRA:AVERAGE:0.5:288:800', 'RRA:MAX:0.5:1:800', 'RRA:MAX:0.5:6:800', 'RRA:MAX:0.5:24:800', 'RRA:MAX:0.5:288:800') except Exception, e: logger.warning(": Error creating %s.." % (rrdfile)) return
def update_rrd_simple(self, rrdfile, count): timestamp = int(time.time()) try: open(rrdfile) except IOError: logger.error(": Creating %s.." % (rrdfile)) rrdtool.create(rrdfile, '-b', str(timestamp), '-s300', 'DS:ds0:GAUGE:600:0:1000000', 'RRA:AVERAGE:0.5:1:800', 'RRA:AVERAGE:0.5:6:800', 'RRA:AVERAGE:0.5:24:800', 'RRA:AVERAGE:0.5:288:800', 'RRA:MAX:0.5:1:800', 'RRA:MAX:0.5:6:800', 'RRA:MAX:0.5:24:800', 'RRA:MAX:0.5:288:800') else: logger.info("Updating %s with value (Count=%s).." \ % (rrdfile, str(count))) try: rrdtool.update(rrdfile, str(timestamp) + ":" + \ str(count)) except Exception, e: logger.error("Error updating %s: %s" % (rrdfile, e))
def updateChannelRRD(dataDir, name, values, lastTS={}):
    # Append (timestamp, value) samples to <dataDir>/<name>.rrd, creating
    # the database on first use.
    # NOTE: the mutable default `lastTS` is deliberate -- it persists across
    # calls and remembers the last timestamp written per channel, which the
    # dedup check below relies on.
    filename = os.path.join(dataDir, '%s.rrd' % name)
    if os.path.isfile(filename):
        updates = []
        for ts, v in values:
            ts = int(ts + 0.5)  # round to the nearest whole second
            # Throw out updates that are less than a second apart.
            if ts != lastTS.get(name, 0):
                lastTS[name] = ts
                updates.append('%s:%s' % (ts, v))
        print updates
        rrdtool.update(filename, *updates)
    else:
        rra = []
        for cf in 'AVERAGE', 'MIN', 'MAX':
            rra.extend([
                "RRA:%s:0.99:1:172800" % cf,
                "RRA:%s:0.99:60:2880" % cf,
                "RRA:%s:0.5:420:2880" % cf,
                "RRA:%s:0.5:1860:2880" % cf,
                "RRA:%s:0.5:21900:2880" % cf,
            ])
        # NOTE(review): "-s 1" is passed as one argument (option and value
        # attached); separate '-s', '1' arguments would be the conventional
        # form -- confirm the binding accepts the attached form before
        # changing anything.
        rrdtool.create(filename, "DS:value:GAUGE:120:U:U", "-s 1", *rra)
def update_graph_database(rrd_dir, type, n_source, n_binary):
    """Feed the per-`type` queue RRD with source/binary counts.

    The database is (re)created on the first failing update; a missing
    rrdtool module (NameError) makes the whole call a silent no-op.
    """
    if not rrd_dir:
        return
    rrd_file = os.path.join(rrd_dir, type.lower() + '.rrd')
    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
    try:
        rrdtool.update(*update)
    except rrdtool.error:
        # Update failed, most likely because the file does not exist yet:
        # create it and retry once.
        create = [rrd_file] + [
            "--step 300",
            "--start 0",
            "DS:ds0:GAUGE:7200:0:1000",
            "DS:ds1:GAUGE:7200:0:1000",
            "RRA:AVERAGE:0.5:1:599",
            "RRA:AVERAGE:0.5:6:700",
            "RRA:AVERAGE:0.5:24:775",
            "RRA:AVERAGE:0.5:288:795",
            "RRA:MAX:0.5:1:600",
            "RRA:MAX:0.5:6:700",
            "RRA:MAX:0.5:24:775",
            "RRA:MAX:0.5:288:795",
        ]
        try:
            rrdtool.create(*create)
            rrdtool.update(*update)
        except rrdtool.error as e:
            print(('warning: queue_report: rrdtool error, skipping %s.rrd: %s'
                   % (type, e)))
    except NameError:
        # rrdtool was never imported; graphing support is optional.
        pass
def rrd_update(rrdfile, name, value, slope):
    """Push one sample into `rrdfile`, creating the database (20 s step)
    when missing.

    `slope` selects the data-source type: 'both' -> GAUGE,
    'positive' -> COUNTER, 'zero' -> ignored entirely.
    """
    rrdfile = str(rrdfile)  # fix annoying unicode issues
    dstype = 'GAUGE'
    if slope == 'zero':
        dstype = 'ABSOLUTE'
        # for now don't care about invariants
        return
    elif slope == 'both':
        dstype = 'GAUGE'
    elif slope == 'positive':
        dstype = 'COUNTER'
    token = 'DS:%s:%s:60:U:U' % (name, dstype)
    if not os.path.exists(rrdfile):
        logging.info("Creating %s\n", rrdfile)
        rrdtool.create(rrdfile, '--step=20', token,
                       # 900 points at 20 s = 300 min = 5 hours
                       'RRA:AVERAGE:0.5:1:900',
                       # 1800 points at 60 s (3 x 20 s) = 30 hours
                       'RRA:AVERAGE:0.5:3:1800')
    svalue = str(value)
    logging.debug("Updating '%s' with value of '%s'", rrdfile, svalue)
    rrdtool.update(rrdfile, 'N:' + svalue)
def _create_mem_rrd_command(rrd_name, start, period):
    """Create a new RRD for managing memory utilisation information.

    The new RRD keeps raw samples for one hour, minute averages for 12
    hours, hourly averages for 24 hours and daily averages for one week.
    (The previous docstring described a different layout than the RRAs
    actually built.)

    Fix: the sample-count arithmetic used true division, which under
    Python 3 yields floats and renders RRA specs like 'RRA:LAST:0.5:1:180.0'
    that rrdtool rejects; floor division keeps them integral (identical
    result for integer inputs under Python 2).

    :param rrd_name: The name of the RRD
    :type rrd_name: str
    :param start: The start of the new RRD. None means now.
    :type start: int
    :param period: The sampling period, in number of seconds
    :type period: int
    """
    if not start:
        start = time.time()
    int_period = int(period)
    samples_per_hour = 3600 // int_period
    samples_per_min = 60 // int_period
    heartbeat = str(int_period * 2)  # allow one missed sample
    rrdtool.create(str(rrd_name),
                   '--start', str(int(start)),
                   '--step', str(period),
                   'DS:memtotal:GAUGE:' + heartbeat + ':0:U',
                   'DS:memused:GAUGE:' + heartbeat + ':0:U',
                   'DS:memfree:GAUGE:' + heartbeat + ':0:U',
                   'DS:swaptotal:GAUGE:' + heartbeat + ':0:U',
                   'DS:swapused:GAUGE:' + heartbeat + ':0:U',
                   'DS:swapfree:GAUGE:' + heartbeat + ':0:U',
                   'DS:buffers:GAUGE:' + heartbeat + ':0:U',
                   'DS:cached:GAUGE:' + heartbeat + ':0:U',
                   # Samples as is, good for 1 hour
                   'RRA:LAST:0.5:1:' + str(samples_per_hour),
                   # Minute rollup for 12h
                   'RRA:AVERAGE:0.5:' + str(samples_per_min) + ':' + str(12 * 60),
                   # Hourly rollup for 24h
                   'RRA:AVERAGE:0.5:' + str(samples_per_hour) + ':' + str(24),
                   # Daily rollup for 1w
                   'RRA:AVERAGE:0.5:' + str(24 * samples_per_hour) + ':' + str(7))
def create_rrd_files():
    """Create the CPU and memory RRDs used by this monitor.

    Both files sample every `time_interval` seconds; the memory database
    additionally keeps MIN/MAX archives for the raw, hourly and daily
    resolutions.
    """
    now = str(int(time.time()))
    heartbeat = str(2 * time_interval)
    average_rras = ('RRA:AVERAGE:0.5:1:17280',   # raw samples for 1 day (5*60*24)
                    'RRA:AVERAGE:0.5:360:240',   # 30min averages for 5 days
                    'RRA:AVERAGE:0.5:720:240',   # 1 hour averages for 10 days
                    'RRA:AVERAGE:0.5:1440:240',  # 2 hour averages for 20 days
                    'RRA:AVERAGE:0.5:8640:40',   # 12 hour averages for 20 days
                    'RRA:AVERAGE:0.5:12780:60',  # 1 day averages for 60 days
                    'RRA:AVERAGE:0.5:89460:10')  # 1 week for 10 weeks
    rrdtool.create(str(cpufile + '.rrd'),
                   '--step', str(time_interval), '--start', now,
                   'DS:cpu_percent:GAUGE:' + heartbeat + ':0:U',
                   *average_rras)
    extreme_rras = ('RRA:MAX:0.5:1:17280',    # raw samples for 1 day
                    'RRA:MAX:0.5:720:240',    # 1 hour maxima
                    'RRA:MAX:0.5:12780:10',   # 1 day maxima
                    'RRA:MIN:0.5:1:17280',    # raw samples for 1 day
                    'RRA:MIN:0.5:720:240',    # 1 hour minima
                    'RRA:MIN:0.5:12780:10')   # 1 day minima
    rrdtool.create(str(memfile + '.rrd'),
                   '--step', str(time_interval), '--start', now,
                   'DS:mem_percent:GAUGE:' + heartbeat + ':0:U',
                   *(extreme_rras + average_rras))
def createRRDFile(self, dataPointName, rrdCreateCommand):
    """
    Create an RRD file if it does not exist or if the step value has
    changed. Returns the basename of the rrdFile, suitable for checking
    thresholds.
    """
    self.log.debug('Checking RRD File for %s' % dataPointName)
    if not self.daemonName:
        return
    directory = zenPath('perf', 'Daemons', self.monitor)
    if not os.path.exists(directory):
        self.log.debug('Creating directory: %s' % directory)
        os.makedirs(directory)
    fileName = '%s_%s.rrd' % (self.daemonName, dataPointName)
    filePath = zenPath(directory, fileName)
    # The value following '--step' in the create command is the step size.
    step = rrdCreateCommand[rrdCreateCommand.index('--step') + 1]
    needsCreate = (not os.path.exists(filePath)
                   or self._hasStepTimeChanged(filePath, step))
    if needsCreate:
        import getpass
        self.log.debug('Creating RRD file %s as user %s with options %s'
                       % (filePath, getpass.getuser(), rrdCreateCommand))
        rrdtool.create(filePath, *rrdCreateCommand)
    else:
        self.log.debug('RRD file already exists')
def rrdupdate(owdata): if config.rrdenable: stime = int(time.mktime(time.localtime())) path = config.rrdpath step = 300 hb = 3600 xff = 0.5 HOUR = 3600 YEAR = 31536000 steps1 = 1 rows1 = YEAR // step for sensor in owdata: (value, timestamp) = owdata[sensor] if value == config.owfail: continue rrdfile = '%s/%s.rrd' % (path, sensor.upper()) if not os.path.isfile(rrdfile): try: rrdtool.create(rrdfile, '--step', '%d' % step, 'DS:data:GAUGE:%d:U:U' % hb, 'RRA:AVERAGE:%d:%d:%d' % (xff, steps1, rows1)) except rrdtool.error, e: logger.warning(e) logger.debug("RRD %s created" % sensor) info = rrdtool.info(rrdfile) if ((stime - info['last_update']) > step): try: rrdtool.update(rrdfile,'%s:%s' % (timestamp, value)) except rrdtool.error, e: logger.warning(e) logger.debug("RRD %s updated" % sensor)
def create_rrd_db(self):
    """Create the cluster statistics RRD and record whether it is usable.

    `cfg.points_to_summarize` raw samples roll into one AVERAGE/MAX point,
    with enough points kept to cover `cfg.days_to_keep` days.
    """
    cfg.log.debug("Creating generic rrd database %s" % self.filename)
    overdue_secs = self.interval_secs * 2
    summary_interval = self.interval_secs * cfg.points_to_summarize
    intervals_per_day = 86400 / summary_interval
    intervals_to_keep = cfg.days_to_keep * intervals_per_day
    # Hold 180 days of data before it rolls out, average/max calculated
    # every 4 hours; samples are expected every hour.
    gauges = ['DS:%s:GAUGE:%s:U:U' % (ds, overdue_secs)
              for ds in ('node_count', 'nodes_active', 'raw_capacity',
                         'raw_used', 'usable_capacity', 'used_capacity')]
    args = ([str(self.filename),
             '--step', str(cfg.interval_secs),
             '--start', '0']
            + gauges
            + ['RRA:AVERAGE:0.5:%d:%d' % (cfg.points_to_summarize,
                                          intervals_to_keep),
               'RRA:MAX:0.5:%d:%d' % (cfg.points_to_summarize,
                                      intervals_to_keep)])
    try:
        rrdtool.create(*args)
        self.db_usable = True
    except rrdtool.error:
        self.db_usable = False
def touch_rrd(item):
    """
    create a rrd database for each ip, if not exists
    """
    ip = item['ip']
    for _type in item['result']:
        # One directory per monitored IP.
        if not os.path.isdir('./%s' % ip):
            os.mkdir('./%s' % ip)
        for result in item['result'][_type]:
            # example: ['166.111.8.28','30','ms']
            rrd_filename = './%s/%s_%s.rrd' % (ip, _type,
                                               format_url(result[0]))
            if not os.path.exists(rrd_filename):
                rrdtool.create(rrd_filename,
                               '--step', '900',
                               '--start', '-8640000',
                               "DS:result:GAUGE:2000:U:U",
                               "RRA:AVERAGE:0.5:1:600",
                               "RRA:AVERAGE:0.5:24:775",
                               "RRA:MAX:0.5:1:600",
                               "RRA:MAX:0.5:24:775",
                               )
            # then update it
            rrdtool.update(rrd_filename,
                           "%d:%s" % (item['time'], result[1]))
def create_rrd(filename, when = None):
    """Create a flow-accounting RRD with per-protocol flow/byte/packet
    counters.

    :param filename: path of the new RRD
    :param when: optional begin time passed to rrdtool as -b
    """
    args = [filename]
    if when is not None:
        args.append("-b%s" % when)
    args.append("-s300")
    # Six protocols x three metrics, grouped metric-major as before.
    protocols = ("icmp", "tcp", "udp", "gre", "esp", "other")
    for metric in ("flows", "bytes", "packets"):
        for proto in protocols:
            args.append("DS:%s_%s:ABSOLUTE:600:0:U" % (proto, metric))
    args += [
        "RRA:AVERAGE:0.75:1:4800",
        "RRA:AVERAGE:0.5:6:2400",
        "RRA:AVERAGE:0.5:24:1200",
        "RRA:AVERAGE:0.5:288:1500",
    ]
    print >> sys.stderr, "Creating %s" % filename
    rrdtool.create(*args)
def update():
    """Feed the speedtest RRD with the latest measurement and regenerate
    the bandwidth graphs for every configured period."""
    db_file = RRD_PATH + 'speedtest.rrd'
    # Lazily create the database on first run.
    if not os.path.isfile(db_file):
        rrdtool.create(db_file, '-s', '300',
                       'DS:in:GAUGE:600:0:U',
                       'DS:out:GAUGE:600:0:U',
                       'DS:ping:GAUGE:600:0:U',
                       'RRA:AVERAGE:0.5:1:576',
                       'RRA:AVERAGE:0.5:6:672',
                       'RRA:AVERAGE:0.5:24:732',
                       'RRA:AVERAGE:0.5:144:1460')
    stats = load()
    sample = 'N:%s:%s:%s' % (stats['in'], stats['out'], stats['ping'])
    error = rrdtool.update(db_file, '-t', 'in:out:ping', sample)
    if error:
        log(__file__, 'An error occured : ' + error)
    else:
        for period in PERIODS:
            graph(period)
        log(__file__, 'BandWith stats : %sMB/s up, %sMB/s down'
            % (stats['in'] / (1024 * 1024), stats['out'] / (1024 * 1024)))
    return
def rrdcreate(self):
    """Build the rrdtool create argument list from this model and run it.

    Returns None without doing anything when the record has no id yet.

    Fix: the DS strings were collected into a list that was then appended
    as a single nested element (``cmd.append(ds)``), so rrdtool.create
    received a list where a string argument was expected; the DS strings
    are now added to the flat argument list directly.
    """
    if not self.id:
        return None
    cmd = [str(self.get_rrd_path()),
           # options
           '--start', str(self.start.strftime("%s")),
           '--step', str(self.step)]
    if not self.overwrite:
        cmd.append('--no-overwrite')
    for name in self.get_ds_list():
        row = getattr(self, name, None)
        if row:
            cmd.append(str(row.to_string()))
    for name in self.get_rra_list():
        row = getattr(self, name, None)
        if row:
            cmd.append(str(row.to_string()))
    rrdtool.create(*cmd)
def createDatabase(self):
    '''Create rrd databases. Existing databases will not be overwritten.'''
    # Fix: `params = globals()` returned the live module dict, and the
    # subsequent update(locals()) injected this method's locals (self,
    # profile, ...) into the module namespace. Work on a copy instead.
    params = dict(globals())
    params.update(locals())
    if not os.path.exists(self.rrd_power):
        logging.info("creating new rrd database %s" % self.rrd_power)
        data_sources = []
        for profile in DAILY_PROFILES:
            mi, ma = RANGES[profile]
            data_sources.append('DS:%s:GAUGE:120:%i:%i' % (profile, mi, ma))
        rra = [
            # short-term
            'RRA:AVERAGE:0.5:%(SHORT_TERM_VALUES_PER_AGGREGATE)i:%(SHORT_TERM_NUM_VALUES)i',
            # medium term
            'RRA:AVERAGE:0.5:%(MEDIUM_TERM_VALUES_PER_AGGREGATE)i:%(MEDIUM_TERM_NUM_VALUES)i',
            'RRA:MIN:0.5:%(MEDIUM_TERM_VALUES_PER_AGGREGATE)i:%(MEDIUM_TERM_NUM_VALUES)i',
            'RRA:MAX:0.5:%(MEDIUM_TERM_VALUES_PER_AGGREGATE)i:%(MEDIUM_TERM_NUM_VALUES)i',
            # longterm profile
            'RRA:AVERAGE:0.5:%(LONG_TERM_VALUES_PER_AGGREGATE)i:%(LONG_TERM_NUM_VALUES)i',
            'RRA:MIN:0.5:%(LONG_TERM_VALUES_PER_AGGREGATE)i:%(LONG_TERM_NUM_VALUES)i',
            'RRA:MAX:0.5:%(LONG_TERM_VALUES_PER_AGGREGATE)i:%(LONG_TERM_NUM_VALUES)i',
        ]
        rra = [x % params for x in rra]
        rrdtool.create(self.rrd_power, '--step', str(HEART_BEAT),
                       *(data_sources + rra))
def create_database(dbname): print "CREATE DATABASE" rrdtool.create( "tree/database/" + dbname + ".rrd", "--step", "300", "--start", "0", "DS:pmin:GAUGE:600:U:U", "DS:pmax:GAUGE:600:U:U", "DS:pavg:GAUGE:600:U:U", "DS:plos:GAUGE:600:0:100", "RRA:AVERAGE:0.5:1:10080", "RRA:AVERAGE:0.5:5:8934", "RRA:AVERAGE:0.5:60:8784", "RRA:AVERAGE:0.5:1440:3660", "RRA:MIN:0.5:1:10080", "RRA:MIN:0.5:5:8934", "RRA:MIN:0.5:60:8784", "RRA:MIN:0.5:1440:3660", "RRA:MAX:0.5:1:10080", "RRA:MAX:0.5:5:8934", "RRA:MAX:0.5:60:8784", "RRA:MAX:0.5:1440:3660", )
def create_rrd_db(dsname, offs, step=60):
    """(Re)create <rrdpath>/<dsname>.rrd, removing any existing file first.

    Progress and errors are shown on the curses window `win`; a failed
    create tears down curses and exits the process.

    Fix: the "has been removed" message sat in a `finally` block, so it
    was also printed (overwriting the error text) when the removal had
    just failed; it now runs only on successful removal via `else`.

    :param dsname: data-source name, also used as the file basename
    :param offs: start time handed to rrdtool
    :param step: sampling step in seconds
    """
    rrdf = rrdpath + "/" + dsname + ".rrd"
    try:
        os.remove(rrdf)
    except:
        win.addstr(6, 0, "Failed to remove " + rrdf)
        win.refresh()
    else:
        win.addstr(6, 0, "RRD file: " + rrdf + " has been removed")
        win.refresh()
    try:
        rrdtool.create(str(rrdf),
                       "--step", str(step),
                       "--start", str(offs),
                       "DS:" + str(dsname) + ":GAUGE:120:0:24000",
                       "RRA:AVERAGE:0.5:1:864000",
                       "RRA:AVERAGE:0.5:60:129600",
                       "RRA:AVERAGE:0.5:3600:13392")
    except Exception as e:
        win.addstr(7, 0, "Failed to create a new RRD file: " + rrdf + " " + str(e))
        win.insstr(9, 0, " ")
        win.refresh()
        curses.endwin()
        exit()
def create_rrd(): print 'Creating RRD: ' + count_rrd # Create RRD to store counter and consumption: # 1 trigger cycle matches consumption of 1/revs_per_kWh # Counter is GAUGE (kWh) # Consumption is ABSOLUTE (W) # 1 value per minute for 3 days # 1 value per day for 30 days # 1 value per week for 10 years # Consolidation LAST for counter # Consolidation AVERAGE for consumption try: rrdtool.create(count_rrd, '--no-overwrite', '--step', '60', 'DS:counter:GAUGE:86400:0:1000000', 'DS:consum:ABSOLUTE:86400:0:1000000', 'RRA:LAST:0.5:1:4320', 'RRA:AVERAGE:0.5:1:4320', 'RRA:LAST:0.5:1440:30', 'RRA:AVERAGE:0.5:1440:30', 'RRA:LAST:0.5:10080:520', 'RRA:AVERAGE:0.5:10080:520') except Exception as e: print 'Error ' + str(e)
def create_rrd(rrd_dir, host, port):
    """Create the memcached status RRD for one host:port instance."""
    counters = ['DS:%s_request:COUNTER:1800:0:U' % kind
                for kind in ('total', 'get', 'set')]
    gauges = ['DS:total_mem:GAUGE:1800:0:U', 'DS:used_mem:GAUGE:1800:0:U']
    averages = ['RRA:AVERAGE:0.5:%d:%d' % pair
                for pair in ((1, 360), (5, 288), (30, 336), (120, 372),
                             (1440, 366), (10080, 262))]
    maxima = ['RRA:MAX:0.5:%d:%d' % pair
              for pair in ((5, 288), (30, 336), (120, 372), (1440, 366),
                           (10080, 262), (10, 228))]
    minima = ['RRA:MIN:0.5:%d:%d' % pair
              for pair in ((5, 288), (30, 336), (120, 372), (1440, 366),
                           (10080, 262))]
    rrdtool.create(rrd_dir + host + "_" + port + "_mem_status.rrd",
                   "--start", '-10s',
                   "--step", "60",
                   *(counters + gauges + averages + maxima + minima))
def update(): DB = RRD_PATH + 'voltages.rrd' ds = [] for sensor in cfg.Sensors: ds.append('DS:'+sensor+':GAUGE:600:0:U') if not os.path.isfile(DB): rrdtool.create(DB, '-s', '300', ds, 'RRA:AVERAGE:0.5:1:576', 'RRA:AVERAGE:0.5:6:672', 'RRA:AVERAGE:0.5:24:732', 'RRA:AVERAGE:0.5:144:1460' ) stats = {} for sensor in cfg.Sensors: stats[sensor] = load(sensor) error = rrdtool.update(DB, '-t', ':'.join(cfg.Sensors), 'N:'+':'.join(stats.values()) ) print ':'.join(stats.values()) if error: log(__file__, 'An error occured : ' + error) else: for period in PERIODS: for sensor in cfg.Sensors: graph(sensor, period) if hasattr(cfg, 'MergeGraphs') and cfg.MergeGraphs == 1: graphMerge(period) log(__file__, 'voltage update : %s' % str(stats)) return
def mem_check():
    """Create the memory and swap RRDs under rrd/ when they are missing.

    Uses the module-level `step`, `heartbeat` and `rra_*` archive
    definitions. Fix: replaced the un-idiomatic `== False` existence
    comparisons with `not os.path.exists(...)`.
    """
    path = "rrd/mem.rrd"
    swap = "rrd/swap.rrd"
    if not os.path.exists(path):
        rrdtool.create(path,
                       "--step", str(step),
                       "DS:used:GAUGE:%i:0:U" % heartbeat,
                       "DS:buffers:GAUGE:%i:0:U" % heartbeat,
                       "DS:cached:GAUGE:%i:0:U" % heartbeat,
                       "DS:swap:GAUGE:%i:0:U" % heartbeat,
                       rra_month, rra_month_min, rra_month_max,
                       rra_year, rra_year_min, rra_year_max)
    if not os.path.exists(swap):
        rrdtool.create(swap,
                       "--step", str(step),
                       "DS:swap:GAUGE:%i:0:U" % heartbeat,
                       rra_month, rra_month_min, rra_month_max,
                       rra_year, rra_year_min, rra_year_max)
    return
def _generate_rrd(self):
    '''
    Create test.rrd and fill it with one day of synthetic counter data;
    returns the absolute path of the generated file.
    '''
    stime = int(time.time()) - 1 * 86400   # start one day in the past
    dpoints = 288                          # one sample every 300 s
    etime = stime + (dpoints * 300)
    fname = os.path.join(os.path.abspath(os.path.dirname('./')), 'test.rrd')
    rrdtool.create('test.rrd',
                   '--start', str(stime),
                   "DS:input:COUNTER:600:U:U",
                   "DS:output:COUNTER:600:U:U",
                   "RRA:AVERAGE:0.5:1:600",
                   "RRA:AVERAGE:0.5:6:700",
                   "RRA:AVERAGE:0.5:24:775",
                   "RRA:AVERAGE:0.5:288:797",
                   "RRA:MAX:0.5:1:600",
                   "RRA:MAX:0.5:6:700",
                   "RRA:MAX:0.5:24:775",
                   "RRA:MAX:0.5:444:797",)
    ctime = stime
    input = 0
    output = 0
    # Counters grow by a random amount per interval, scaled by bandwidth.
    for i in xrange(dpoints):
        low = self.bandwidth / 2
        high = self.bandwidth + self.bandwidth * 2
        input += random.randrange(low, high) * 100
        output += random.randrange(low, high) * 100
        ctime += 300
        rrdtool.update(fname, '%d:%d:%d' % (ctime, input, output))
    return os.path.join(os.path.abspath(os.path.dirname('./')), "test.rrd")
def create_rrd(rrd_file,m_type="None"): if m_type=="host": ret = rrdtool.create( rrd_file, "--step", str(STEP) ,"--start", str(int(time.time())), "DS:cpu:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:ram:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:dr:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:dw:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:tx:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:rx:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:tmp:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:pwr:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "RRA:MIN:0:1:200000", # Data stored for every five minute "RRA:AVERAGE:0.5:12:100", # Average data stored for every hour (300*12) "RRA:AVERAGE:0.5:288:50", # Average data stored for every day (300*288) "RRA:AVERAGE:0.5:8928:24", # Average data stored for every month (300*8928) "RRA:AVERAGE:0.5:107136:10")# Average data stored for every year (300*107136) else: ret = rrdtool.create( rrd_file, "--step", str(STEP) ,"--start", str(int(time.time())), "DS:cpu:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:ram:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:dr:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:dw:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:tx:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "DS:rx:GAUGE:%s:0:U" % str(TIME_DIFF_MS), "RRA:MIN:0:1:200000", # Data stored for every five minute "RRA:AVERAGE:0.5:12:100", # Average data stored for every hour (300*12) "RRA:AVERAGE:0.5:288:50", # Average data stored for every day (300*288) "RRA:AVERAGE:0.5:8928:24", # Average data stored for every month (300*8928) "RRA:AVERAGE:0.5:107136:10")# Average data stored for every year (300*107136) if ret: rrd_logger.warn(rrdtool.error())
#!/usr/bin/python #-*- coding: iso-8859-15 -*- # INDUINO METEOSTATION # http://induino.wordpress.com # # NACHO MAS 2013 import sys import rrdtool from meteoconfig import * #10s raw values for 3hour, 1min for 24 hours, 5 min for 24*7 hours, # 1hour for 1 year, 1day dor 10 years! ret = rrdtool.create( RRDFILE, "--step", "1", "--start", '0', "DS:HR:GAUGE:600:U:U", "DS:Thr:GAUGE:600:U:U", "DS:IR:GAUGE:600:U:U", "DS:Tir:GAUGE:600:U:U", "DS:P:GAUGE:600:U:U", "DS:Tp:GAUGE:600:U:U", "DS:Dew:GAUGE:600:U:U", "DS:Light:GAUGE:600:U:U", "DS:T:GAUGE:600:U:U", "DS:clouds:GAUGE:600:U:U", "DS:skyT:GAUGE:600:U:U", "DS:cloudFlag:GAUGE:600:U:U", "DS:dewFlag:GAUGE:600:U:U", "DS:frezzingFlag:GAUGE:600:U:U", "RRA:AVERAGE:0.5:1:10800", "RRA:AVERAGE:0.5:60:1440", "RRA:AVERAGE:0.5:300:1008", "RRA:AVERAGE:0.5:3600:8760", "RRA:AVERAGE:0.5:86400:3650", "RRA:MAX:0.5:1:10800", "RRA:MAX:0.5:60:1440", "RRA:MAX:0.5:300:1008", "RRA:MAX:0.5:3600:8760", "RRA:MAX:0.5:86400:3650", "RRA:MIN:0.5:1:10800", "RRA:MIN:0.5:60:1440", "RRA:MIN:0.5:300:1008", "RRA:MIN:0.5:3600:8760", "RRA:MIN:0.5:86400:3650") if ret: print rrdtool.error()
def get_mysqlstatus(db_host):
    # Collect MySQL counters via `show global status`, feed them into the
    # per-host RRD and derive 5-minute averages; updates the module-level
    # `status`/`output` used for service reporting.
    # NOTE(review): `db_host` is unused here; the module-level `cursor` and
    # `host` are used instead -- confirm that is intentional.
    global status
    global output
    # Running sums over the fetched RRD rows, averaged further below.
    connection1 = 0.0
    slow_queries1 = 0.0
    com_select1 = 0
    com_update1 = 0
    com_delete1 = 0
    com_insert1 = 0
    com_questions1 = 0
    valid_count = 1
    sql = 'show global status where Variable_name in (\
"Connections", \
"Slow_queries", \
"Qcache_hits",\
"Qcache_inserts", \
"Qcache_lowmem_prunes", \
"Qcache_not_cached", \
"Queries", \
"Questions", \
"Com_select", \
"Com_insert", \
"Com_update", \
"Com_delete", \
"Table_locks_waited", \
"Uptime" \
)'
    try:
        cursor.execute(sql)
        results = cursor.fetchall()
        # Pick out the handful of counters that get stored in the RRD.
        for row in results:
            if row[0] == "Slow_queries":
                slow_queries = row[1]
            if row[0] == "Connections":
                connection = row[1]
            if row[0] == "Com_update":
                com_update = row[1]
            if row[0] == "Com_select":
                com_select = row[1]
            if row[0] == "Com_delete":
                com_delete = row[1]
            if row[0] == "Com_insert":
                com_insert = row[1]
            if row[0] == "Questions":
                questions = row[1]
        if os.path.exists(rrd_dir + host + "_mysql_status.rrd"):
            # Timestamp is backdated 10 s to match the '-10s' create start.
            rrdtool.update(
                rrd_dir + host + "_mysql_status.rrd",
                '%s:%s:%s:%s:%s:%s:%s:%s' % (time.strftime(
                    "%s", time.localtime(time.time() - 10)),
                    str(connection), str(slow_queries), str(com_select),
                    str(com_insert), str(com_update), str(com_delete),
                    str(questions)))
            # Average the last 5 minutes of stored rates.
            (start, ds, data) = rrdtool.fetch(
                rrd_dir + host + "_mysql_status.rrd",
                "AVERAGE", "-s", "-300s")
            for row in data:
                if row[0] != None:
                    connection1 += row[0]
                    slow_queries1 += row[1]
                    com_select1 += row[2]
                    com_insert1 += row[3]
                    com_update1 += row[4]
                    com_delete1 += row[5]
                    com_questions1 += row[6]
                    valid_count += 1
            # NOTE(review): valid_count starts at 1, so the averages are
            # biased slightly low -- confirm whether that is intentional.
            slow_queries1 = slow_queries1 / valid_count
            connection1 = connection1 / valid_count
            com_select1 = com_select1 / valid_count
            com_insert1 = com_insert1 / valid_count
            com_update1 = com_update1 / valid_count
            com_delete1 = com_delete1 / valid_count
            com_questions1 = com_questions1 / valid_count
            output += "slow_queries1:%f, connection1:%f, select:%f, insert:%f, update:%f, delete:%f, questions:%f" % (
                slow_queries1, connection1, com_select1, com_insert1, com_update1,
                com_delete1, com_questions1)
            output += "connection:%s" % connection
            output += "slow_query:%s" % slow_queries
            # Thresholds that flip the service status to critical.
            if slow_queries1 > 100:
                output += "too many slow_queries"
                status = service_status['critical']
            if connection1 > 500:
                output += "too many connections"
                status = service_status['critical']
        else:
            # First run for this host: create the RRD; data starts flowing
            # on the next invocation.
            rrdtool.create(
                rrd_dir + host + "_mysql_status.rrd",
                "--start", '-10s',
                "--step", "60",
                "DS:connections:COUNTER:1800:0:U",
                "DS:slow_queries:COUNTER:1800:0:U",
                "DS:com_select:COUNTER:1800:0:U",
                "DS:com_insert:COUNTER:1800:0:U",
                "DS:com_update:COUNTER:1800:0:U",
                "DS:com_delete:COUNTER:1800:0:U",
                "DS:Questions:COUNTER:1800:0:U",
                "RRA:AVERAGE:0.5:1:360",
                "RRA:AVERAGE:0.5:5:288",
                "RRA:AVERAGE:0.5:30:336",
                "RRA:AVERAGE:0.5:120:372",
                "RRA:AVERAGE:0.5:1440:366",
                "RRA:AVERAGE:0.5:10080:262",
                "RRA:MAX:0.5:5:288",
                "RRA:MAX:0.5:30:336",
                "RRA:MAX:0.5:120:372",
                "RRA:MAX:0.5:1440:366",
                "RRA:MAX:0.5:10080:262",
                "RRA:MAX:0.5:10:228",
                "RRA:MIN:0.5:5:288",
                "RRA:MIN:0.5:30:336",
                "RRA:MIN:0.5:120:372",
                "RRA:MIN:0.5:1440:366",
                "RRA:MIN:0.5:10080:262",
            )
    except Exception, e:
        print e
# -*- coding: utf-8 -*- #!/usr/bin/python import rrdtool import time cur_time = str(int(time.time())) rrd = rrdtool.create( 'Flow.rrd', '--step', '300', '--start', cur_time, 'DS:eth0_in:COUNTER:600:0:U', 'DS:eth0_out:COUNTER:600:0:U', 'RRA:AVERAGE:0.5:1:600', 'RRA:AVERAGE:0.5:6:700', 'RRA:AVERAGE:0.5:24:775', 'RRA:AVERAGE:0.5:288:797', 'RRA:MAX:0.5:1:600', 'RRA:MAX:0.5:6:700', 'RRA:MAX:0.5:24:775', 'RRA:MAX:0.5:444:797', 'RRA:MIN:0.5:1:600', 'RRA:MIN:0.5:6:700', 'RRA:MIN:0.5:24:775', 'RRA:MIN:0.5:444:797') if rrd: print rrdtool.error()
def get(community, version_snmp, ip, port, oid):
    """SNMP GET a single OID and return its value as a string.

    Fix: the function carried a spurious `self` first parameter although
    it is a plain module-level function — the five-argument calls below
    previously shifted every argument by one position.
    """
    errorIndication, errorStatus, errorIndex, varBinds = next(
        getCmd(SnmpEngine(),
               CommunityData(str(community), mpModel=version_snmp),
               UdpTransportTarget((str(ip), port)),
               ContextData(),
               ObjectType(ObjectIdentity(str(oid)))))
    if errorIndication:
        print(errorIndication)
    elif errorStatus:
        print('%s at %s' % (errorStatus.prettyPrint(),
                            errorIndex and varBinds[int(errorIndex) - 1][0] or '?'))
    else:
        for varBind in varBinds:
            result = str(varBind).split("=")
            return result[1]


# tcpInSegs / tcpOutSegs counters.
intcp = get("comunidadEquipo1_grupo4cm1", 1, "localhost", 161,
            "1.3.6.1.2.1.6.10.0")
outtcp = get("comunidadEquipo1_grupo4cm1", 1, "localhost", 161,
             "1.3.6.1.2.1.6.11.0")
query = "N:" + str(intcp) + ":" + str(outtcp) + ""
# Fix: the database was created as 'parctica2.rrd' (typo) while the graph
# below reads 'practica2.rrd'; one spelling is used everywhere now.
ret = rrdtool.create("practica2.rrd", "--start", 'N',
                     "DS:intcp:COUNTER:600:U:U",
                     "DS:outtcp:COUNTER:600:U:U",
                     "RRA:AVERAGE:0.5:1:24",
                     "RRA:AVERAGE:0.5:6:10")
# Fix: DEF requires a vname (DEF:vname=file:ds:CF) and LINE1 referenced an
# undefined 'myspeed'; the intcp series is drawn instead.
# NOTE(review): '--end' 920808000 (March 1999) predates '--start N' --
# confirm the intended graphing window.
gra = rrdtool.graph("practica2.png", "--start", "N", "--end", "920808000",
                    "DEF:intcp=practica2.rrd:intcp:AVERAGE",
                    "DEF:outtcp=practica2.rrd:outtcp:AVERAGE",
                    "LINE1:intcp#FF0000")
# Make the local CGI helpers importable before the project imports below.
sys.path.append('/usr/lib/cgi-bin/ng')
import config
from extractprofile import SocketProfile
import rrdtool

profId = 'stats/all'
rrdFileName = '/var/lib/profile-stats-logger/stats.rrd'

# Create the profile-cache statistics database on first run.
if not os.path.exists(rrdFileName):
    rrdtool.create(
        rrdFileName,
        '--step', '10',
        'DS:pcache_hit:DERIVE:60:0:U',
        'DS:pcache_miss_absent:DERIVE:60:0:U',
        'DS:pcache_miss_expired:DERIVE:60:0:U',
        'DS:pcache_miss_invalid:DERIVE:60:0:U',
        'RRA:AVERAGE:0.1:1:8640',  # 10s intervals for 24 hours
        'RRA:AVERAGE:0.1:30:8640',  # 5 min intervals for 1 month
        'RRA:AVERAGE:0.1:360:8640'  # 1 hour intervals for 1 year
    )

# Align the first tick to the next 10-second boundary, then tick every 10 s.
# NOTE(review): the sampling work presumably follows inside this loop in
# the rest of the file; only the scheduling skeleton is visible here.
nextTime = math.floor(time.time() / 10) * 10 + 10
while True:
    t = time.time()
    # Sleep (possibly in several pieces) until the scheduled tick arrives.
    while t < nextTime:
        time.sleep(nextTime - t)
        t = time.time()
    nextTime += 10
#!/usr/bin/env python import rrdtool ret = rrdtool.create("netPred.rrd", "--start", 'N', "--step", '100', "DS:inoctets:COUNTER:600:U:U", "DS:outoctets:COUNTER:600:U:U", "RRA:AVERAGE:0.5:1:2016", "RRA:HWPREDICT:1000:0.9:0.0035:288:3", "RRA:SEASONAL:288:0.9:2", "RRA:DEVSEASONAL:288:0.9:2", "RRA:DEVPREDICT:1000:4", "RRA:FAILURES:288:7:9:4") if ret: print rrdtool.error()
def create(self, filename):
    # Create `filename` from this object's configured step, data sources
    # and archives.
    # NOTE(review): `self.data_sources` is passed as a single positional
    # argument while `self.archives` is unpacked; if data_sources is a
    # list of DS strings it must be unpacked too (*self.data_sources) --
    # confirm its actual type before changing.
    rrdtool.create(filename,
                   "--step", str(self.step),
                   self.data_sources,
                   *self.archives)
def main():
    # Read "date name value" lines from stdin, route each sample into a
    # per-name RRD (created on demand from the config), and run a
    # background thread that regenerates graphs periodically.
    codecs.getreader('utf-8')(sys.stdin)
    # NOTE(review): `data` and RULE_ORDER are set up but not used in the
    # code visible here -- possibly consumed by generate_graph/get_params.
    data = {}
    RULE_ORDER = ['DEF', 'VDEF', 'CDEF']
    EOF = False
    config_filename = '/etc/loxone-rrd.conf'
    log(u'Starting up')
    opts, args = getopt.getopt(sys.argv[1:], 'c:', ['config='])
    for o, a in opts:
        if o in ('-c', '--config'):
            config_filename = a
    config = load_config(config_filename)
    graph_interval = int(
        config.get('Parameters', {}).get('graph_interval', 120))
    log("Setting up graph generation interval: {}".format(graph_interval))
    # Graph generation runs concurrently with the stdin consumer below.
    t = threading.Thread(target=generate_graph, kwargs={
        'interval': graph_interval,
        'config': config
    })
    t.start()
    while not EOF:
        try:
            line = sys.stdin.readline()
        except (BaseException, Exception) as e:
            log("Error during read: {}".format(e))
            EOF = True
            break
        if line == '':
            # readline() returning '' means end of input.
            EOF = True
            break
        line = line.strip()
        r = pattern.search(line)
        if not r:
            continue
        (date, data_name, value) = r.groups()
        data_name = unicode(data_name, 'utf-8')
        ts = time.mktime(
            datetime.datetime.strptime(r.group(1),
                                       "%Y-%m-%d %H:%M:%S").timetuple())
        if not config['Data'].get(data_name):
            log("Unknown data key: {}".format(data_name))
            continue
        fname = u'{}.rrd'.format(data_name).encode('utf-8')
        if not os.path.isfile(fname):
            log("Create new RRD database: {}".format(data_name))
            p = get_params(config['Data'][data_name])
            # NOTE(review): the whole argument vector is passed as one
            # list; older rrdtool bindings want individual string
            # arguments (rrdtool.create(fname, *p)) -- confirm which
            # binding is in use.
            try:
                rrdtool.create([fname] + p)
            except Exception as e:
                log("Error creating RRD: {}".format(e))
        try:
            rrdtool.update(fname, '{}:{}'.format(ts, value))
        except Exception as e:
            log("Error updating RRD: {}".format(e))
    log("Exiting")
    t.join()
    def _createRRD(self, hostname):
        """Create the per-host RRD files for one monitored agent.

        Filename templates come from ``self._fname`` keyed by entries of
        ``self._names``: index 4 = per-interface traffic (with Holt-Winters
        aberrant-behaviour RRAs on the 'in' counter), 5 = ram, 6 = per-cpu
        load, 7 = hdd, anything else = plain in/out counters.
        Python 2 code (print statements).  The ``if True:``/``else:`` pair
        is a disabled try/except kept from an earlier revision -- the else
        branch is unreachable.
        """
        self._step = 5
        # Python 2 integer division -> 100 / 50 / 25 rows respectively
        self._rrdSize = 500/5
        self._predict = 250/5
        self._season = 125/5
        if True:  #try:
            for fname in self._names:
                s = self._fname[fname]
                if fname == self._names[4]:
                    # one RRD per network interface
                    for i in range(self._agents[hostname].getNumInterFs()):
                        name = s.format(hostname, i, 'rrd')
                        ret = rrdtool.create(name, '--start', 'N', '--step', str(self._step),
                                             'DS:in:COUNTER:600:0:U',
                                             'RRA:AVERAGE:0.5:1:' + str(self._rrdSize),
                                             'RRA:HWPREDICT:' + str(self._predict) + ':0.5:0.025:' + str(self._season) + ':3',
                                             'RRA:SEASONAL:' + str(self._season*2) + ':0.1:2',
                                             'RRA:DEVSEASONAL:' + str(self._season*2) + ':0.1:2',
                                             'RRA:DEVPREDICT:' + str(self._predict) + ':4',
                                             'RRA:FAILURES:' + str(self._season*2) + ':5:5:4',
                                             'DS:out:COUNTER:600:0:U',
                                             'RRA:AVERAGE:0.5:1:' + str(self._rrdSize))
                        if ret:
                            print name, rrdtool.error()
                elif fname == self._names[5]: #create ram
                    name = s.format(hostname, 'rrd')
                    ret = rrdtool.create(name, '--start', 'N', '--step', str(self._step),
                                         'DS:ram:GAUGE:600:0:U',
                                         'RRA:AVERAGE:0.5:2:' + str(self._rrdSize))
                    if ret:
                        print name, rrdtool.error()
                elif fname == self._names[6]: #create cpu
                    # one RRD per CPU core, load clamped to 0..100
                    for i in range(self._agents[hostname].getNumCPUs()):
                        name = s.format(hostname, i, 'rrd')
                        ret = rrdtool.create(name, '--start', 'N', '--step', str(self._step),
                                             'DS:load:GAUGE:600:0:100',
                                             'RRA:AVERAGE:0.5:2:' + str(self._rrdSize))
                        if ret:
                            print name, rrdtool.error()
                elif fname == self._names[7]: #hdd
                    name = s.format(hostname, 'rrd')
                    ret = rrdtool.create(name, '--start', 'N', '--step', str(self._step),
                                         'DS:hdd:GAUGE:600:0:U',
                                         'RRA:AVERAGE:0.5:2:' + str(self._rrdSize))
                    if ret:
                        print name, rrdtool.error()
                else:
                    # NOTE(review): two *identical* AVERAGE RRAs are passed
                    # here -- possibly one was meant to be MAX/MIN; confirm.
                    name = s.format(hostname, 'rrd')
                    ret = rrdtool.create(name, '--start', 'N', '--step', str(self._step),
                                         'DS:in:COUNTER:600:0:U',
                                         'DS:out:COUNTER:600:0:U',
                                         'RRA:AVERAGE:0.5:2:' + str(self._rrdSize),
                                         'RRA:AVERAGE:0.5:2:' + str(self._rrdSize))
                    if ret:
                        print name, rrdtool.error()
        else:  #except (KeyError, rrdtool.OperationalError) as e:
            print 'Error'
# make 4M ramdisk for graph os.popen('sudo mkdir /media/ramdisk') os.popen('sudo mount -t tmpfs -o size=4M tmpfs /media/ramdisk') #os.chdir('/media/ramdisk') if not os.path.isfile('temperatures.rrd'): print('create rrd') rrdtool.create( "temperatures.rrd", "--step","60", "DS:act_temp:GAUGE:120:-127:127", "DS:gpu:GAUGE:120:-127:127", "DS:cpu:GAUGE:120:-127:127", "DS:atmega:GAUGE:120:-127:127", "DS:sht:GAUGE:120:-127:127", "DS:bmp280:GAUGE:120:-127:127", "DS:mlxamb:GAUGE:120:-127:127", "DS:mlxobj:GAUGE:120:-127:127", "DS:ntc:GAUGE:120:-127:127", "RRA:MAX:0.5:1:1500", "RRA:MAX:0.5:10:1500", "RRA:MAX:0.5:60:1500") nextsensorcheck = 0 everysecond = 0 nexttm = 0 last_backlight_level = 0 a = 0
#!/usr/bin/python import sys import rrdtool ret = rrdtool.create("waterTemp.rrd", "--step", "300", "--start", '0', "DS:watertemp:GAUGE:600:U:U", "RRA:AVERAGE:0.5:1:576", "RRA:AVERAGE:0.5:6:672", "RRA:AVERAGE:0.5:24:732", "RRA:AVERAGE:0.5:144:1460") # 2 days of 5 mins averages # 2 Weeks of 1/2 hour averages # 2 Months of 2 hour averages # 2 years of 12 hour averages if ret: print rrdtool.error()
# Create the interface-traffic RRD.
# NOTE(review): 'cur_time' (start timestamp) must be defined earlier in
# the file -- not visible in this chunk.
rrd = rrdtool.create(
    'Flow.rrd',
    '--step', '300',      # rrdtool expects one value every 300 s (5 min)
    '--start', cur_time,  # timestamp of the first record
    # Data sources eth0_in (inbound) / eth0_out (outbound), both COUNTER
    # (monotonically increasing).  Heartbeat 600 s: after 600 s without a
    # value the slot is stored as UNKNOWN.  Minimum 0; maximum U (unbounded).
    'DS:eth0_in:COUNTER:600:0:U',
    'DS:eth0_out:COUNTER:600:0:U',
    # RRA format: RRA:CF:xff:steps:rows
    # CF is AVERAGE/MAX/MIN; xff=0.5 means a CDP becomes UNKNOWN when more
    # than half of its PDPs are UNKNOWN.
    # every 5 min  (1*300 s)  -> 600 rows, ~2.08 days
    # every 30 min (6*300 s)  -> 700 rows, ~14.58 days (2 weeks)
    # every 2 h    (24*300 s) -> 775 rows, ~64.58 days (2 months)
    # every 24 h   (288*300 s)-> 797 rows, 797 days (~2 years)
    'RRA:AVERAGE:0.5:1:600',
    'RRA:AVERAGE:0.5:6:700',
    'RRA:AVERAGE:0.5:24:775',
    'RRA:AVERAGE:0.5:288:797',
    'RRA:MAX:0.5:1:600',
    'RRA:MAX:0.5:6:700',
    'RRA:MAX:0.5:24:775',
    # NOTE(review): the daily MAX/MIN RRAs use 444 steps where AVERAGE uses
    # 288 (= 24 h) -- looks like a copy typo carried from a tutorial;
    # confirm before relying on the daily MAX/MIN series.
    'RRA:MAX:0.5:444:797',
    'RRA:MIN:0.5:1:600',
    'RRA:MIN:0.5:6:700',
    'RRA:MIN:0.5:24:775',
    'RRA:MIN:0.5:444:797')
def maintain_stats(init=False):
    """Create (``init=True``) or update-and-graph the DrTrigonBot stats RRD.

    One sample per day (step 86400 s, heartbeat 144000 s).  On update, the
    values come from the module-level ``ecount``/``mcount`` dicts and the
    graphs are re-rendered for daily/weekly/monthly windows.
    """
    # http://supportex.net/2011/09/rrd-python/
    # (https://jira.toolserver.org/browse/DRTRIGON-74)
    global localdir  # why is this needed here? nowhere else needed...?!?!!
    import platform, numpy, rrdtool  # for statistics '-stats'
    #rrd_fn = os.path.join(localdir, "stats-01-%s.rrd" % platform.platform())
    rrd_fn = os.path.join(localdir, "stats-01-%s.rrd" % platform.system())
    if init:
        ret = rrdtool.create(rrd_fn, "--step", "86400", "--start", '0',
                             "DS:sum_start:GAUGE:144000:U:U",
                             "DS:sum_end:GAUGE:144000:U:U",
                             "DS:sum_diff:GAUGE:144000:U:U",
                             "DS:bot_sum_disc:GAUGE:144000:U:U",
                             "DS:bot_subster:GAUGE:144000:U:U",
                             "DS:bot_catimages:GAUGE:144000:U:U",
                             "DS:bot_uptime_sum_disc:GAUGE:144000:U:U",
                             "DS:bot_uptime_subster:GAUGE:144000:U:U",
                             "DS:bot_uptime_catimages:GAUGE:144000:U:U",
                             "DS:msg_unknown:GAUGE:144000:U:U",
                             "DS:msg_info:GAUGE:144000:U:U",
                             "DS:msg_warning:GAUGE:144000:U:U",
                             "DS:msg_error:GAUGE:144000:U:U",
                             "DS:msg_critical:GAUGE:144000:U:U",
                             "DS:msg_debug:GAUGE:144000:U:U",
                             "DS:metric1:GAUGE:144000:U:U",
                             "DS:metric2:GAUGE:144000:U:U",
                             "DS:metric3:GAUGE:144000:U:U",
                             "DS:metric4:GAUGE:144000:U:U",
                             "DS:metric5:GAUGE:144000:U:U",
                             # COMPUTE DS: derived at store time, never fed
                             # by update()
                             "DS:sum_diff_inter:COMPUTE:sum_start,sum_end,-",
                             # (number of logfiles, quota, ...)
                             "RRA:AVERAGE:0.5:1:600",
                             "RRA:AVERAGE:0.5:6:700",
                             "RRA:AVERAGE:0.5:24:775",
                             "RRA:AVERAGE:0.5:288:797",
                             "RRA:MIN:0.5:1:600",
                             "RRA:MIN:0.5:6:700",
                             "RRA:MIN:0.5:24:775",
                             "RRA:MIN:0.5:444:797",
                             "RRA:MAX:0.5:1:600",
                             "RRA:MAX:0.5:6:700",
                             "RRA:MAX:0.5:24:775",
                             "RRA:MAX:0.5:444:797",
                             "RRA:LAST:0.5:1:600",
                             "RRA:LAST:0.5:6:700",
                             "RRA:LAST:0.5:24:775",
                             "RRA:LAST:0.5:444:797")
    else:
        (localdir, files, current) = oldlogfiles()
        stat, recent = logging_statistics(current, botcontinuous)
        # for item in current:
        #     stat, recent = logging_statistics([item], botcontinuous)
        #     if stat is None:
        #         continue
        #     end, start = numpy.array(stat['etiming']['end']), numpy.array(stat['etiming']['start'])
        #     shape = min(end.shape[0], start.shape[0])
        #     runtime = end[:shape]-start[:shape]-7200  # -2*60*60 because of time jump during 'set TZ'
        #     if runtime.any() and (runtime.min() < 0):  # DST or not; e.g. ('CET', 'CEST')
        #         runtime += 3600
        #     uptime = numpy.array(runtime).sum()
        (bot_uptime_sum_disc, bot_uptime_subster, bot_uptime_catimages) = (0, 0, 0)
        (metric1, metric2, metric3, metric4, metric5) = (0, 0, 0, 0, 0)
        val = (ecount['start'], ecount['end'],
               (ecount['start']-ecount['end']),
               ecount['sum_disc'], ecount['subster'], ecount['catimages'],
               bot_uptime_sum_disc, bot_uptime_subster, bot_uptime_catimages,
               mcount['unknown'], mcount['info'], mcount['warning'],
               mcount['error'], mcount['critical'], mcount['debug'],
               metric1, metric2, metric3, metric4, metric5,)
        # update
        # NOTE(review): 'val' holds 20 values but the template expands to 22
        # ':%s' placeholders -- as written this raises "not enough arguments
        # for format string"; confirm against the full file whether two more
        # values are appended elsewhere.
        ret = rrdtool.update(rrd_fn, ('N' + (':%s'*22)) % val);
        # show
        #for sched in ['hourly', 'daily', 'weekly', 'monthly']:
        for sched in ['daily', 'weekly', 'monthly']:
            fn = "/home/drtrigon/public_html/DrTrigonBot/test-%s.png" % (sched)
            # ret = rrdtool.graph( fn, "--start", "-1%s" %(sched[0]), "--vertical-label=Num",
            #                      '--watermark=DrTrigonBot.TS',
            #                      "-w 800",
            #                      "DEF:m1_num=%s:metric1:AVERAGE" % rrd_fn,
            #                      "DEF:m2_num=%s:metric2:AVERAGE" % rrd_fn,
            #                      "LINE1:m1_num#0000FF:metric1\\r",
            #                      "LINE2:m2_num#00FF00:metric2\\r",
            #                      "GPRINT:m1_num:AVERAGE:Avg m1\: %6.0lf ",
            #                      "GPRINT:m1_num:MAX:Max m1\: %6.0lf \\r",
            #                      "GPRINT:m2_num:AVERAGE:Avg m2\: %6.0lf ",
            #                      "GPRINT:m2_num:MAX:Max m2\: %6.0lf \\r")
            ret = rrdtool.graph(fn, "--start", "-1%s" %(sched[0]), "--vertical-label=Num",
                                '--watermark=DrTrigonBot.TS',
                                "DEF:end_num=%s:sum_end:AVERAGE" % rrd_fn,
                                "DEF:start_num=%s:sum_start:AVERAGE" % rrd_fn,
                                "DEF:subster_num=%s:bot_subster:AVERAGE" % rrd_fn,
                                "DEF:sum_disc_num=%s:bot_sum_disc:AVERAGE" % rrd_fn,
                                "DEF:catimages_num=%s:bot_catimages:AVERAGE" % rrd_fn,
                                "LINE1:end_num#0000FF:end\\r",
                                "LINE2:start_num#00FF00:start\\r",
                                "LINE4:subster_num#00FF00:subster\\r",
                                "LINE5:sum_disc_num#00FF00:sum_disc\\r",
                                "LINE6:catimages_num#00FF00:catimages\\r",
                                "GPRINT:end_num:AVERAGE:Avg end\: %6.0lf ",
                                "GPRINT:end_num:MAX:Max end\: %6.0lf \\r",
                                "GPRINT:start_num:AVERAGE:Avg start\: %6.0lf ",
                                "GPRINT:start_num:MAX:Max start\: %6.0lf \\r")
def main():
    """CLI entry point: create and/or continuously update a CPU-temp RRD.

    Parses the command line (rrdfile, --step, --create, --continuous,
    --quiet/--verbose, --locale), clamps the step to 10..300 s, and either
    creates a new RRD or reads the system temperature into an existing one
    (once, or in a loop with ``--continuous``).
    """
    try:
        argp = argparse.ArgumentParser(description=__doc__)
        argp.add_argument('rrdfile', help='The name of the RRD to use.')
        argp.add_argument(
            '-S', '--step', default=100, type=int,
            help=
            'Interval in seconds with which data will be fed into the RRD (default: 100 seconds). Will be used when creating a new RRD and as loop delay on continous reading.'
        )
        argp.add_argument('-C', '--create', action="store_true",
                          help='create new round-robin database')
        argp.add_argument(
            '-c', '--continuous', action="store_true",
            help=
            'Continuously reading system temperature and storing to rrd. Using --step as loop delay.'
        )
        argp.add_argument("-q", "--quiet", action="store_true",
                          help='Quiet mode. No output.')
        argp.add_argument('-v', '--verbose', action='count', default=0,
                          help='Increase output verbosity.')
        argp.add_argument('-l', '--locale', default="de_DE",
                          help='Set time locale for logging output')
        # argp.add_argument('--read-test', action="store_true",
        #     help='just read cpu temp until Ctrl+C without using a RRD anyway.')
        args = argp.parse_args()

        # verbosity: -q beats everything; level 99 silences all records
        if args.quiet:
            logging.basicConfig(level=99)
        elif args.verbose > 1:
            logging.basicConfig(level=logging.DEBUG)
        elif args.verbose == 1:
            logging.basicConfig(level=logging.INFO)
        else:
            logging.basicConfig(level=logging.WARNING)
        #else:
        #    logging.basicConfig(level=logging.ERROR)

        locale.setlocale(locale.LC_TIME, args.locale)
        _log.info("Locale is set to {0}".format(
            locale.getlocale(locale.LC_TIME)))

        # clamp the step size to a sane 10..300 second window
        if args.step > 300:
            _log.warning(
                "Step size {0} is too large! Reducing to maximum of 300 seconds."
                .format(args.step))
            step = 300
        elif args.step < 10:
            _log.warning(
                "Step size {0} is too small! In order not to poll thermal data file too often producing too much system load setting step size to minimum of 10 seconds."
                .format(args.step))
            step = 10
        else:
            _log.warning("Step size is set to {0} seconds.".format(args.step))
            step = args.step

        if args.create:
            if os.path.isfile(args.rrdfile):
                print("Cannot create {0}".format(args.rrdfile))
                print("Already exists!")
            else:
                # a = number of steps in 5 minutes; used as RRA step factor
                a = int(round(300 / step))
                print("Creating {0} using a step size of {1} seconds..".format(
                    args.rrdfile, str(step)))
                rrdtool.create(
                    args.rrdfile, "--step", str(step),
                    "DS:temp:GAUGE:{0}:-20:110".format(step * 2),
                    # aggregate data points of 5 minutes and save for 24h
                    "RRA:AVERAGE:0.5:{0}:288".format(a),
                    "RRA:MIN:0.5:{0}:288".format(a),
                    "RRA:MAX:0.5:{0}:288".format(a),
                    # aggregate data points of 1 hour and save for 30 days
                    "RRA:AVERAGE:0.5:{0}:720".format(a * 12),
                    "RRA:MIN:0.5:{0}:720".format(a * 12),
                    "RRA:MAX:0.5:{0}:720".format(a * 12),
                    # aggregate data points of 1 day and save for 1 year
                    "RRA:AVERAGE:0.5:{0}:365".format(a * 288),
                    "RRA:MIN:0.5:{0}:365".format(a * 288),
                    "RRA:MAX:0.5:{0}:365".format(a * 288))
                print("Done.")
        # elif args.read_test:
        #     print("Start reading...")
        #     while True:
        #         print("{0} °C".format(read_cputemp()))
        #         time.sleep(5)
        else:
            if os.path.isfile(args.rrdfile):
                if args.continuous:
                    _log.info(
                        "Starting continuous reading system temperatue every {0} seconds"
                        .format(step))
                    while True:
                        t = update_rrd(args.rrdfile)
                        if t is not None:
                            _log.info("{0}: {1} °C".format(
                                time.strftime("%x %X"), t))
                        else:
                            _log.info("{0}: ---".format(
                                time.strftime("%x %X")))
                        time.sleep(step)
                else:
                    t = update_rrd(args.rrdfile)
                    print("Just once reading system temperatue: {0} °C".format(
                        t))
            else:
                _log.error("Cannot access {0}! File does not exist!".format(
                    args.rrdfile))
    except KeyboardInterrupt:
        print("\nAbbruch durch Benutzer Ctrl+C")
    except TypeError as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        _log.error("Type Error: {0} in line {1}".format(e, exc_tb.tb_lineno))
    except RuntimeError as e:
        # NOTE(review): logging treats the second argument as a %-format
        # argument but the message has no placeholder -- 'e' is never
        # rendered; probably meant "RuntimeError: %s", e.  Confirm.
        _log.error("RuntimeError: ", e)
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        _log.error("Unexpected error {0}: {1} in {2} line {3}".format(
            exc_type, e, fname, exc_tb.tb_lineno))
    finally:
        print("Finally ended")
def on_message(mosq, obj, msg):
    """MQTT callback: store a numeric payload into a per-topic RRD.

    The topic path is mirrored as a directory tree under the configured
    ``data_dir``; on first sight of a topic a ``.info`` JSON sidecar and
    the RRD itself (DS name derived from the topic, archives from config)
    are created, then the value is written with an ``N:<value>`` update.
    """
    # Paho returns a byte string for message.payload. We need to decode it
    # to get it in string form.
    payload = msg.payload.decode('UTF-8')
    logger.debug("Message received on topic: %s with payload: %s" %
                 (msg.topic, payload))
    pl = extract_float(payload)
    if pl == None:
        logger.debug("Unable to get float from payload: %s" % payload)
        return
    logger.info("Message received on topic " + msg.topic + " with QoS " +
                str(msg.qos) + " and payload %s " % str(pl))
    components = msg.topic.split("/")
    # Get the last part of the path
    file_name = components.pop()
    # append .info to info file name
    info_file_name = "%s.info" % file_name
    # append .rrd extension to database file
    file_name = "%s.rrd" % file_name
    # Get the root directory from the config
    dir_name = get_config_item("daemon", "data_dir", "/var/lib/mqtt2rrd/")
    # Append the path components to the root creating any missing directories
    while (len(components) > 0):
        dir_name = os.path.join(dir_name, components.pop(0))
        if not os.path.isdir(dir_name):
            os.mkdir(dir_name)
            logger.debug("Created directory: %s for topic: %s" %
                         (dir_name, msg.topic))
    file_path = os.path.join(dir_name, file_name)
    # DS names are limited to 19 characters, hence the truncation
    graph_name = msg.topic.replace("/", "_")
    graph_name = graph_name.replace(".", "_")
    if len(graph_name) > 19:
        graph_name = graph_name[:19]
    ds = "DS:%s:GAUGE:120:U:U" % graph_name
    ds = str(ds)
    if not os.path.exists(file_path):
        # Create the info file
        info = {
            'topic': msg.topic,
            'created': time.time(),
            'friendly_name': get_config_item(msg.topic, "friendly_name",
                                             msg.topic)
        }
        info_fpath = os.path.join(dir_name, info_file_name)
        f = open(info_fpath, "w")
        json.dump(info, f)
        f.close()
        # Create the RRD file
        try:
            step = get_config_item(msg.topic, "step", 60)
            RRAstr = get_config_item(
                msg.topic, "archives",
                "RRA:AVERAGE:0.5:2:30,RRA:AVERAGE:0.5:5:288,RRA:AVERAGE:0.5:30:336,RRA:AVERAGE:0.5:60:1488,RRA:AVERAGE:0.5:720:744,RRA:AVERAGE:0.5:1440:265"
            )
            # split the comma-separated archive list into clean strings
            RRAs = []
            for i in RRAstr.split(","):
                i = i.lstrip(" ")
                i = i.rstrip(" ")
                i = str(i)
                RRAs.append(i)
            logger.info("Creating RRD file: %s for topic: %s" %
                        (file_path, msg.topic))
            rrdtool.create(str(file_path), "--step", str(step), "--start",
                           "0", ds, *RRAs)
        except rrdtool.error as e:
            logger.error("Could not create RRD for topic: %s: %s" %
                         (ds, str(e)))
    try:
        logger.info("Updating: %s with value: %s" % (file_path, pl))
        rrdtool.update(str(file_path), str("N:%f" % pl))
    except rrdtool.error as e:
        logger.error("Could not log value: %s to RRD %s for topic: %s: %s" %
                     (pl, file_path, msg.topic, str(e)))
# Open-or-create the DHT22 database: if 'filename' opens it already exists,
# otherwise build a new RRD.  Python 2 (print statements); 'filename',
# 'steps', 'writeVerbose' and 'readSettings' are defined earlier in the file.
try:
    with open(filename):
        pass
    print "Datenbankdatei gefunden: " + filename
    i = 1
except IOError:
    print "Ich erzeuge eine neue Datenbank: " + filename
    ret = rrdtool.create(
        "%s" % (filename),
        "--step", "%s" % (steps),
        "--start", '0',
        # seven DHT22-derived gauges, heartbeat 2000 s, unbounded range
        "DS:dht22_sensortemp:GAUGE:2000:U:U",
        "DS:dht22_sensorhum:GAUGE:2000:U:U",
        "DS:dht22_lat:GAUGE:2000:U:U",
        "DS:dht22_uml:GAUGE:2000:U:U",
        "DS:dht22_heat:GAUGE:2000:U:U",
        "DS:dht22_cool:GAUGE:2000:U:U",
        "DS:dht22_lbf:GAUGE:2000:U:U",
        "RRA:AVERAGE:0.5:1:2160",
        "RRA:AVERAGE:0.5:5:2016",
        "RRA:AVERAGE:0.5:15:2880",
        "RRA:AVERAGE:0.5:60:8760",
    )
    i = 1
writeVerbose('************************************************************')
settings = readSettings()
tempstart = int(time.time())
tempstart1 = tempstart
# Create the temperature RRD; "--no-overwrite" makes rrdtool fail instead
# of clobbering an existing file (Python 2 -- print statement below).
import sys
import rrdtool

project_dir = "rrd"

ret = rrdtool.create("rrd/temps.rrd",
                     "--step", "30",
                     "--no-overwrite",
                     # heartbeat 240 s, valid range -20..110
                     "DS:temp:GAUGE:240:-20:110",
                     "RRA:AVERAGE:0.5:1:1200",
                     "RRA:MIN:0.5:12:2400",
                     "RRA:MAX:0.5:12:2400",
                     "RRA:AVERAGE:0.5:12:2400",
                     "RRA:LAST:0.5:1:10")
if ret:
    print rrdtool.error()
# Poll the total in/out octet counters of all SNMP interfaces and record
# them in two RRDs sampled every 5 seconds.
# Bug fix: Session was used below but never imported (only snmp_get and
# the exceptions module were) -- this raised NameError at runtime.
from easysnmp import Session
from easysnmp import snmp_get
from easysnmp import exceptions as exce
import rrdtool

community = "public"
hostname = "localhost"
version = 1

session = Session(hostname=hostname, community=community, version=version)
# number of interfaces exposed by the agent
if_num = int(session.get('ifNumber.0').value)
max_sleep_time = 5.0

# Create the RRDs once: COUNTER DS capped at the 32-bit wrap value,
# heartbeat 10 s; 1 h of 5 s samples plus 25 h of 1-minute averages.
# NOTE(review): 'os' and 'time' must be imported earlier in the file.
if not os.path.isfile("rrdfiles/outoctets.rrd"):
    rrdtool.create('rrdfiles/outoctets.rrd', '--step', '5',
                   'DS:ifOutOctets:COUNTER:10:0:4294967295',
                   'RRA:AVERAGE:0.5:1:720', 'RRA:AVERAGE:0.5:12:300')
if not os.path.isfile("rrdfiles/inoctets.rrd"):
    rrdtool.create('rrdfiles/inoctets.rrd', '--step', '5',
                   'DS:ifInOctets:COUNTER:10:0:4294967295',
                   'RRA:AVERAGE:0.5:1:720', 'RRA:AVERAGE:0.5:12:300')

while True:
    initial_time = time.time()
    outBytes = 0
    inBytes = 0
    # sum the octet counters over all interfaces (SNMP indices are 1-based)
    for i in range(1, if_num + 1):
        outBytes += int(session.get(f'ifOutOctets.{i}').value)
        inBytes += int(session.get(f'ifInOctets.{i}').value)
        # remember not to sum the loopback (maybe)
try: if os.path.isfile(rrdparms['file'] + ".rrd"): print("RRD file %s exists, skipping" % rrdparms['file']) continue rrd_params = list() print("creating: " + rrdparms['file'] + ".rrd") for sensor, params in SENSORS.iteritems(): if params[0] == rrd: # TODO min:max rrd_params.append("DS:" + params[3] + ":GAUGE:300:U:U") if rrd_params: rrd_params += [ "RRA:AVERAGE:0.5:1:576", "RRA:AVERAGE:0.5:2:2016", "RRA:AVERAGE:0.5:4:52560", "RRA:AVERAGE:0.5:24:175200", "RRA:MAX:0.5:1:576", "RRA:MAX:0.5:2:2016", "RRA:MAX:0.5:4:52560", "RRA:MAX:0.5:24:175200", "RRA:MIN:0.5:1:576", "RRA:MIN:0.5:2:2016", "RRA:MIN:0.5:4:52560", "RRA:MIN:0.5:24:175200" ] print(rrd_params) ret = rrdtool.create(rrdparms['file'] + ".rrd", "--step", "300", "--start", '0', *rrd_params) if ret: print(rrdtool.error()) else: print("Skipping " + rrdparms['file'] + ", no sensors defined") except rrdtool.error as e: print("ERROR rrdtool: " + e.message) except Exception as e: print("ERROR " + sys.exc_info()[0])
γ : 0.1 period : 10 ''' ret = rrdtool.create( "netP.rrd", "--start", 'N', "--step", '300', "DS:inoctets:COUNTER:600:U:U", "DS:outoctets:COUNTER:600:U:U", "RRA:AVERAGE:0.5:1:20", "RRA:HWPREDICT:30:0.1:0.0035:10", #RRA:HWPREDICT:rows:alpha:beta:seasonal period[:rra-num] "RRA:SEASONAL:10:0.1:2", #RRA:SEASONAL:seasonal period:gamma:rra-num[:smoothing-window=fraction] "RRA:DEVSEASONAL:10:0.1:2", #RRA:DEVSEASONAL:seasonal period:gamma:rra-num[:smoothing-window=fraction] "RRA:DEVPREDICT:30:4", #RRA:DEVPREDICT:rows:rra-num "RRA:FAILURES:30:7:9:4" #RRA:FAILURES:rows:threshold:window length:rra-num ) #HWPREDICT rra-num is the index of the SEASONAL RRA. #SEASONAL rra-num is the index of the HWPREDICT RRA. #DEVPREDICT rra-num is the index of the DEVSEASONAL RRA. #DEVSEASONAL rra-num is the index of the HWPREDICT RRA. #FAILURES rra-num is the index of the DEVSEASONAL RRA.
def on_message(mosq, obj, msg):
    """MQTT callback: store a numeric payload into a per-topic RRD.

    Variant that uses ``msg.payload`` directly (no decode) and a fixed DS
    name ``val``.  The topic path is mirrored as a directory tree under the
    configured ``data_dir``; on first sight of a topic a ``.info`` JSON
    sidecar and the RRD (archives from config) are created, then the value
    is written with an ``N:<value>`` update.
    """
    logger.debug("Message received on topic: %s with payload: %s." %
                 (msg.topic, msg.payload))
    pl = extract_float(msg.payload)
    if pl == None:
        logger.debug("Unable to get float from payload: %s" % msg.payload)
        return
    # NOTE(review): %d truncates the float payload in this log line --
    # probably cosmetic only; confirm intended.
    logger.info("Message received on topic " + msg.topic + " with QoS " +
                str(msg.qos) + " and payload %d " % pl)
    components = msg.topic.split("/")
    # last path component names the files; the rest become directories
    file_name = components.pop()
    info_file_name = "%s.info" % file_name
    file_name = "%s.rrd" % file_name
    dir_name = get_config_item("daemon", "data_dir", "/var/lib/mqtt2rrd/")
    while (len(components) > 0):
        dir_name = os.path.join(dir_name, components.pop(0))
        if not os.path.isdir(dir_name):
            os.mkdir(dir_name)
            logger.debug("Created directory: %s for topic: %s" %
                         (dir_name, msg.topic))
    file_path = os.path.join(dir_name, file_name)
    # graph_name is computed but no longer used for the DS (see below)
    graph_name = msg.topic.replace("/", "_")
    graph_name = graph_name.replace(".", "_")
    if len(graph_name) > 19:
        graph_name = graph_name[:19]
    ds = "DS:val:GAUGE:300:U:U"  # % graph_name
    # ds = str(ds)
    if not os.path.exists(file_path):
        # Create the info file
        info = {
            'topic': msg.topic,
            'created': time.time(),
            'friendly_name': get_config_item(msg.topic, "friendly_name",
                                             msg.topic)
        }
        info_fpath = os.path.join(dir_name, info_file_name)
        f = open(info_fpath, "w")
        json.dump(info, f)
        f.close()
        # Create the RRD file
        try:
            step = get_config_item(msg.topic, "step", 60)
            RRAstr = get_config_item(
                msg.topic, "archives",
                "RRA:AVERAGE:0.5:2:720,RRA:AVERAGE:0.5:8:540,RRA:AVERAGE:0.5:12:840,RRA:AVERAGE:0.5:16:1260,RRA:AVERAGE:0.5:20:2160,RRA:AVERAGE:0.5:32:4050,RRA:AVERAGE:0.5:60:4272,RRA:AVERAGE:0.5:120:4380,RRA:AVERAGE:0.5:240:4272,RRA:AVERAGE:0.5:720:5475,RRA:AVERAGE:0.5:1440:3650"
            )
            # split the comma-separated archive list into clean strings
            RRAs = []
            for i in RRAstr.split(","):
                i = i.lstrip(" ")
                i = i.rstrip(" ")
                i = str(i)
                RRAs.append(i)
            logger.info("Creating RRD file: %s for topic: %s" %
                        (file_path, msg.topic))
            rrdtool.create(str(file_path), "--step", str(step), "--start",
                           "0", ds, *RRAs)
        except rrdtool.error as e:
            logger.error("Could not create RRD for topic: %s: %s" %
                         (ds, str(e)))
    try:
        logger.info("Updating: %s with value: %s" % (file_path, pl))
        rrdtool.update(str(file_path), str("N:%f" % float(pl)))
    except rrdtool.error as e:
        logger.error("Could not log value: %s to RRD %s for topic: %s: %s" %
                     (pl, file_path, msg.topic, str(e)))
print("---- Data grab complete ----")

# Store each reading from the grab result 'g': create a per-serial RRD on
# first sight, update it, then record the probe in the DB session.
# NOTE(review): 'g', 'rrd_dir', 'config', 'sensors', 'session', 'Probe' and
# 'time' come from earlier in the file -- not visible in this chunk.
for serial, value, units, name, source_node in g:
    print(r"%2.3f : Found sensor %s with value %s %s and name %s" %
          (time(), serial, value, units, name))
    rrd = str(rrd_dir + serial + config.get("rrd", "ext"))
    if not os.path.isfile(rrd):
        #create rrd if none exists
        print("Creating new RRD " + rrd)
        rrdtool.create(
            rrd,
            "--step", "60",
            "DS:val:GAUGE:120:-100:100",  # Accept data between -100 and +100 as valid
            "RRA:AVERAGE:0.5:1:525600",   # A year of minutes
            "RRA:AVERAGE:0.5:60:8760",    # A year of hours
            "RRA:MAX:0.5:60:8760",        # A year of hours
            "RRA:MIN:0.5:60:8760",        # A year of hours
        )
    #update data
    rrdtool.update(rrd, "N:" + str(value))
    #store latest values
    try:
        (n, x, y, z, h, w, d) = sensors[serial]
    except:
        # bare except: deliberately falls back to auto-detected defaults
        # when the serial is not in the configured sensor table
        (n, x, y, z, h, w, d) = ("Auto-detected " + name, 0, 0, 0, 0, 0, 0)
    session.add(Probe(serial, n, x, y, z, h, w, d))
# PDP:Primary Data Point 。正常情况下每个 interval RRDtool 都会收到一个值;RRDtool 在收到脚本给来的值后会计算出另外一个值(例如平均值),这个 值就是 PDP ;这个值代表的一般是“xxx/秒”的含义。注意,该值不一定等于RRDtool 收到的那个值。除非是GAUGE ,可以看下面的例子就知道了 # CF:CF 就是 Consolidation Function 的缩写。也就是合并(统计)功能。有 AVERAGE、MAX、MIN、LAST 四种分别表示对多个PDP 进行取平均、取最大值、取最小值、取当前值四种类型。具体作用等到 update 操作时再说。 # CDP:Consolidation Data Point 。RRDtool 使用多个 PDP 合并为(计算出)一个 CDP。也就是执行上面 的CF 操作后的结果。这个值就是存入 RRA的数据,绘图时使用的也是这些数据 # 1、创建rrd数据库 rrd = rrdtool.create( 'Flow.rrd', '--step', '300', '--start', cur_time, # 定义数据源eth0_in(流入量),eth0_out(流出量);类型都为COUNTER(递增); # 600s为心跳值,其含义为600s没有收到值,则会用UNKNOWN代替;0为最小值,最大值为U代替,表示不确定 'DS:etho_in:COUNTER:600:0:U', 'DS:etho_out:COUNTER:600:0:U', 'RRA:AVERAGE:0.5:1:600', # 1即300s,总共存600个 'RRA:AVERAGE:0.5:6:700', 'RRA:AVERAGE:0.5:24:775', 'RRA:AVERAGE:0.5:28:797', 'RRA:MAX:0.5:1:600', 'RRA:MAX:0.5:6:700', 'RRA:MAX:0.5:24:775', 'RRA:MAX:0.5:28:797', 'RRA:MIN:0.5:1:600', 'RRA:MIN:0.5:6:700', 'RRA:MIN:0.5:24:775', 'RRA:MIN:0.5:28:797') if rrd: print(rrdtool.error()) # 2、采用updatev()方法更新数据库 # update filename [--template|-t ds-name[:ds-name]...] N|timestamp:value[:value...] [timestamp:value[:value...] ...] 方法,
# CLI: destination RRD (default from the WeatherRadio config) plus an
# optional --source RRD whose captured data is imported into the new file.
parser.add_argument(
    "rrdfile", nargs='?',
    default=config.get('WeatherRadio', 'RRDSENSORSFILE'),
    help="RRD file holding all time series of the raw sensor data")
parser.add_argument("-s", "--source",
                    help="Source file holding already captured data")

args = parser.parse_args()

rrd_args = [args.rrdfile, "--step", "300"]

if (args.source):
    rrd_args += ["--source", args.source]

# 5min raw values for 24 hours, 15 min for 7*24 hours, 1hour for 10 years.
# NOTE(review): the duration-suffixed steps/rows ("5m", "24h", ...) and the
# "--source" option require a recent rrdtool, and the call mixes a list
# argument with plain strings (relies on the binding flattening lists) --
# confirm against the installed python-rrdtool.
ret = rrdtool.create(
    rrd_args,
    "DS:BME280_Temp:GAUGE:600:U:U",
    "DS:BME280_Pres:GAUGE:600:U:U",
    "DS:BME280_Hum:GAUGE:600:U:U",
    "DS:DHT_Temp:GAUGE:600:U:U",
    "DS:DHT_Hum:GAUGE:600:U:U",
    "DS:MLX90614_Tamb:GAUGE:600:U:U",
    "DS:MLX90614_Tobj:GAUGE:600:U:U",
    "DS:TSL2591_Lux:GAUGE:600:U:U",
    "RRA:AVERAGE:0.5:5m:24h",
    "RRA:AVERAGE:0.5:15m:7d",
    "RRA:AVERAGE:0.5:1h:1y",
    "RRA:AVERAGE:0.5:1d:10y",
    "RRA:MIN:0.5:5m:24h",
    "RRA:MIN:0.5:15m:7d",
    "RRA:MIN:0.5:1h:1y",
    "RRA:MIN:0.5:1d:10y",
    "RRA:MAX:0.5:5m:24h",
    "RRA:MAX:0.5:15m:7d",
    "RRA:MAX:0.5:1h:1y",
    "RRA:MAX:0.5:1d:10y")
if ret:
    print(rrdtool.error())
# Locate the halostat RRD and create it on first run (os.stat raising
# OSError means the file does not exist yet).
# NOTE(review): 'rrddir', 'pngdir' and 'HaloRadio' come from earlier in
# the file -- not visible in this chunk.
rrdtoolpath = HaloRadio.conf['cron.rrdtool_path']
Time = int(time.time())
rrdfilename = "%s/halostat.rrd" % ( rrddir )
pngfileprefix = "%s/halostat-" % ( pngdir )
try:
    os.stat( rrdfilename )
except OSError:
    # NOTE(review): "-b <ts>" and "-s 300" are passed as single strings with
    # an embedded space; most rrdtool bindings expect flag and value as
    # separate arguments -- confirm this works with the binding in use.
    rrdtool.create( rrdfilename,
                    "-b %d" % ( Time - 300 ),   # begin 5 minutes in the past
                    "-s 300",                   # 5-minute step
                    "DS:WebUsers:GAUGE:600:0:1024",
                    "DS:Listeners:GAUGE:600:0:1024",
                    "DS:QueueLen:GAUGE:600:0:10240",
                    "RRA:AVERAGE:0.5:1:288",    # one day of 5-min samples
                    "RRA:AVERAGE:0.5:7:288",
                    "RRA:MAX:0.5:7:288",
                    "RRA:AVERAGE:0.5:31:288",
                    "RRA:MAX:0.5:31:288",
                    "RRA:AVERAGE:0.5:365:288",
                    "RRA:MAX:0.5:365:288",
                    )
import HaloRadio.SessionListMaker as SessionListMaker
slm = SessionListMaker.SessionListMaker()
slm.GetActive()
WebUsers = len(slm.list)
import HaloRadio.Util as Util
Listeners = Util.GetListeners()
import rrdtool #rrd_fn = "quota-%s.rrd" % platform.platform() rrd_fn = "quota-%s.rrd" % platform.system() if '-rrdinit' in sys.argv: ret = rrdtool.create( rrd_fn, "--step", "86400", "--start", '0', "DS:metric1:GAUGE:144000:U:U", "DS:metric2:GAUGE:144000:U:U", "RRA:AVERAGE:0.5:1:600", "RRA:AVERAGE:0.5:6:700", "RRA:AVERAGE:0.5:24:775", "RRA:AVERAGE:0.5:288:797", # "RRA:MIN:0.5:1:600", # "RRA:MIN:0.5:6:700", # "RRA:MIN:0.5:24:775", # "RRA:MIN:0.5:444:797", "RRA:MAX:0.5:1:600", "RRA:MAX:0.5:6:700", "RRA:MAX:0.5:24:775", "RRA:MAX:0.5:444:797") else: # update (metric1, metric2) = (percent, alarm) ret = rrdtool.update(rrd_fn, 'N:%s:%s' % (metric1, metric2)) # show
# Build a minimal trend RRD: a single CPU-load gauge sampled once a
# minute (heartbeat 600 s), keeping 24 one-minute averages.
import rrdtool

_CREATE_ARGS = (
    "trend.rrd",
    "--start", 'N',
    "--step", '60',
    "DS:CPUload:GAUGE:600:U:U",
    "RRA:AVERAGE:0.5:1:24",
)

# rrdtool.create returns None on success; a truthy result means failure.
ret = rrdtool.create(*_CREATE_ARGS)
if ret:
    print(rrdtool.error())
# Strip off the header and CRC then convert it using the unit function pointer data = rec[3:-2] return v.unit(combineBytes(data), v.times) if __name__ == "__main__": """ Test getting data from the Commander and print it to the screen """ rrdfile = os.path.expanduser("~/solarDb.rrd") print rrdfile if not os.path.exists(rrdfile): ret = rrdtool.create( rrdfile, "--step", "1", "--start", '0', "DS:0x3000:GAUGE:2000:U:U", "DS:0x3001:GAUGE:2000:U:U", "DS:0x331A:GAUGE:2000:U:U", "RRA:AVERAGE:0.5:1s:10d", "RRA:AVERAGE:0.5:1m:90d", "RRA:AVERAGE:0.5:1h:18M", "RRA:AVERAGE:0.5:1d:10y", "RRA:MAX:0.5:1s:10d", "RRA:MAX:0.5:1m:90d", "RRA:MAX:0.5:1h:18M", "RRA:MAX:0.5:1d:10y") print "Created", os.path.exists(rrdfile) ser = getRs485() try: # The ID of the device we are going to communicate with. deviceId = 0x01 for _ in xrange(1): toLog = {} for addr, v in sorted(mappings.RegistersA.iteritems()): value = communicate(ser, deviceId, addr, v, debug=False) print "%s \"%s\": %s" % (hex(addr), v.name, value)
args=parser.parse_args() # 5min raw values for 24 hours, 15 min for 7*24 hours, 1hour for 1 year, # 1day dor 10 years. ret = rrdtool.create(args.outfile, "-r", args.infile, "--step", "300", "DS:Temperature=Temperature[1]:GAUGE:600:U:U", # pressure data ignored since we change to sea level "DS:Pressure=Pressure[1]:GAUGE:600:U:U", "DS:Humidity=Humidity[1]:GAUGE:600:U:U", "DS:CloudCover=CloudCover[1]:GAUGE:600:U:U", "DS:SkyTemperature=SkyTemperature[1]:GAUGE:600:U:U", "DS:DewPoint=DewPoint[1]:GAUGE:600:U:U", "DS:SQM=SQM[1]:GAUGE:600:U:U", "DS:WindSpeed=WindSpeed[1]:GAUGE:600:U:U", "DS:WindGust:GAUGE:600:U:U", "DS:WindDirection=WindDirection[1]:GAUGE:600:U:U", "RRA:AVERAGE:0.5:1:288", "RRA:AVERAGE:0.5:3:672", "RRA:AVERAGE:0.5:12:8760", "RRA:AVERAGE:0.5:288:3650", "RRA:MIN:0.5:1:288", "RRA:MIN:0.5:3:672", "RRA:MIN:0.5:12:8760", "RRA:MIN:0.5:288:3650", "RRA:MAX:0.5:1:288", "RRA:MAX:0.5:5:672", "RRA:MAX:0.5:12:8760", "RRA:MAX:0.5:288:3650") if ret: print rrdtool.error()
# xff: fraction of <nsteps> that can have UNKNOWN as the value # nsteps: number of steps used for calculation # nrows: number of records to keep start = (int(time.time()) / INTERVAL - 1) * INTERVAL dataDefs = [ rrdFile, '--start', str(start), '--step', str(INTERVAL) ] for c in JOB_COUNTERS: dataDefs.append('DS:' + c + ':GAUGE:600:0:U') dataDefs.append('RRA:LAST:0:1:' + str(3600 / INTERVAL * 24 * 7)) rrdtool.create(*tuple(dataDefs)) try: # change selinux context of the RRD so that it can be read by a apache-invoked PHP script selinux.chcon(rrdFile, 'unconfined_u:object_r:httpd_var_run_t:s0') except: pass try: rrdtool.update(rrdFile, str(timestamp) + ':' + str(counts)) except: pass if args.graphDir: if ADD_MAX_SLOTS: