def create(self):
    """ Creates a new RRD database """
    sources = [DS(dsName=self.value_name, dsType=self.type,
                  heartbeat=self.heartbeat)]
    archives = []
    # 1 days-worth of n heartbeat samples --> 60/1 * 24
    archives.append(RRA(cf="AVERAGE", xff=0.5, steps=1,
                        rows=int(self.heartbeat / 1.0 * 24)))
    # 7 days-worth of n heartbeat samples --> 60/5 * 24 * 7
    archives.append(RRA(cf="AVERAGE", xff=0.5, steps=5,
                        rows=int(self.heartbeat / 5.0 * 24 * 7)))
    # 30 days-worth of n heartbeat samples --> 60/60 * 24 * 30
    archives.append(RRA(cf="AVERAGE", xff=0.5, steps=60,
                        rows=int(self.heartbeat / 60.0 * 24 * 30)))
    # 365 days worth of n heartbeat samples --> 60/120 * 24 * 365
    archives.append(RRA(cf="AVERAGE", xff=0.5, steps=120,
                        rows=int(self.heartbeat / 120.0 * 24 * 365)))
    # 10 years worth of n heartbeat samples --> 60/180 * 24 * 365 * 10
    archives.append(RRA(cf="AVERAGE", xff=0.5, steps=180,
                        rows=int(self.heartbeat / 180.0 * 24 * 365 * 10)))
    # The database lives under history/, named after this value's id.
    database = RRD(
        os.path.join("history/", "%s.rrd" % self.value_id),
        step=self.heartbeat,
        ds=sources,
        rra=archives,
        start=self.time,
    )
    database.create(debug=False)
def init_rdd(self):
    """Initialise the RRD archive; create a fresh one when the file is
    absent or a reset was requested via options.rrd_reset."""
    filename = options.rrd_file
    if not options.rrd_reset and access(filename, F_OK):
        # Reuse the existing archive untouched.
        return RRD(filename)
    # Heartbeat tolerates one missed stats period.
    heartbeat = options.stats_period * 2

    def _absolute(name):
        # Most metrics are per-interval absolute counters.
        return DataSource(dsName=name, dsType='ABSOLUTE', heartbeat=heartbeat)

    data_sources = [
        _absolute('agents_u'),
        _absolute('t_unique'),
        _absolute('t_started'),
        _absolute('t_completed'),
        _absolute('t_failed'),
        _absolute('bytes'),
        # CPU time only ever grows, hence DERIVE with a zero floor.
        DataSource(dsName='cpu', dsType='DERIVE', heartbeat=heartbeat,
                   minval=0),
        _absolute('duration'),
        DataSource(dsName='duration_avg', dsType='GAUGE',
                   heartbeat=heartbeat),
    ]
    archives = []
    for steps, rows in options.rrd_rra:
        # Keep both the average and the peak at every resolution tier.
        archives.append(RRA(cf='AVERAGE', xff=0.5, steps=steps, rows=rows))
        archives.append(RRA(cf='MAX', xff=0.5, steps=steps, rows=rows))
    archive = RRD(filename, ds=data_sources, rra=archives,
                  step=options.stats_period)
    archive.create(debug=True)
    return archive
def RrdCreate(rrdfile):
    '''Creates a RRD database.'''
    # Sensor sources, all GAUGE with a 600s heartbeat; bounds where known.
    source_specs = [
        dict(dsName='temperature', dsType='GAUGE', heartbeat=600,
             minval=-50, maxval=100),
        dict(dsName='humidity', dsType='GAUGE', heartbeat=600,
             minval=0, maxval=100),
        dict(dsName='mq9', dsType='GAUGE', heartbeat=600),
        dict(dsName='dust_pc', dsType='GAUGE', heartbeat=600, minval=0),
        dict(dsName='dust_raw', dsType='GAUGE', heartbeat=600),
    ]
    dataSources = [DataSource(**spec) for spec in source_specs]
    roundRobinArchives = [
        # Keep all values for 10 days
        RRA(cf='AVERAGE', xff=0.5, steps=1, rows=10 * 24 * 60),
        # Keep 15-minute averages for one year
        RRA(cf='AVERAGE', xff=0.5, steps=15, rows=365 * 24 * 4),
        # Keep 1-hour averages for 10 years
        RRA(cf='AVERAGE', xff=0.5, steps=60, rows=10 * 365 * 24),
    ]
    database = RRD(rrdfile, step=60, ds=dataSources,
                   rra=roundRobinArchives)
    database.create()
def create(stringName, key):
    """Create and initialise the RRD file "<baseDir><stringName>_<key>.rrd".

    The database holds a single 'kW' GAUGE expected every 10 minutes, with
    AVERAGE archives covering roughly a day up to ten years.
    """
    if debug:
        print("Enter Function create(stringName, key)")
    # One value every 10 minutes (heartbeat 600 s).
    dss = [DS(dsName='kW', dsType='GAUGE', heartbeat=600)]
    # xff=0.5: when no value arrives for ~20 minutes the consolidated
    # point is shown as unknown/empty.
    rras = [
        RRA(cf='AVERAGE', xff=0.5, steps=1, rows=144),   # one value per 10 min, one day
        RRA(cf='AVERAGE', xff=0.5, steps=6, rows=24),    # 24 x 1 h
        RRA(cf='AVERAGE', xff=0.5, steps=24, rows=30),   # 30 days x 24 h
        RRA(cf='AVERAGE', xff=0.5, steps=30, rows=12),   # 12 months x 30 days
        RRA(cf='AVERAGE', xff=0.5, steps=12, rows=10),   # 10 years x 12 months
    ]
    # Create the round-robin database with a fixed start time
    # (1483228800 = 2017-01-01 00:00 UTC).
    myRRD = RRD(baseDir + stringName + "_" + key + ".rrd",
                ds=dss, rra=rras, start=1483228800)
    myRRD.create()
    # FIX: the original called myRRD.update() here with nothing buffered,
    # which issues an empty rrdtool update (an error at worst, a no-op at
    # best); the call has been removed.
    if debug:
        myRRD.info()
def main(self, argv):
    """
    Create an RRD file with values 0-9 entered at 1 second intervals
    from 1980-01-01 00:00:00 (the first date that rrdtool allows)
    """
    from pyrrd.rrd import DataSource, RRA, RRD
    start = int(datetime(1980, 1, 1, 0, 0).strftime('%s'))
    filename = os.path.join(self.build_dir, 'test.rrd')
    rows = 12
    step = 10
    sources = [DataSource(dsName='speed', dsType='GAUGE',
                          heartbeat=2 * step)]
    # Two AVERAGE archives: raw resolution and 12-step consolidation.
    archives = [RRA(cf='AVERAGE', xff=0.5, steps=s, rows=rows)
                for s in (1, 12)]
    my_rrd = RRD(filename, ds=sources, rra=archives, start=start,
                 step=step)
    my_rrd.create()
    sample_times = range(start + step, start + step + (rows * step), step)
    for i, t in enumerate(sample_times):
        self.log.debug(
            'DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
        my_rrd.bufferValue(t, i)
    # One extra value a second later shows that the rrd lastupdatetime
    # does not necessarily fall on a step boundary.
    t += 1
    i += 1
    self.log.debug('DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
    my_rrd.bufferValue(t, i)
    my_rrd.update()
def RrdCreate(rrdfile):
    '''Creates a RRD database.'''
    # (name, extra kwargs) for each 600s-heartbeat GAUGE source.
    specs = (
        ('temperature', {'minval': -50, 'maxval': 100}),
        ('humidity', {'minval': 0, 'maxval': 100}),
        ('mq9', {}),
        ('dust_pc', {'minval': 0}),
        ('dust_raw', {}),
    )
    dataSources = [
        DataSource(dsName=name, dsType='GAUGE', heartbeat=600, **extra)
        for name, extra in specs]
    roundRobinArchives = []
    # Keep all values for 10 days
    roundRobinArchives.append(
        RRA(cf='AVERAGE', xff=0.5, steps=1, rows=10 * 24 * 60))
    # Keep 15-minute averages for one year days
    roundRobinArchives.append(
        RRA(cf='AVERAGE', xff=0.5, steps=15, rows=365 * 24 * 4))
    # Keep 1-hour averages for 10 years
    roundRobinArchives.append(
        RRA(cf='AVERAGE', xff=0.5, steps=60, rows=10 * 365 * 24))
    myRRD = RRD(rrdfile, step=60, ds=dataSources, rra=roundRobinArchives)
    myRRD.create()
def create(namerrd, fieldname, starttime, typeofinfo):
    """Create an RRD file for *fieldname* using the archive profile
    configured for *typeofinfo* ('hardware' or 'netusage').

    Returns a (success, message) tuple; any exception is caught and
    reported as (False, str(exception)).
    """
    try:
        dataSources = get_ds(fieldname)
        roundRobinArchives = []
        # FIX: removed the useless pre-assignments and the shadowing of
        # the builtin `dict`; the profile function named in the config
        # builds the archive list.
        if typeofinfo == 'hardware':
            cfg = config_info['hardware']
            # NOTE: 'hardware' step is used as-is, 'netusage' is coerced
            # to int — preserved from the original; confirm intent.
            step = cfg['step']
            roundRobinArchives = globals()[cfg['func']]()
        elif typeofinfo == 'netusage':
            cfg = config_info['netusage']
            step = int(cfg['step'])
            roundRobinArchives = globals()[cfg['func']]()
        # An unknown typeofinfo leaves `step` unbound; the resulting
        # exception is caught below and reported to the caller.
        myRRD = RRD(filename=namerrd, ds=dataSources,
                    rra=roundRobinArchives, start=starttime, step=step)
        myRRD.create()
        return (True, 'Create is successfull.')
    # FIX: "except Exception, e" is Python-2-only syntax; "as" works on
    # Python 2.6+ and 3.
    except Exception as e:
        return (False, str(e))
def main(self, argv):
    """
    Create an RRD file with values 0-9 entered at 1 second intervals
    from 1980-01-01 00:00:00 (the first date that rrdtool allows)
    """
    from pyrrd.rrd import DataSource, RRA, RRD
    start = int(datetime(1980, 1, 1, 0, 0).strftime('%s'))
    filename = os.path.join(self.build_dir, 'test.rrd')
    rows = 12
    step = 10
    data_sources = [
        DataSource(dsName='speed', dsType='GAUGE', heartbeat=2 * step)]
    archives = [
        RRA(cf='AVERAGE', xff=0.5, steps=1, rows=rows),
        RRA(cf='AVERAGE', xff=0.5, steps=12, rows=rows)]
    my_rrd = RRD(filename, ds=data_sources, rra=archives, start=start,
                 step=step)
    my_rrd.create()
    # Buffer `rows` samples, one per step, starting one step after start.
    for i, t in enumerate(range(start + step,
                                start + step + (rows * step), step)):
        self.log.debug('DATA: %s %s (%s)'
                       % (t, i, datetime.fromtimestamp(t)))
        my_rrd.bufferValue(t, i)
    # Add further data 1 second later to demonstrate that the rrd
    # lastupdatetime does not necessarily fall on a step boundary
    t += 1
    i += 1
    self.log.debug('DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
    my_rrd.bufferValue(t, i)
    my_rrd.update()
def _createRRD(self, filename):
    """ create an rrd file which fits our requirements """
    # One GAUGE source per name in the module-level dataSources list.
    sources = [DS(dsName=name, dsType='GAUGE', heartbeat=900)
               for name in dataSources]
    # Archive tiers as (steps, rows):
    archive_plan = (
        (1, 1440),    # 1 days-worth of one-minute samples --> 60/1 * 24
        (5, 2016),    # 7 days-worth of five-minute samples --> 60/5 * 24 * 7
        (60, 720),    # 30 days-worth of one hour samples --> 60/60 * 24 * 30
        (720, 730),   # 1 year-worth of half day samples --> 60/60 * 24/12 * 365
    )
    archives = [RRA(cf='AVERAGE', xff=0, steps=s, rows=r)
                for s, r in archive_plan]
    # `step` comes from module scope.
    rrd = RRD(filename, step=step, ds=sources, rra=archives,
              start=int(time.time()))
    rrd.create(debug=False)
    return rrd
class ExternalBackendTestCase(TestCase):
    """Tests exercising pyrrd's external (command-line rrdtool) backend."""

    def setUp(self):
        # A single COUNTER source and two AVERAGE archives, backed by a
        # temporary file so every test starts from a fresh RRD.
        ds = [DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)
        ]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        # Two samples with the same timestamp: rrdtool rejects the second.
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        expected = ("illegal attempt to update using time 1261214678 "
                    "when last update time is 1261214678 (minimum one second "
                    "step)")
        try:
            self.rrd.update()
        except ExternalCommandError as error:
            # The error text is prefixed by rrdtool with "ERROR:"; only the
            # prefix and suffix are pinned so intermediate detail may vary.
            self.assertTrue(str(error).startswith("ERROR:"))
            self.assertTrue(str(error).endswith(expected))

    def test_infoWriteMode(self):
        # Expected RRD.info() output for the database built in setUp().
        # The filename line contains a per-run temp path and is
        # special-cased in the comparison loop below.
        expectedOutput = """
rra = [{'rows': 24, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 1, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}, {'rows': 10, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 6, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}]
filename = /tmp/tmpQCLRj0
start = 920804400
step = 300
values = []
ds = [{'name': 'speed', 'min': 'U', 'max': 'U', 'unknown_sec': None, 'minimal_heartbeat': 600, 'value': None, 'rpn': None, 'type': 'COUNTER', 'last_ds': None}]
ds[speed].name = speed
ds[speed].min = U
ds[speed].max = U
ds[speed].minimal_heartbeat = 600
ds[speed].type = COUNTER
rra[0].rows = 24
rra[0].cf = AVERAGE
rra[0].steps = 1
rra[0].xff = 0.5
rra[0].ds = []
rra[1].rows = 10
rra[1].cf = AVERAGE
rra[1].steps = 6
rra[1].xff = 0.5
rra[1].ds = []
""".strip().split("\n")
        # Capture info()'s stdout so it can be compared line by line.
        originalStdout = sys.stdout
        sys.stdout = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        self.rrd.info()
        for obtained, expected in zip(sys.stdout.getvalue().split("\n"),
                                      expectedOutput):
            if obtained.startswith("filename"):
                # Temp filename differs per run; only check the label.
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEquals(obtained.strip(), expected.strip())
        sys.stdout = originalStdout
def create_rrd(self, gpuid):
    """Create and return a fresh per-GPU utilisation RRD under rrd_dir."""
    rrd_path = '/'.join([self.rrd_dir, '%s.rrd' % gpuid])
    utilisation = DataSource(dsName='utilization', dsType='GAUGE',
                             heartbeat=10)
    # 360 rows of 10-second minimums (steps=1 at a 10 s step).
    archive = RRA(cf='MIN', xff=0.5, steps=1, rows=360)
    rrd = RRD(rrd_path, ds=[utilisation], rra=[archive], step=10,
              start=int(time()))
    rrd.create()
    return rrd
def main(args):
    """Demo driver: create test.rrd on the first run; on later runs buffer
    one random sample and flush it, then report the file's size."""
    filename = 'test.rrd'
    if not os.path.exists(filename):
        dataSource = DataSource(dsName='speed', dsType='COUNTER',
                                heartbeat=600)
        # FIX: print statement -> form valid in Python 2 and 3.
        print("dataSource.name: %s" % dataSource.name)
        dataSources = [dataSource]
        roundRobinArchives = [
            RRA(cf='AVERAGE', xff=0.5, steps=1, rows=24),
            RRA(cf='AVERAGE', xff=0.5, steps=6, rows=10),
        ]
        # FIX: rrdtool expects an integer start time; time.time() returns
        # a float.
        myRRD = RRD(filename, ds=dataSources, rra=roundRobinArchives,
                    start=int(time.time()))
        myRRD.create()
    else:
        import random
        myRRD = RRD(filename)
        # NOTE(review): the timestamp is still a float here, as in the
        # original — confirm pyrrd/rrdtool accept fractional timestamps.
        myRRD.bufferValue(time.time(), random.randrange(12393, 12423))
        myRRD.update()
    print(os.path.isfile(filename))
    # FIX: close the file handle instead of leaking it.
    with open(filename) as f:
        print(len(f.read()))
def init_rdd(self):
    # Initiates RRD-archive
    # Creates the new one if absent or need to reset
    filename = options.rrd_file
    if not options.rrd_reset and access(filename, F_OK):
        myRRD = RRD(filename)
    else:
        # Heartbeat spans two stats periods.
        heartbeat = options.stats_period * 2
        # (name, type, extra kwargs) for every data source.
        source_specs = [
            ('agents_u', 'ABSOLUTE', {}),
            ('t_unique', 'ABSOLUTE', {}),
            ('t_started', 'ABSOLUTE', {}),
            ('t_completed', 'ABSOLUTE', {}),
            ('t_failed', 'ABSOLUTE', {}),
            ('bytes', 'ABSOLUTE', {}),
            ('cpu', 'DERIVE', {'minval': 0}),
            ('duration', 'ABSOLUTE', {}),
            ('duration_avg', 'GAUGE', {}),
        ]
        dataSources = [
            DataSource(dsName=name, dsType=kind, heartbeat=heartbeat,
                       **extra)
            for name, kind, extra in source_specs]
        roundRobinArchives = []
        for _steps, _rows in options.rrd_rra:
            # AVERAGE and MAX archive for each configured tier.
            for cf in ('AVERAGE', 'MAX'):
                roundRobinArchives.append(
                    RRA(cf=cf, xff=0.5, steps=_steps, rows=_rows))
        myRRD = RRD(filename, ds=dataSources, rra=roundRobinArchives,
                    step=options.stats_period)
        myRRD.create(debug=True)
    return myRRD
def create_rrd(filename, args):
    """Create an RRD at *filename* with one 600s-heartbeat source per
    entry of args['ds'] (type taken from the mapping value) and four
    fixed AVERAGE archives."""
    dses = [DataSource(dsName=name, dsType=args['ds'][name], heartbeat=600)
            for name in args['ds']]
    # (steps, rows) archive tiers.
    tiers = ((1, 288), (12, 744), (24, 1116), (48, 2191))
    rras = [RRA(cf='AVERAGE', xff=0.5, steps=s, rows=r) for s, r in tiers]
    # Start is anchored one minute before the module-level `now`.
    RRD(filename, ds=dses, rra=rras, start=int(now) - 60).create()
def create_rrd(filename, args):
    """Create an RRD at *filename*; one source per entry in args['ds']
    (the mapping value supplies the data-source type), plus four AVERAGE
    archives of increasing consolidation."""
    sources = []
    for name, ds_type in args['ds'].items():
        sources.append(
            DataSource(dsName=name, dsType=ds_type, heartbeat=600))
    archives = []
    for steps, rows in ((1, 288), (12, 744), (24, 1116), (48, 2191)):
        archives.append(RRA(cf='AVERAGE', xff=0.5, steps=steps, rows=rows))
    # Start one minute before the module-level `now`.
    rrd = RRD(filename, ds=sources, rra=archives, start=int(now) - 60)
    rrd.create()
def create_db(cls, filename, step, start, interface_speeds=None):
    """Create a network-traffic RRD with an in/out COUNTER pair per
    interface.

    FIX: the mutable dict default argument was replaced with the
    None-sentinel idiom; the effective defaults are unchanged. The dict
    was only read, so this was latent rather than live, but the idiom
    prevents accidental shared-state bugs later.
    """
    if interface_speeds is None:
        # Default link capacities, bytes per second.
        interface_speeds = {"eth0": 6 * 1024**2 / 8,      # 6Mbit/s
                            "wlan0": 300 * 1024**2 / 8,   # 300Mbit/s
                            }
    dss = []
    for iface, speed in interface_speeds.items():
        # maxval caps each counter at the link speed so wrap/reset spikes
        # are discarded by rrdtool.
        dss.extend([
            DataSource(dsName="%s_out" % iface, dsType='COUNTER',
                       heartbeat=3 * step, minval=0, maxval=speed),
            DataSource(dsName="%s_in" % iface, dsType='COUNTER',
                       heartbeat=3 * step, minval=0, maxval=speed)
        ])
    db = RRD(filename, ds=dss, rra=cls.generate_archives(step),
             start=start, step=step)
    db.create()
    return db
class StatsRecorder:
    """Collects samples into a double buffer and periodically flushes
    quartile statistics (q1/q2/q3, min, max, count) into an RRD."""

    def __init__(self, filename):
        """Open *filename* if it exists, otherwise create it with six
        GAUGE sources and three AVERAGE archive tiers."""
        if os.path.isfile(filename):
            self.rrd = RRD(filename)
        else:
            data_sources = [
                DataSource(dsName=name, dsType='GAUGE', heartbeat=600,
                           minval=0)
                for name in ('q1', 'q2', 'q3', 'lo', 'hi', 'total')]
            archives = [
                # 24h at 1 sample per 10 secs
                RRA(cf='AVERAGE', xff=0.5, steps=1, rows=8640),
                # 1 month at 1 sample per 15 mins
                RRA(cf='AVERAGE', xff=0.5, steps=90, rows=2880),
                # 5 years at 1 sample per 8 hours
                RRA(cf='AVERAGE', xff=0.5, steps=2880, rows=5475)]
            self.rrd = RRD(filename, step=10, ds=data_sources,
                           rra=archives, start=int(time.time()))
            self.rrd.create()
        # Double-buffered sample store: new samples accumulate in the
        # current bucket while the other one is flushed.
        self.bucket = {'a': [], 'b': []}
        self.current_bucket = 'a'

    def add(self, value):
        """Record one sample in the active bucket."""
        self.bucket[self.current_bucket].append(value)

    def save(self):
        """Flush the active bucket's statistics to the RRD, swapping
        buckets first so new samples are not lost while flushing."""
        flushing = self.current_bucket
        self.current_bucket = 'b' if flushing == 'a' else 'a'
        stats = corestats.Stats(self.bucket[flushing])
        q1 = stats.percentile(25)
        q2 = stats.percentile(50)
        q3 = stats.percentile(75)
        lo = stats.min()
        hi = stats.max()
        total = stats.count()
        self.bucket[flushing] = []
        self.rrd.bufferValue(str(int(time.time())),
                             q1, q2, q3, lo, hi, total)
        self.rrd.update()
def rrdtool_create(self, filename):
    """ Create an rrdtool database if it doesn't exist """
    # Make sure the containing directory exists first.
    directory = "/".join(filename.split("/")[:-1])
    if not os.path.isdir(directory):
        os.makedirs(directory)
    # Step interval for Primary Data Points (pdp), derived from the
    # configured frequency.
    pdp_step = self.frequency.seconds + (self.frequency.days * 86400)
    # Heartbeat can be 'whatev', but twice the pdp_step is good.
    heartbeat = 2 * pdp_step
    # We only keep a single simple datasource.
    sources = [DataSource(dsName="sum", dsType="GAUGE",
                          heartbeat=heartbeat)]
    # TODO -- this should be a user-definable number. It is equivalent to
    # "how many data points do I want to see on any one graph at any
    # given time." Higher means cooler graphs, more disk space and more
    # client-side memory.
    target_resolution = 60

    def cdp_steps(tspan):
        # How many PDP steps are consolidated into one CDP for a span of
        # `tspan` seconds.
        return (tspan / pdp_step) / target_resolution

    # Just a lookup of how many seconds per 'timespan'.
    timespans = {"hour": 3600, "day": 86400, "week": 604800,
                 "month": 2629744, "quarter": 7889231, "year": 31556926}
    self.log.info("Building rrd %s. %i cdp steps per hour." %
                  (filename, cdp_steps(timespans["hour"])))
    # One archive per (consolidation function, timespan) pair.
    archives = []
    for cf in ["AVERAGE", "MAX"]:
        for name, seconds_per_timespan in timespans.iteritems():
            archives.append(RRA(cf=cf, xff=0.5, rows=target_resolution,
                                steps=cdp_steps(seconds_per_timespan)))
    # Actually build the round robin database from the parameters above.
    rrd = RRD(filename, start=int(time.time()), step=pdp_step,
              ds=sources, rra=archives)
    rrd.create()
def create(self):
    """ Creates a new RRD database """
    sources = [DS(dsName=self.value_name, dsType=self.type,
                  heartbeat=self.heartbeat)]
    # (steps, multiplier) tiers; row counts scale with the heartbeat:
    # rows = heartbeat / steps * multiplier.
    tier_plan = (
        (1, 24),               # 1 days-worth of n heartbeat samples
        (5, 24 * 7),           # 7 days-worth
        (60, 24 * 30),         # 30 days-worth
        (120, 24 * 365),       # 365 days-worth
        (180, 24 * 365 * 10),  # 10 years-worth
    )
    archives = [
        RRA(cf='AVERAGE', xff=0.5, steps=s,
            rows=int(self.heartbeat / float(s) * mult))
        for s, mult in tier_plan
    ]
    target = os.path.join("history/", "%s.rrd" % self.value_id)
    RRD(target, step=self.heartbeat, ds=sources, rra=archives,
        start=self.time).create(debug=False)
class ExternalBackendTestCase(TestCase):
    """Exercises error reporting of pyrrd's external rrdtool backend."""

    def setUp(self):
        # Fresh single-source RRD in a temp file for every test.
        ds = [
            DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        # Duplicate timestamps make rrdtool reject the second sample.
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        try:
            self.rrd.update()
        # FIX: "except ExternalCommandError, error" is Python-2-only
        # syntax; "as" works on Python 2.6+ and 3 and matches the sibling
        # test case in this file.
        except ExternalCommandError as error:
            self.assertEquals(str(error), (
                "ERROR: illegal attempt to update using time 1261214678 "
                "when last update time is 1261214678 (minimum one second step)"))
def rrdtool_create(self, filename):
    """ Create an rrdtool database if it doesn't exist """
    step = 15
    # Our hearbeat is twice... actually eight times the step interval.
    heartbeat = 8 * step
    sources = [DataSource(dsName='sum', dsType='GAUGE',
                          heartbeat=heartbeat)]
    # (steps, rows) AVERAGE archive tiers.
    archive_plan = ((1, 244), (24, 244), (168, 244), (672, 244),
                    (5760, 374))
    archives = [RRA(cf='AVERAGE', xff=0.5, steps=s, rows=r)
                for s, r in archive_plan]
    rrd = RRD(filename, ds=sources, rra=archives, start=int(time.time()))
    rrd.create()
class ExternalBackendTestCase(TestCase):
    """Exercises error reporting of pyrrd's external rrdtool backend."""

    def setUp(self):
        # Fresh single-source RRD in a temp file for every test.
        ds = [DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)
        ]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        # Duplicate timestamps make rrdtool reject the second sample.
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        try:
            self.rrd.update()
        # FIX: "except ExternalCommandError, error" is Python-2-only
        # syntax; "as" works on Python 2.6+ and 3.
        except ExternalCommandError as error:
            self.assertEquals(str(error), (
                "ERROR: illegal attempt to update using time 1261214678 "
                "when last update time is 1261214678 (minimum one second step)"
            ))
def create(self):
    ''' Create the RRD '''
    # 5 minute average for daily view
    self._rra.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=288))
    # 30 minute average for weekly view
    self._rra.append(RRA(cf='AVERAGE', xff=0.5, steps=6, rows=336))
    self._rra.append(RRA(cf='MAX', xff=0.5, steps=6, rows=336))
    self._rra.append(RRA(cf='MIN', xff=0.5, steps=6, rows=336))
    # 2 hour average for monthly view
    self._rra.append(RRA(cf='AVERAGE', xff=0.5, steps=24, rows=360))
    self._rra.append(RRA(cf='MAX', xff=0.5, steps=24, rows=360))
    self._rra.append(RRA(cf='MIN', xff=0.5, steps=24, rows=360))
    # 24 hour average for yearly view
    self._rra.append(RRA(cf='AVERAGE', xff=0.5, steps=288, rows=365))
    self._rra.append(RRA(cf='MAX', xff=0.5, steps=288, rows=365))
    self._rra.append(RRA(cf='MIN', xff=0.5, steps=288, rows=365))
    # FIX: was a Python-2-only print statement; the call form emits the
    # same output on Python 2 and 3.
    print("Create %s" % (self._rrd_file))
    my_rrd = RRD(self._rrd_file, ds=self._ds, rra=self._rra)
    my_rrd.create()
def create_db(cls, filename, step, start):
    """Create the CPU-statistics RRD: five COUNTER fields for the
    aggregate of all CPUs (row 0) plus five per individual CPU."""
    fields = ('cpu_user', 'cpu_nice', 'cpu_sys', 'cpu_idle', 'cpu_iowt')
    dss = []
    for i in range(SysStat.num_cpu() + 1):
        if i == 0:
            # Aggregate row: unsuffixed names, scaled by the CPU count.
            suffix, cpu_mul = "", SysStat.num_cpu()
        else:
            # Per-CPU rows: "_0", "_1", ... suffixes.
            suffix, cpu_mul = "_%d" % (i - 1), 1
        for field in fields:
            # maxval is the total number of jiffies in the interval.
            dss.append(DataSource(
                dsName=field + suffix, dsType='COUNTER',
                heartbeat=3 * step, minval=0,
                maxval=cpu_mul * SysStat.jiffies() * step))
    db = RRD(filename, ds=dss, rra=cls.generate_archives(step),
             start=start, step=step)
    db.create()
    return db
def _createRRD(self, filename):
    """ create an rrd file which fits our requirements """
    # One GAUGE data source per name in the module-level dataSources list.
    sources = []
    for name in dataSources:
        sources.append(DS(dsName=name, dsType='GAUGE', heartbeat=900))
    archives = []
    # 1 days-worth of one-minute samples --> 60/1 * 24
    archives.append(RRA(cf='AVERAGE', xff=0, steps=1, rows=1440))
    # 7 days-worth of five-minute samples --> 60/5 * 24 * 7
    archives.append(RRA(cf='AVERAGE', xff=0, steps=5, rows=2016))
    # 30 days-worth of one hour samples --> 60/60 * 24 * 30
    archives.append(RRA(cf='AVERAGE', xff=0, steps=60, rows=720))
    # 1 year-worth of half day samples --> 60/60 * 24/12 * 365
    archives.append(RRA(cf='AVERAGE', xff=0, steps=720, rows=730))
    # `step` comes from module scope.
    new_rrd = RRD(filename, step=step, ds=sources, rra=archives,
                  start=int(time.time()))
    new_rrd.create(debug=False)
    return new_rrd
def create_rrd_if(cfg, _type, **kw):
    """Create an RRD file for *_type* under cfg["_path"] and return the
    result of its .create() call.

    Keyword overrides in ``kw`` are merged into ``cfg`` first (NOTE: this
    mutates the caller's dict).  Raises IOError when the target directory
    is missing or the .rrd file already exists.
    """
    cfg.update(kw)
    _path = cfg["_path"]
    if not path.exists(_path):
        # NOTE(review): the format string references %(path)s but the key
        # read above is "_path" — this raises KeyError instead of the
        # intended IOError unless cfg also carries a "path" entry; confirm
        # against callers.
        raise IOError("Non existing path %(path)s" % cfg)
    # Append ".rrd" unless the caller already supplied the extension.
    fn = path.join(_path, _type.endswith(".rrd") and _type or "%s.rrd" % _type)
    if path.isfile(fn):
        #should check RRD magic number if I was paranoid
        raise IOError("already exists %s" % fn)
    # Build one DataSource per entry of cfg[_type]["source"]; each source
    # dict is merged over cfg["_default"], and keys present in neither
    # fall back to the literal string "arg" (presumably a placeholder —
    # verify).
    ds = [ DataSource(**{ k:s_dct.get(k,cfg["_default"].get(k, "arg"))
        for k in set(s_dct.keys()) | set(cfg["_default"].keys()) })
        for s_dct in cfg[_type]["source"]]
    # Archives are passed in raw rrdtool "RRA:CF:xff:steps:rows" form.
    rra = [
        "RRA:MIN:0.5:10:57600",
        "RRA:AVERAGE:0.5:1:57600",
        "RRA:AVERAGE:0.5:10:57600",
        "RRA:AVERAGE:0.5:100:57600",
        "RRA:LAST:0:10:57600",
        "RRA:MAX:0:10:57600",
    ]
    archive = RRD(fn,rra=rra, ds=ds, **cfg.get("_rrd_option",{}))
    return archive.create()
ds4 = DS(dsName="ds_out_bits", dsType="ABSOLUTE", heartbeat=900) dss.extend([ds1, ds2, ds3, ds4]) # An now let's setup how our RRD will archive the data rras = [] # 1 days-worth of one-minute samples --> 60/1 * 24 rra1 = RRA(cf="AVERAGE", xff=0, steps=1, rows=1440) # 7 days-worth of five-minute samples --> 60/5 * 24 * 7 rra2 = RRA(cf="AVERAGE", xff=0, steps=5, rows=2016) # 30 days-worth of five-minute samples --> 60/60 * 24 * 30 rra3 = RRA(cf="AVERAGE", xff=0, steps=60, rows=720) rras.extend([rra1, rra2, rra3]) # With those setup, we can now created the RRD myRRD = RRD(filename, step=step, ds=dss, rra=rras, start=startTime) myRRD.create(debug=False) # Let's suck in that data... the data file has the following format: # DS TIME:VALUE [TIME:VALUE [TIME:VALUE]...] # and the lines are in a completely arbitrary order. data = {} # First, we need to get everything indexed by time for line in open(datafile).readlines(): line = line.strip() lineParts = line.split(" ") dsName = lineParts[0] for timedatum in lineParts[1:]: time, datum = timedatum.split(":") # For each time index, let's have a dict data.setdefault(time, {}) # Now let's add the DS names and its data for this time to the
def graph_totals(ip=None):
    """Build an RRD of traffic totals/entropies and render graphs.

    When *ip* is given, data is restricted to that internal host (the
    graph name is derived from the IP's last two octets); otherwise
    network-wide totals are used.  Relies on module-level globals:
    rrd_file, graph_file, rras, startTime, keys, flows, flow_entropies,
    delta_flow_entropies, and the pyrrd DS/RRD/graph classes.
    """
    # Assumes a dotted-quad IP; octets 3 and 4 name the per-host graphs.
    graph_type = ip.split('.')[2] + "-" + ip.split('.')[3] if ip else 'network'
    # (data-source name, graph label) for each single-series graph.
    graph_setups = [
        ('total_bytes', 'Bytes'),
        ('total_pkts', 'Packets'),
        ('total_flows', 'Flows'),
        ('total_log_bytes', 'logBytes'),
        ('total_log_pkts', 'logPackets'),
        ('total_log_flows', 'logFlows'),
        ('int_ip_entropy', 'IntIPEntropy'),
        ('ext_ip_entropy', 'ExtIPEntropy'),
        ('d_int_ip_entropy', 'deltaIntIPEntropy'),
        ('d_ext_ip_entropy', 'deltaExtIPEntropy'),
        ('wireless_retries', 'nRetries')
    ]
    dss = []
    for graph in graph_setups:
        dss.append( DS(dsName=graph[0], dsType='GAUGE', heartbeat=900) )
    # In/out series get paired in_*/out_* data sources.
    dbl_graph_setups = [
        ('ivo_bytes', 'Bytes'),
        ('ivo_pkts', 'Pkts'),
        ('ivo_flows', 'Flows')]
    for graph in dbl_graph_setups:
        dss.append( DS(dsName='in_'+graph[0], dsType='GAUGE', heartbeat=900) )
        dss.append( DS(dsName='out_'+graph[0], dsType='GAUGE', heartbeat=900) )
    myRRD = RRD(rrd_file % graph_type, step=60, ds=dss, rra=rras,
                start=startTime-60)
    myRRD.create(debug=False)
    counter = 0
    for flow_key in keys:
        if ip:
            if ip in flows[flow_key]['internal']:
                in_bytes, out_bytes = (flows[flow_key])['internal'][ip]['in_bytes'], (flows[flow_key])['internal'][ip]['out_bytes']
                in_pkts, out_pkts = (flows[flow_key])['internal'][ip]['in_pkts'], (flows[flow_key])['internal'][ip]['out_pkts']
                in_flows, out_flows = (flows[flow_key])['internal'][ip]['in_flows'], (flows[flow_key])['internal'][ip]['out_flows']
                total_bytes = in_bytes + out_bytes
                total_pkts = in_pkts + out_pkts
                total_flows = in_flows + out_flows
                log_bytes, log_pkts, log_flows = log(total_bytes, 2), log(total_pkts, 2), log(total_flows, 2)
                nretries = (flows[flow_key])['internal'][ip]['nretries']
            else:
                # Host inactive this interval: record UNKNOWN ('U').
                in_bytes = out_bytes = in_pkts = out_pkts = in_flows = out_flows = 'U'
                total_bytes = total_pkts = total_flows = 'U'
                log_bytes = log_pkts = log_flows = 'U'
                nretries = 'U'
            # NOTE(review): both entropy membership checks consult
            # flow_entropies (not delta_flow_entropies) — confirm the
            # second one is intentional.
            myRRD.bufferValue(
                int(flow_key), total_bytes, total_pkts, total_flows,
                log_bytes, log_pkts, log_flows,
                flow_entropies[flow_key]['external'][ip]
                    if ip in flow_entropies[flow_key]['external'] else 0,
                0,
                delta_flow_entropies[flow_key]['external'][ip]
                    if ip in flow_entropies[flow_key]['external'] else 0,
                0,  # delta_flow_entropies[flow_key]['internal'][ip],
                nretries,
                in_bytes, out_bytes, in_pkts, out_pkts, in_flows,
                out_flows,
            )
        else:
            in_bytes, out_bytes = (flows[flow_key])['in_bytes'], (flows[flow_key])['out_bytes']
            in_pkts, out_pkts = (flows[flow_key])['in_pkts'], (flows[flow_key])['out_pkts']
            in_flows, out_flows = (flows[flow_key])['in_flows'], (flows[flow_key])['out_flows']
            total_bytes = in_bytes + out_bytes
            total_pkts = in_pkts + out_pkts
            total_flows = in_flows + out_flows
            # Guard against log(0): zero totals log as 0.
            log_bytes = log(total_bytes, 2) if total_bytes else 0
            log_pkts = log(total_pkts, 2) if total_pkts else 0
            log_flows = log(total_flows, 2) if total_flows else 0
            nretries = (flows[flow_key])['nretries']
            myRRD.bufferValue(
                int(flow_key), total_bytes, total_pkts, total_flows,
                log_bytes, log_pkts, log_flows,
                flow_entropies[flow_key]['global_external'],
                0,  # flow_entropies[flow_key]['global_internal'],
                delta_flow_entropies[flow_key]['global_external'],
                0,  # delta_flow_entropies[flow_key]['global_internal'],
                nretries,
                in_bytes, out_bytes, in_pkts, out_pkts, in_flows,
                out_flows,
            )
        counter += 1
        # Flush buffered samples every 10 intervals.
        if counter % 10 == 0:
            myRRD.update()
    myRRD.update()
    # Render one graph per single-series feature (all but the last,
    # wireless_retries, which is drawn separately below).
    for idx, (feature, label) in enumerate(graph_setups[:-1]):
        def1 = DEF(rrdfile=myRRD.filename, vname=label, dsName=dss[idx].name)
        vdef1 = VDEF(vname='avg', rpn='%s,AVERAGE' % def1.vname)
        vdef2 = VDEF(vname='min', rpn='%s,MINIMUM' % def1.vname)
        vdef3 = VDEF(vname='max', rpn='%s,MAXIMUM' % def1.vname)
        vdef4 = VDEF(vname='stdev', rpn='%s,STDEV' % def1.vname)
        # Bands highlighting values N standard deviations from the mean.
        cdef1 = CDEF(vname='slightlyhigh',
                     rpn='%s,avg,stdev,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef2 = CDEF(vname='abnormallyhigh',
                     rpn='%s,avg,stdev,1.5,*,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef3 = CDEF(vname='vhigh',
                     rpn='%s,avg,stdev,2.0,*,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef4 = CDEF(vname='slightlylow',
                     rpn='%s,avg,stdev,-,LE,%s,UNKN,IF' %
                     (def1.vname, def1.vname))
        cdef5 = CDEF(vname='abnormallylow',
                     rpn='%s,avg,stdev,1.5,*,-,LE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef6 = CDEF(vname='vlow',
                     rpn='%s,avg,stdev,2.0,*,-,LE,%s,UNKN,IF' % (def1.vname, def1.vname))
        area1 = AREA(defObj=def1, color='#00FF00')
        area2 = AREA(defObj=cdef1, color='#FFFF00')
        area3 = AREA(defObj=cdef2, color='#FF9900')
        area4 = AREA(defObj=cdef3, color='#FF0000')
        # NOTE(review): area5/6/7 all reference cdef4; areas 6 and 7 were
        # presumably meant to use cdef5 and cdef6 — confirm.
        area5 = AREA(defObj=cdef4, color='#FFFF00')
        area6 = AREA(defObj=cdef4, color='#FF9900')
        area7 = AREA(defObj=cdef4, color='#FF0000')
        gprint1 = GPRINT(vdef1, 'Average %.2lf')
        gprint2 = GPRINT(vdef2, 'Min %.2lf')
        gprint3 = GPRINT(vdef3, 'Max %.2lf')
        gprint4 = GPRINT(vdef4, 'Stdev %.2lf')
        g = Graph(graph_file % (graph_type, feature),
                  start=int(keys[0]), end=int(keys[-1]) )
        g.data.extend([def1, vdef1, vdef2, vdef3, vdef4,
                       cdef1, cdef2, cdef3, cdef4, cdef5, cdef6,
                       area1, area2, area3, area4, area5, area6, area7,
                       gprint1, gprint2, gprint3, gprint4 ])
        # Entropy graphs (idx > 5) are drawn narrower.
        if idx > 5:
            g.width = 380
        else:
            g.width = 540
        g.height = 100
        g.write()
    # The wireless-retries series gets its own wide, short line graph.
    wireless_index = len(graph_setups) - 1
    wireless_feature, wireless_label = graph_setups[wireless_index]
    def1 = DEF(rrdfile=myRRD.filename, vname=wireless_label,
               dsName=dss[wireless_index].name)
    line1 = LINE(defObj=def1, color='#FF0000')
    g = Graph(graph_file % (graph_type, wireless_feature),
              start=int(keys[0]), end=int(keys[-1]) )
    g.data.extend([def1, line1])
    g.width = 1800
    g.height = 80
    g.write()
    # In/out pairs: inbound drawn positive, outbound mirrored below zero.
    for idx, (feature, label) in enumerate(dbl_graph_setups):
        # The paired sources occupy the tail of dss, two per feature.
        def1 = DEF(rrdfile=myRRD.filename, vname=label+'IN',
                   dsName=(dss[len(dss)-(len(dbl_graph_setups)*2)+(idx*2)]).name)
        def2 = DEF(rrdfile=myRRD.filename, vname=label+'OUT',
                   dsName=(dss[len(dss)-(len(dbl_graph_setups)*2)+(idx*2)+1]).name)
        cdef1 = CDEF(vname=label[0]+'IN', rpn='%s' % def1.vname)
        cdef2 = CDEF(vname=label[0]+'OUT', rpn='%s,-1,*' % def2.vname)
        area1 = AREA(defObj=cdef1, color='#FF0000')
        area2 = AREA(defObj=cdef2, color='#00FF00')
        g = Graph(graph_file % (graph_type, feature), start=int(keys[0]),
                  end=int(keys[-1]))
        g.data.extend([def1, def2, cdef1, cdef2, area1, area2])
        g.width = 380
        g.height = 100
        g.write()
def main():
    """Sample a process's memory (VSZ/RSS) once a second into an RRD,
    then render a graph of the run with the rrdtool CLI.

    Relies on module-level names: ``parser``, ``sigint_handler``,
    ``rrd_file``, ``graph_file``, ``KEEP_GOING``,
    ``pid_memory``/``pid_and_subs_memory``.
    """
    ns = parser.parse_args()
    # SIGINT flips KEEP_GOING (presumably — handler defined elsewhere) so
    # the sampling loop below exits cleanly.
    signal.signal(signal.SIGINT, sigint_handler)
    # Two GAUGE sources, heartbeat 2s (twice the 1s step).
    dss = [
        DataSource(dsName='vsz', dsType='GAUGE', heartbeat=2),
        DataSource(dsName='rss', dsType='GAUGE', heartbeat=2)
    ]
    rras = [
        # 10-step (10s) averages, 3000 rows.
        RRA(cf='AVERAGE', xff=0.5, steps=10, rows=3000),
        #RRA(cf='AVERAGE', xff=0.5, steps=1, rows=100)
        # Raw 1s samples, kept for 300000 rows (~3.5 days).
        RRA(cf='LAST', xff=0.5, steps=1, rows=300000)
    ]
    # Start from a clean slate; ignore "file not found".
    try:
        os.remove(rrd_file)
        os.remove(graph_file)
    except OSError:
        pass
    rrd = RRD(rrd_file, ds=dss, rra=rras, step=1)
    rrd.create()
    start = time.time()
    print("Starting at %d." % start)
    while KEEP_GOING:
        # --children: include the whole process subtree.
        if ns.children:
            vsz, rss = pid_and_subs_memory(ns.pid)
        else:
            vsz, rss = pid_memory(ns.pid)
        #print("sample {} {}".format(size, rss))
        # Both zero is the sentinel for "process has gone away".
        if vsz == 0 and rss == 0:
            break
        rrd.bufferValue(time.time(), vsz, rss)
        rrd.update()
        time.sleep(1)
    end = time.time()
    print("Sampling finishes: %d." % end)
    # #rrdtool fetch foo.rrd AVERAGE --end=now --start=now-50s
    # command = [
    #     'rrdtool', 'fetch', rrd_file, 'AVERAGE',
    #     '--end', str(int(end)),
    #     '--start', str(int(start))
    # ]
    # ps = subprocess.Popen(command)
    # ps.wait()
    #CDEF:mem_used_x=mem_used,1024,\* \
    #LINE2:mem_used_x#D7CC00:mem_used
    # Render the graph with the external rrdtool binary; values are in
    # KiB, so the CDEFs scale by 1024 to plot bytes.
    command = [
        'rrdtool', 'graph',
        '--title', ns.graph_name,
        graph_file,
        '--start', str(int(start)),
        '--end', str(int(end)),
        #
        'DEF:vsz={}:vsz:AVERAGE'.format(rrd_file),
        'DEF:rss={}:rss:AVERAGE'.format(rrd_file),
        #
        'CDEF:vsz_k=vsz,1024,*',
        'CDEF:rss_k=rss,1024,*',
        #
        'LINE:vsz_k#4287f5:Virtual',
        'LINE:rss_k#42d7f5:Residential',
    ]
    ps = subprocess.check_output(command)
    print(ps)
    sys.exit(0)
def rrdcreate(rrdname):
    """Create and return a fresh RRD named *rrdname*.

    The archive start is anchored 360000 seconds before a reference
    point 600 seconds in the past; step, data sources and archives come
    from the module-level ``settings``, ``ds()`` and ``rra()`` helpers.
    """
    reference_time = int(time.time()) - 600
    archive_start = reference_time - 360000
    database = RRD(filename=rrdname, step=settings.step, ds=ds(),
                   rra=rra(), start=archive_start)
    database.create(debug=False)
    return database
def rrdtool_create(self, filename):
    """Create an rrdtool database at *filename* if it doesn't exist.

    Builds AVERAGE and MAX round-robin archives for several canonical
    timespans (hour through year), each holding ``target_resolution``
    consolidated rows, over a single GAUGE source named 'sum'.
    """
    # Create the containing directory if needed.  dirname() is '' for a
    # bare filename — skip makedirs in that case (the old
    # "'/'.join(...)" form would have crashed on it).
    directory = os.path.dirname(filename)
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)

    # Step interval for Primary Data Points (pdp).
    pdp_step = self.frequency.seconds + (self.frequency.days * 86400)

    # Heartbeat can be 'whatev', but twice the pdp_step is good.
    heartbeat = 2 * pdp_step

    # We only keep a single simple datasource.
    sources = [
        DataSource(
            dsName='sum',
            dsType='GAUGE',
            heartbeat=heartbeat
        )
    ]

    # TODO -- this should be a user-definable number.  It is equivalent
    # to "how many data points do I want to see on any one graph at any
    # given time."  The higher it is, the cooler your graphs look; the
    # more disk space is consumed; the more memory is consumed
    # client-side.
    target_resolution = 60

    def cdp_steps(tspan):
        # How many PDP steps make up one Consolidated Data Point (CDP)
        # for an archive spanning *tspan* seconds.  Floor division keeps
        # the result integral: under Python 3 the old "/" produced
        # floats, which rrdtool rejects for RRA steps.  (Under Python 2
        # int/int already floored, so behavior is unchanged there.)
        return (tspan // pdp_step) // target_resolution

    # Seconds per named timespan.
    timespans = {
        'hour': 3600,
        'day': 86400,
        'week': 604800,
        'month': 2629744,
        'quarter': 7889231,
        'year': 31556926,
    }

    self.log.info("Building rrd %s. %i cdp steps per hour." % (
        filename, cdp_steps(timespans['hour'])))

    # One archive per (consolidation function, timespan) pair.
    # .items() replaces the Python-2-only .iteritems().
    archives = []
    for consolidation_function in ['AVERAGE', 'MAX']:
        archives += [
            RRA(cf=consolidation_function, xff=0.5,
                rows=target_resolution,
                steps=cdp_steps(seconds_per_timespan))
            for name, seconds_per_timespan in timespans.items()
        ]

    # Actually build the round robin database from the parameters we built.
    rrd = RRD(
        filename,
        start=int(time.time()),
        step=pdp_step,
        ds=sources,
        rra=archives,
    )
    rrd.create()
def draw_graph(data, group):
    """Build a per-group network RRD from *data* (an iterable of dicts
    with 'bytes_in', 'bytes_out', 'request', 'time' keys) and render a
    traffic graph and a request graph for that group."""
    ## Graph bytes_in, bytes_out, request, by time+group
    filename = 'network.rrd'
    graphfile_traffic = 'traffic%s.png' %group
    # graphfileLg_traffic = 'traffic-large.png'
    graphfile_request = 'request%s.png' %group
    # graphfileLg_request = 'request-large'
    #define times (seconds per unit)
    hour = 60 * 60
    day = 24 * 60 * 60
    week = 7 * day
    month = day * 30
    quarter = month * 3
    half = 365 * day / 2
    year = 365 * day
    # Graph window length in seconds (settings.DELTA is in hours).
    delta = settings.DELTA * hour
    step = 1
    # Archive window: 360000s ending 10 minutes ago.
    endTime = int(time.time()) - 600
    startTime = endTime - 360000
    maxSteps = int((endTime-startTime)/step)
    # Create the RRD file.
    #
    # DSTYPE
    #   Counter:  for SNMP-MIB-style counters (interface traffic,
    #             packet counts).
    #   Gauge:    for instantaneous values (temperature, pressure).
    #   Derive:   like COUNTER but without overflow checks; rates such
    #             as people entering/leaving a room.
    #   Absolute: for counts reset on every read (e.g. mail since the
    #             last alert).
    #
    # HEARTBEAT
    #   Maximum gap between updates before a value becomes UNKNOWN.
    # MIN and MAX are optional range bounds for a data source (DS);
    # out-of-range values are stored as UNKNOWN.  If the range is not
    # known, set MIN and MAX to U (unknown).
    dss = []
    ds1 = DS(dsName='bytes_out', dsType='ABSOLUTE', heartbeat=200)
    ds2 = DS(dsName='bytes_in', dsType='ABSOLUTE', heartbeat=200)
    ds3 = DS(dsName='request', dsType='COUNTER', heartbeat=200)
    dss.extend([ds1, ds2, ds3])
    rras1 = []
    # 1-step, 6-step and 60-step averages at decreasing resolution.
    rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)
    rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=2016)
    rra3 = RRA(cf='AVERAGE', xff=0.5, steps=60, rows=720)
    rras1.extend([rra1, rra2, rra3])
    myRRD = RRD(filename, step=step, ds=dss, rra=rras1, start=startTime)
    myRRD.create(debug=False)
    ## RRD update: buffer every sample, flush every 100 rows.
    counter = 0
    for i in data:
        counter += 1
        bytes_in = i['bytes_in']
        bytes_out = i['bytes_out']
        requests = i['request']
        times = i['time']
        # Progress trace in MB (Python 2 print statement).
        print bytes_out/1000000
        # Values are applied to the DSs in the order they were defined.
        myRRD.bufferValue(times, bytes_out, bytes_in, requests)
        if counter % 100 == 0:
            myRRD.update(debug=True)
    # Flush whatever is left in the buffer.
    myRRD.update(debug=True)
    ## RRD graph: DEFs read the stored series, VDEFs compute summaries.
    def1 = DEF(rrdfile=myRRD.filename, vname='output', dsName=ds1.name)
    def2 = DEF(rrdfile=myRRD.filename, vname='input', dsName=ds2.name)
    def3 = DEF(rrdfile=myRRD.filename, vname='request', dsName=ds3.name)
    vdef11 = VDEF(vname='max_out', rpn='%s,MAXIMUM' % def1.vname)
    vdef12 = VDEF(vname='avg_out', rpn='%s,AVERAGE' % def1.vname)
    vdef21 = VDEF(vname='max_in', rpn='%s,MAXIMUM' % def2.vname)
    vdef22 = VDEF(vname='avg_in', rpn='%s,AVERAGE' % def2.vname)
    vdef31 = VDEF(vname='max_request', rpn='%s,MAXIMUM' % def3.vname)
    vdef32 = VDEF(vname='avg_request', rpn='%s,AVERAGE' % def3.vname)
    line1 = LINE(2, defObj=def1, color='#2029CC', legend='Out')
    line2 = LINE(2, defObj=def2, color='#00FF00', legend='In')
    line3 = LINE(2, defObj=def3, color='#FF0000', legend='Request')
    gprint11 = GPRINT(vdef11, 'max\\: %5.1lf %Sbps')
    gprint12 = GPRINT(vdef12, 'avg\\: %5.1lf %Sbps\\n')
    gprint21 = GPRINT(vdef21, 'max\\: %5.1lf %Sbps')
    gprint22 = GPRINT(vdef22, 'avg\\: %5.1lf %Sbps\\n')
    gprint31 = GPRINT(vdef31, 'max\\: %5.1lf %S')
    gprint32 = GPRINT(vdef32, 'avg\\: %5.1lf %S\\n')
    # ColorAttributes
    ca = ColorAttributes()
    ca.back = '#CCCDE2' #background
    ca.canvas = '#FFFFFF'#the background of the actual graph
    ca.shadea = '#000000'#left and top border
    ca.shadeb = '#111111'#right and bottom border
    ca.mgrid = '#6666CC' #major grid
    ca.axis = '#000000' #axis of the graph
    ca.frame = '#CCCDE2' #line around the color spots
    ca.font = '#000000' #color of the font
    ca.arrow = '#CC0000' # arrow head pointing up and forward
    ## graph traffic
    g = Graph(graphfile_traffic, end=endTime, vertical_label='Bytes/s',
              color=ca)
    g.data.extend([def1, def2, vdef11, vdef12, vdef21, vdef22, line1,
                   gprint11, gprint12, line2, gprint21, gprint22])
    g.title = '"report traffic %s"'%group
    g.start=endTime - delta
    g.step = step
    g.width = 397
    g.height = 182
    g.write(debug=True)
    # g.filename = graphfileLg_traffic
    # g.width = 800
    # g.height = 400
    # g.write()
    ## graph request
    g1 = Graph(graphfile_request, end=endTime, vertical_label='Request/s',
               color=ca)
    g1.data.extend([def3, vdef31, vdef32, line3, gprint31, gprint32])
    g1.title = '"report request %s"'%group
    # NOTE(review): uses raw settings.DELTA (hours) here, while the
    # traffic graph above uses delta (seconds) — confirm this window is
    # intentional.
    g1.start=endTime - settings.DELTA
    g1.step = step
    g1.width = 397
    g1.height = 182
    g1.write(debug=False)
def draw_total(res):
    """Build an aggregate-totals RRD from *res* (dicts with
    'total_bytes_in', 'total_bytes_out', 'total_request', 'time') and
    render a total-traffic graph and a total-request graph."""
    ## graph total(bytes_out, bytes_in, request) by time
    # define names
    filename = 'total.rrd'
    graphfile_total_traffic = 'total_traffic.png'
    # graphfileLg_total_traffic = 'total_traffic-large.png'
    graphfile_total_request = 'total_request.png'
    # graphfileLg_total_request = 'total_request-large'
    #define times (seconds per unit)
    hour = 60 * 60
    day = 24 * 60 * 60
    week = 7 * day
    month = day * 30
    quarter = month * 3
    half = 365 * day / 2
    year = 365 * day
    # Graph window length in seconds (settings.DELTA is in hours).
    delta = settings.DELTA * hour
    step = 1
    # Archive window: 360000s ending 10 minutes ago.
    endTime = int(time.time()) - 600
    startTime = endTime - 360000
    maxSteps = int((endTime-startTime)/step)
    ## Create RRD
    dss = []
    ds1 = DS(dsName='total_bytes_out', dsType='ABSOLUTE', heartbeat=200)
    ds2 = DS(dsName='total_bytes_in', dsType='ABSOLUTE', heartbeat=200)
    ds3 = DS(dsName='total_request', dsType='ABSOLUTE', heartbeat=200)
    dss.extend([ds1, ds2, ds3])
    rras1 = []
    # Three resolutions of averages, as in draw_graph.
    rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)
    rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=2016)
    rra3 = RRA(cf='AVERAGE', xff=0.5, steps=60, rows=720)
    rras1.extend([rra1, rra2, rra3])
    myRRD = RRD(filename, step=step, ds=dss, rra=rras1, start=startTime)
    myRRD.create(debug=False)
    ## RRD update: buffer every row, flush every 100 rows.
    counter = 0
    for i in res:
        counter += 1
        total_bytes_in = int(i['total_bytes_in'])
        total_bytes_out = int(i['total_bytes_out'])
        total_requests = int(i['total_request'])
        t_times = int(i['time'])
        # Progress trace in MB (Python 2 print statement).
        print total_bytes_out/1000000
        myRRD.bufferValue(t_times, total_bytes_out, total_bytes_in,
                          total_requests)
        if counter % 100 == 0:
            myRRD.update(debug=True)
    # Flush whatever is left in the buffer.
    myRRD.update(debug=True)
    ## RRD graph
    def1 = DEF(rrdfile=myRRD.filename, vname='output', dsName=ds1.name,
               cdef='AVERAGE')
    def2 = DEF(rrdfile=myRRD.filename, vname='input', dsName=ds2.name,
               cdef='AVERAGE')
    def3 = DEF(rrdfile=myRRD.filename, vname='request', dsName=ds3.name,
               cdef='AVERAGE')
    # Out
    vdef11 = VDEF(vname='max_out', rpn='%s,MAXIMUM' % def1.vname)
    vdef12 = VDEF(vname='avg_out', rpn='%s,AVERAGE' % def1.vname)
    vdef13 = VDEF(vname='min_out', rpn='%s,MINIMUM' % def1.vname)
    line1 = LINE(2, defObj=def1, color='#2029CC', legend='Out')
    gprint11 = GPRINT(vdef11, 'max\\: %5.1lf %Sbps')
    gprint12 = GPRINT(vdef12, 'avg\\: %5.1lf %Sbps')
    gprint13 = GPRINT(vdef13, 'min\\: %5.1lf %Sbps\\n')
    # In
    vdef21 = VDEF(vname='max_in', rpn='%s,MAXIMUM' % def2.vname)
    vdef22 = VDEF(vname='avg_in', rpn='%s,AVERAGE' % def2.vname)
    line2 = LINE(2, defObj=def2, color='#00FF00', legend='In')
    gprint21 = GPRINT(vdef21, 'max\\: %5.1lf %Sbps')
    gprint22 = GPRINT(vdef22, 'avg\\: %5.1lf %Sbps\\n')
    # Request
    vdef31 = VDEF(vname='max_request', rpn='%s,MAXIMUM' % def3.vname)
    vdef32 = VDEF(vname='avg_request', rpn='%s,AVERAGE' % def3.vname)
    line3 = LINE(2, defObj=def3, color='#FF0000', legend='Request')
    gprint31 = GPRINT(vdef31, 'max\\: %5.1lf %S')
    gprint32 = GPRINT(vdef32, 'avg\\: %5.1lf %S\\n')
    # ColorAttributes
    ca = ColorAttributes()
    ca.back = '#CCCDE2' #background
    ca.canvas = '#FFFFFF'#the background of the actual graph
    ca.shadea = '#000000'#left and top border
    ca.shadeb = '#111111'#right and bottom border
    ca.mgrid = '#6666CC' #major grid
    ca.axis = '#000000' #axis of the graph
    ca.frame = '#CCCDE2' #line around the color spots
    ca.font = '#000000' #color of the font
    ca.arrow = '#CC0000' # arrow head pointing up and forward
    ## total traffic graph
    g = Graph(graphfile_total_traffic, end=endTime,
              vertical_label='Bytes/s', color=ca)
    g.data.extend([def1, def2, vdef11, vdef12, vdef13, vdef21, vdef22,
                   line1, gprint11, gprint12, gprint13, line2, gprint21,
                   gprint22])
    g.title = '"report total traffic"'
    g.start = endTime - delta
    g.step = step
    g.width = 397
    g.height = 182
    g.write(debug=True)
    # g.filename = graphfileLg_total_traffic
    # g.width = 800
    # g.height = 400
    # g.write()
    ## total request graph
    g1 = Graph(graphfile_total_request, end=endTime,
               vertical_label='Request/s', color=ca)
    g1.data.extend([def3, vdef31, vdef32, line3, gprint31, gprint32])
    g1.title = '"report total request"'
    # NOTE(review): raw settings.DELTA (hours) here vs. delta (seconds)
    # above — confirm this window is intentional.
    g1.start = endTime - settings.DELTA
    g1.step = step
    g1.width = 397
    g1.height = 182
    g1.write(debug=True)
def GenerateGraph(): data = db.GetDataHumidityRrd(10000) #print len(data) filename = 'humidity.rrd' graphfile = 'humidity.png' graphfileLg = 'humidity-large.png' day = 24 * 60 * 60 week = 7 * day month = day * 30 quarter = month * 3 half = 365 * day / 2 year = 365 * day startTime = data[0][0] -1 endTime = data[-1][0] step = 1000 maxSteps = int((endTime-startTime)/step) # Let's create and RRD file and dump some data in it dss = [] ds1 = DS(dsName='humidity', dsType='GAUGE', heartbeat=60) dss.extend([ds1]) #week: RA:AVERAGE:0.5:6:336 #For Daily Graph, every 5 minute average for 24 hours: #RRA:AVERAGE:0.5:1:288 rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440) #For Weekly Graph, every 30 minute average for 7 days: #RRA:AVERAGE:0.5:6:336 #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=336) #For Monthly Graph, every 2 hour average for 30 days: #RRA:AVERAGE:0.5:24:360 #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=32, rows=1080) #For Yearly Graph, every 1 day average for 365 days: #RRA:AVERAGE:0.5:288:365 #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=96, rows=365) rras = [] #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=1460) rras.append(rra1) myRRD = RRD(filename, ds=dss, rra=rras, start=startTime) myRRD.create() # let's generate some data... currentTime = startTime i = 0 for row in data: timestamp = row[0] value1 = row[1] # lets update the RRD/purge the buffer ever 100 entires i = i + 1 if i % 100 == 0: myRRD.update(debug=False) # when you pass more than one value to update buffer like this, # they get applied to the DSs in the order that the DSs were # "defined" or added to the RRD object. 
myRRD.bufferValue(timestamp, value1) # add anything remaining in the buffer myRRD.update() # Let's set up the objects that will be added to the graph def1 = DEF(rrdfile=myRRD.filename, vname='anturi1', dsName=ds1.name) vdef1 = VDEF(vname='myavg', rpn='%s,AVERAGE' % def1.vname) sensor1 = LINE(defObj=def1, color='#4544FC', legend='anturi1') line1 = LINE(defObj=vdef1, color='#01FF13', legend='Average', stack=True) # Let's configure some custom colors for the graph ca = ColorAttributes() ca.back = '#000000' ca.canvas = '#000000' ca.shadea = '#000000' ca.shadeb = '#111111' ca.mgrid = '#CCCCCC' ca.axis = '#FFFFFF' ca.frame = '#AAAAAA' ca.font = '#FFFFFF' ca.arrow = '#FFFFFF' # Now that we've got everything set up, let's make a graph #startTime = endTime - 3 * month g = Graph(graphfile, start=startTime, end=endTime, vertical_label='kosteus', color=ca) g.data.extend([def1, vdef1, sensor1]) g.write() g.filename = graphfileLg g.width = 690 g.height = 300 g.write()
class BindingsBackendTestCase(TestCase):
    """Create a small RRD through the bindings backend and compare the
    ``info()`` output in write mode and read mode (Python 2 variant)."""

    def setUp(self):
        # One COUNTER source, two AVERAGE archives, fixed epoch start.
        self.ds = [
            DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        self.rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=self.ds, rra=self.rra,
                       start=920804400, backend=bindings)
        self.rrd.create()

    def test_infoWriteMode(self):
        # Expected line-by-line output of info() before the file is
        # read back (write-mode view of the object).
        expectedOutput = """
rra = [{'rows': 24, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 1, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}, {'rows': 10, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 6, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}]
filename = /tmp/tmpQCLRj0
start = 920804400
step = 300
values = []
ds = [{'name': 'speed', 'min': 'U', 'max': 'U', 'unknown_sec': None, 'minimal_heartbeat': 600, 'value': None, 'rpn': None, 'type': 'COUNTER', 'last_ds': None}]
ds[speed].name = speed
ds[speed].min = U
ds[speed].max = U
ds[speed].minimal_heartbeat = 600
ds[speed].type = COUNTER
rra[0].rows = 24
rra[0].cf = AVERAGE
rra[0].steps = 1
rra[0].xff = 0.5
rra[0].ds = []
rra[1].rows = 10
rra[1].cf = AVERAGE
rra[1].steps = 6
rra[1].xff = 0.5
rra[1].ds = []
""".strip().split("\n")
        output = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        self.rrd.info(useBindings=True, stream=output)
        for obtained, expected in zip(
                output.getvalue().split("\n"), expectedOutput):
            # The temp filename differs on every run, so only check the
            # "filename" prefix for that line.
            if obtained.startswith("filename"):
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEquals(obtained.strip(), expected.strip())

    def test_infoReadMode(self):
        # NOTE(review): unlike test_infoWriteMode, this string is never
        # .strip().split("\n")-ed, so the zip() below pairs output lines
        # with single CHARACTERS of expectedOutput — confirm intent.
        expectedOutput = """
filename = "/tmp/tmpP4bTTy"
rrd_version = "0003"
step = 300
last_update = 920804400
header_size = 800
ds[speed].index = 0
ds[speed].type = "COUNTER"
ds[speed].minimal_heartbeat = 600
ds[speed].min = NaN
ds[speed].max = NaN
ds[speed].last_ds = "U"
ds[speed].value = 0.0000000000e+00
ds[speed].unknown_sec = 0
rra[0].cf = "AVERAGE"
rra[0].rows = 24
rra[0].cur_row = 3
rra[0].pdp_per_row = 1
rra[0].xff = 5.0000000000e-01
rra[0].cdp_prep[0].value = NaN
rra[0].cdp_prep[0].unknown_datapoints = 0
rra[1].cf = "AVERAGE"
rra[1].rows = 10
rra[1].cur_row = 2
rra[1].pdp_per_row = 6
rra[1].xff = 5.0000000000e-01
rra[1].cdp_prep[0].value = NaN
rra[1].cdp_prep[0].unknown_datapoints = 0
"""
        # Re-open the file read-only through the bindings backend.
        rrd = RRD(filename=self.rrdfile.name, mode="r", backend=bindings)
        output = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        rrd.info(useBindings=True, stream=output)
        for obtained, expected in zip(
                output.getvalue().split("\n"), expectedOutput):
            print "obtained:", obtained
            print "expected:", expected
            if obtained.startswith("filename"):
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEquals(obtained.strip(), expected.strip())
        # NOTE(review): `originalStdout` is not defined in this class —
        # presumably saved at module level; verify.
        sys.stdout = originalStdout
ds4 = DS(dsName='ds_out_bits', dsType='ABSOLUTE', heartbeat=900) dss.extend([ds1, ds2, ds3, ds4]) # An now let's setup how our RRD will archive the data rras = [] # 1 days-worth of one-minute samples --> 60/1 * 24 rra1 = RRA(cf='AVERAGE', xff=0, steps=1, rows=1440) # 7 days-worth of five-minute samples --> 60/5 * 24 * 7 rra2 = RRA(cf='AVERAGE', xff=0, steps=5, rows=2016) # 30 days-worth of five-minute samples --> 60/60 * 24 * 30 rra3 = RRA(cf='AVERAGE', xff=0, steps=60, rows=720) rras.extend([rra1, rra2, rra3]) # With those setup, we can now created the RRD myRRD = RRD(filename, step=step, ds=dss, rra=rras, start=startTime) myRRD.create(debug=False) # Let's suck in that data... the data file has the following format: # DS TIME:VALUE [TIME:VALUE [TIME:VALUE]...] # and the lines are in a completely arbitrary order. data = {} # First, we need to get everything indexed by time for line in open(datafile).readlines(): line = line.strip() lineParts = line.split(' ') dsName = lineParts[0] for timedatum in lineParts[1:]: time, datum = timedatum.split(':') # For each time index, let's have a dict data.setdefault(time, {}) # Now let's add the DS names and its data for this time to the
"%a %b %d %H:%M:%S %Z %Y"))) from pyrrd.rrd import DataSource, RRA, RRD filename = '/tmp/test.rrd' roundRobinArchives = [] dataSources = [] dataSource = DataSource( dsName="rtt", dsType="GAUGE", heartbeat=1) dataSources.append(dataSource) roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=3600*24*7)) roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=60, rows=4)) roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=60, rows=12)) rrd = RRD(filename, ds=dataSources, rra=roundRobinArchives, start=timestamp-1) if not os.path.isfile(rrd.filename): rrd.create() i = 0 for rtt in parse_rtt(lines): print i, rtt rrd.bufferValue(timestamp+i, int(1000 * rtt)) i += 1 if i % 100 == 0: rrd.update() rrd.update() from pyrrd.graph import DEF, CDEF, VDEF, LINE, AREA, GPRINT, COMMENT, Graph comment = COMMENT("RTT from SMO to Creagan Dearga") rttus = DEF(rrdfile=rrd.filename, vname="rttus", dsName="rtt") rttms = CDEF(vname="rttms", rpn="%s,1000,/" % rttus.vname) rtt = LINE(defObj=rttms, color="#2299ff", legend="RTT")
class SystemCounts(object):
    """Handle the rrd for the system counts"""
    # Relative file names; joined onto data_root/output_root below.
    _datafile = 'systemcount.rrd'
    _outputfile = 'systemcount.png'

    @property
    def datafile(self):
        # Absolute path of the RRD data file.
        return os.path.join(self.data_root, self._datafile)

    @property
    def outputfile(self):
        # Absolute path of the rendered PNG.
        return os.path.join(self.output_root, self._outputfile)

    def _boostrap(self):
        """Put together out bits"""
        # Three GAUGE sources; `hour` and `start_date` are module-level
        # names not visible here.
        self.dss = []
        self.ds1 = DS(dsName='bookmark_count', dsType='GAUGE',
                      heartbeat=hour)
        self.ds2 = DS(dsName='unique_count', dsType='GAUGE',
                      heartbeat=hour)
        self.ds3 = DS(dsName='tag_count', dsType='GAUGE', heartbeat=hour)
        self.dss.extend([self.ds1, self.ds2, self.ds3])
        self.rras = []
        # 24-step averages kept for 8760 rows (a year of daily points if
        # the base step is an hour — confirm against module step).
        rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=8760)
        self.rras.append(rra1)
        self.myRRD = RRD(self.datafile, ds=self.dss, rra=self.rras,
                         start=int(time.mktime(start_date.timetuple())))
        if not os.path.exists(self.datafile):
            # make sure we create the directory
            if not os.path.exists(os.path.dirname(self.datafile)):
                os.makedirs(os.path.dirname(self.datafile))
            self.myRRD.create()

    def __init__(self, data_root, output_root):
        """Bootstrap, does the data file exist, etc"""
        self.data_root = data_root
        self.output_root = output_root
        self._boostrap()

    def output(self, months=3):
        """Render out the image of the rrd.

        NOTE(review): the ``months`` parameter is unused — the window is
        hard-coded to 28 weeks below; confirm intent.
        """
        def1 = DEF(rrdfile=self.datafile, vname='bookmark_count',
                   dsName=self.ds1.name)
        def2 = DEF(rrdfile=self.datafile, vname='unique_count',
                   dsName=self.ds2.name)
        def3 = DEF(rrdfile=self.datafile, vname='tag_count',
                   dsName=self.ds3.name)
        line1 = LINE(defObj=def1, color='#01FF13', legend='Bookmarks',
                     stack=True)
        line2 = LINE(defObj=def2, color='#DA7202', legend='Unique',
                     stack=True)
        line3 = LINE(defObj=def3, color='#BD4902', legend='Tags',
                     stack=True)
        # area1 = AREA(defObj=def1, color='#FFA902', legend='Bookmarks')
        # area2 = AREA(defObj=def2, color='#DA7202', legend='Unique')
        # area3 = AREA(defObj=def3, color='#BD4902', legend='Tags')
        # Let's configure some custom colors for the graph
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'
        # Now that we've got everything set up, let's make a graph for
        # the last 28 weeks (`today` is a module-level date).
        start_date = time.mktime((today - timedelta(weeks=28)).timetuple())
        end_date = time.mktime(today.timetuple())
        g = Graph(self.outputfile, start=int(start_date),
                  end=int(end_date), vertical_label='count', color=ca)
        g.data.extend([def1, def2, def3, line3, line2, line1])
        if not os.path.exists(os.path.dirname(self.outputfile)):
            os.makedirs(os.path.dirname(self.outputfile))
        g.write()

    def mark(self, tstamp, bmarks, uniques, tags):
        """Update the database with some data"""
        timestamp = time.mktime(tstamp.timetuple())
        self.myRRD.bufferValue(int(timestamp), bmarks, uniques, tags)

    def update(self):
        """Update the underlying rrd data"""
        try:
            self.myRRD.update(debug=False)
        except ExternalCommandError, exc:
            # Python 2 except syntax — this module is Python 2 only.
            print "ERROR", str(exc)
maxSteps = int((endTime - startTime) / step) # Let's create and RRD file and dump some data in it dss = [] ds1 = DS(dsName='speed', dsType='GAUGE', heartbeat=900) ds2 = DS(dsName='silliness', dsType='GAUGE', heartbeat=900) ds3 = DS(dsName='insanity', dsType='GAUGE', heartbeat=900) ds4 = DS(dsName='dementia', dsType='GAUGE', heartbeat=900) dss.extend([ds1, ds2, ds3, ds4]) rras = [] rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=1460) rras.append(rra1) myRRD = RRD(filename, ds=dss, rra=rras, start=startTime) myRRD.create() # let's generate some data... currentTime = startTime for i in xrange(maxSteps): currentTime += step # lets update the RRD/purge the buffer ever 100 entires if i % 100 == 0: myRRD.update(debug=False) # let's do two different sets of periodic values value1 = int(sin(i % 200) * 1000) value2 = int(sin((i % 2000) / (200 * random())) * 200) value3 = int(sin((i % 4000) / (400 * random())) * 400) value4 = int(sin((i % 6000) / (600 * random())) * 600) # when you pass more than one value to update buffer like this, # they get applied to the DSs in the order that the DSs were
class RRDController(object):
    """Maintain an nginx-status style RRD (requests + connection states)
    and render request/connection PNG graphs into a static directory."""

    def __init__(self, rrdfile, static_path):
        self.rrdfile = rrdfile
        self.static_path = static_path

    def delete(self):
        """Remove the RRD file from disk."""
        os.unlink(self.rrdfile)

    def create(self):
        """Create the RRD, or just open it if it already exists."""
        if os.path.exists(self.rrdfile):
            self.rrd = RRD(self.rrdfile)
            return
        # requests is a monotonically increasing COUNTER; the four
        # connection-state sources are per-interval ABSOLUTE values.
        dss = []
        ds1 = DS(dsName="requests", dsType="COUNTER", heartbeat=120,
                 minval=0, maxval=100000000)
        ds2 = DS(dsName="connections", dsType="ABSOLUTE", heartbeat=120,
                 minval=0, maxval=60000)
        ds3 = DS(dsName="reading", dsType="ABSOLUTE", heartbeat=120,
                 minval=0, maxval=60000)
        ds4 = DS(dsName="writing", dsType="ABSOLUTE", heartbeat=120,
                 minval=0, maxval=60000)
        ds5 = DS(dsName="waiting", dsType="ABSOLUTE", heartbeat=120,
                 minval=0, maxval=60000)
        dss.extend([ds1, ds2, ds3, ds4, ds5])
        # 1-min/30-min/2-h/12-h averages at decreasing resolution.
        rras = []
        rra1 = RRA(cf="AVERAGE", xff=0.5, steps=1, rows=2880)
        rra2 = RRA(cf="AVERAGE", xff=0.5, steps=30, rows=672)
        rra3 = RRA(cf="AVERAGE", xff=0.5, steps=120, rows=732)
        rra4 = RRA(cf="AVERAGE", xff=0.5, steps=720, rows=1460)
        rras.extend([rra1, rra2, rra3, rra4])
        self.rrd = RRD(self.rrdfile, step=60, ds=dss, rra=rras)
        self.rrd.create(debug=False)
        # Give rrdtool a moment to finish writing the new file.
        time.sleep(2)

    def update(self, connections, requests, reading, writing, waiting):
        """Push one sample (timestamped now) into the RRD."""
        self.rrd.bufferValue("%d:%d:%d:%d:%d:%d" % (time.time(),
            connections, requests, reading, writing, waiting))
        self.rrd.update(
            template="connections:requests:reading:writing:waiting",
            debug=True)

    def graph_request(self, period='day'):
        """Render the requests/sec graph for the last *period*
        ('day', 'week', ...)."""
        def1 = DEF(rrdfile=self.rrdfile, vname='request',
                   dsName="requests", cdef="AVERAGE")
        vdef1 = VDEF(vname='max', rpn='request,MAXIMUM')
        vdef2 = VDEF(vname='avg', rpn='request,AVERAGE')
        vdef3 = VDEF(vname='last', rpn='request,LAST')
        area1 = AREA(defObj=def1, color='#336600', legend='Requests')
        gprint1 = GPRINT(vdef1, "Max\\: %5.1lf %S")
        gprint2 = GPRINT(vdef2, "Avg\\: %5.1lf %S")
        gprint3 = GPRINT(vdef3, "Current\\: %5.1lf %Sreq/sec")
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'
        img = "request-%s.png" % period
        imgname = self.static_path +"/"+ img
        start = '-1'+period
        # BUG FIX: the window string ('-1day', '-1week', ...) was being
        # passed as step=; it is a start time, so pass it as start=.
        g = Graph(imgname, imgformat='PNG', start=start,
                  vertical_label='request/sec', color=ca, width=700,
                  height=150)
        g.data.extend([def1, vdef1, vdef2, vdef3, area1, gprint1,
                       gprint2, gprint3])
        g.write()

    def graph_connection(self, period='day'):
        """Render the connection-states graph (total/reading/writing/
        waiting) for the last *period*."""
        def1 = DEF(rrdfile=self.rrdfile, vname='connections',
                   dsName="connections", cdef="AVERAGE")
        def2 = DEF(rrdfile=self.rrdfile, vname='reading',
                   dsName="reading", cdef="AVERAGE")
        def3 = DEF(rrdfile=self.rrdfile, vname='writing',
                   dsName="writing", cdef="AVERAGE")
        def4 = DEF(rrdfile=self.rrdfile, vname='waiting',
                   dsName="waiting", cdef="AVERAGE")
        # TOTAL
        vdef1 = VDEF(vname='max', rpn='connections,MAXIMUM')
        vdef2 = VDEF(vname='avg', rpn='connections,AVERAGE')
        vdef3 = VDEF(vname='last', rpn='connections,LAST')
        vdef4 = VDEF(vname='min', rpn='connections,MINIMUM')
        line1 = LINE(1, defObj=def1, color='#22FF22', legend='Total')
        gprint1 = GPRINT(vdef1, "Max\\: %5.1lf %S")
        gprint2 = GPRINT(vdef2, "Avg\\: %5.1lf %S")
        gprint3 = GPRINT(vdef3, "Current\\: %5.1lf %S")
        gprint4 = GPRINT(vdef4, "Min\\: %5.1lf %S\\n")
        # READING
        reading_vdef1 = VDEF(vname='rmax', rpn='reading,MAXIMUM')
        reading_vdef2 = VDEF(vname='ravg', rpn='reading,AVERAGE')
        reading_vdef3 = VDEF(vname='rlast', rpn='reading,LAST')
        reading_vdef4 = VDEF(vname='rmin', rpn='reading,MINIMUM')
        line2 = LINE(1, defObj=def2, color='#0022FF', legend='Reading')
        reading_gprint1 = GPRINT(reading_vdef1, "Max\\: %5.1lf %S")
        reading_gprint2 = GPRINT(reading_vdef2, "Avg\\: %5.1lf %S")
        reading_gprint3 = GPRINT(reading_vdef3, "Current\\: %5.1lf %S")
        reading_gprint4 = GPRINT(reading_vdef4, "Min\\: %5.1lf %S\\n")
        # WRITING
        writing_vdef1 = VDEF(vname='wmax', rpn='writing,MAXIMUM')
        writing_vdef2 = VDEF(vname='wavg', rpn='writing,AVERAGE')
        writing_vdef3 = VDEF(vname='wlast', rpn='writing,LAST')
        writing_vdef4 = VDEF(vname='wmin', rpn='writing,MINIMUM')
        line3 = LINE(1, defObj=def3, color='#FF0000', legend='Writing')
        writing_gprint1 = GPRINT(writing_vdef1, "Max\\: %5.1lf %S")
        writing_gprint2 = GPRINT(writing_vdef2, "Avg\\: %5.1lf %S")
        writing_gprint3 = GPRINT(writing_vdef3, "Current\\: %5.1lf %S")
        writing_gprint4 = GPRINT(writing_vdef4, "Min\\: %5.1lf %S\\n")
        # WAITING
        waiting_vdef1 = VDEF(vname='wamax', rpn='waiting,MAXIMUM')
        waiting_vdef2 = VDEF(vname='waavg', rpn='waiting,AVERAGE')
        waiting_vdef3 = VDEF(vname='walast', rpn='waiting,LAST')
        waiting_vdef4 = VDEF(vname='wamin', rpn='waiting,MINIMUM')
        line4 = LINE(1, defObj=def4, color='#00AAAA', legend='Waiting')
        waiting_gprint1 = GPRINT(waiting_vdef1, "Max\\: %5.1lf %S")
        waiting_gprint2 = GPRINT(waiting_vdef2, "Avg\\: %5.1lf %S")
        waiting_gprint3 = GPRINT(waiting_vdef3, "Current\\: %5.1lf %S")
        waiting_gprint4 = GPRINT(waiting_vdef4, "Min\\: %5.1lf %S\\n")
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'
        img = "connection-%s.png" % period
        imgname = self.static_path +"/"+ img
        start = '-1'+period
        # BUG FIX: as in graph_request, the window belongs in start=,
        # not step=.
        g = Graph(imgname, imgformat='PNG', start=start,
                  vertical_label='connections', color=ca, width=700,
                  height=150)
        g.data.extend([def1, vdef1, vdef2, vdef3, vdef4, line1, gprint1,
                       gprint2, gprint3, gprint4])
        g.data.extend([def2, reading_vdef1, reading_vdef2, reading_vdef3,
                       reading_vdef4, line2, reading_gprint1,
                       reading_gprint2, reading_gprint3, reading_gprint4])
        g.data.extend([def3, writing_vdef1, writing_vdef2, writing_vdef3,
                       writing_vdef4, line3, writing_gprint1,
                       writing_gprint2, writing_gprint3, writing_gprint4])
        g.data.extend([def4, waiting_vdef1, waiting_vdef2, waiting_vdef3,
                       waiting_vdef4, line4, waiting_gprint1,
                       waiting_gprint2, waiting_gprint3, waiting_gprint4])
        g.write()

    def graph(self, period='day'):
        """Render both graphs for *period*."""
        self.graph_request(period)
        self.graph_connection(period)
class BindingsBackendTestCase(TestCase):
    """Create a small RRD through the bindings backend and compare the
    ``info()`` output in write mode and read mode (Python 3 variant)."""

    def setUp(self):
        # One COUNTER source, two AVERAGE archives, fixed epoch start.
        self.ds = [DataSource(dsName="speed", dsType="COUNTER",
                              heartbeat=600)]
        self.rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)
        ]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=self.ds, rra=self.rra,
                       start=920804400, backend=bindings)
        self.rrd.create()

    def test_infoWriteMode(self):
        # Expected line-by-line output of info() before the file is
        # read back (write-mode view of the object).
        expectedOutput = """
rra = [{'rows': 24, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 1, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}, {'rows': 10, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 6, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}]
filename = /tmp/tmpQCLRj0
start = 920804400
step = 300
values = []
ds = [{'name': 'speed', 'min': 'U', 'max': 'U', 'unknown_sec': None, 'minimal_heartbeat': 600, 'value': None, 'rpn': None, 'type': 'COUNTER', 'last_ds': None}]
ds[speed].name = speed
ds[speed].min = U
ds[speed].max = U
ds[speed].minimal_heartbeat = 600
ds[speed].type = COUNTER
rra[0].rows = 24
rra[0].cf = AVERAGE
rra[0].steps = 1
rra[0].xff = 0.5
rra[0].ds = []
rra[1].rows = 10
rra[1].cf = AVERAGE
rra[1].steps = 6
rra[1].xff = 0.5
rra[1].ds = []
""".strip().split("\n")
        output = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        self.rrd.info(useBindings=True, stream=output)
        for obtained, expected in zip(output.getvalue().split("\n"),
                                      expectedOutput):
            # The temp filename differs on every run, so only check the
            # "filename" prefix for that line.
            if obtained.startswith("filename"):
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEqual(obtained.strip(), expected.strip())

    def test_infoReadMode(self):
        # NOTE(review): unlike test_infoWriteMode, this string is never
        # .strip().split("\n")-ed, so the zip() below pairs output lines
        # with single CHARACTERS of expectedOutput — confirm intent.
        expectedOutput = """
filename = "/tmp/tmpP4bTTy"
rrd_version = "0003"
step = 300
last_update = 920804400
header_size = 800
ds[speed].index = 0
ds[speed].type = "COUNTER"
ds[speed].minimal_heartbeat = 600
ds[speed].min = NaN
ds[speed].max = NaN
ds[speed].last_ds = "U"
ds[speed].value = 0.0000000000e+00
ds[speed].unknown_sec = 0
rra[0].cf = "AVERAGE"
rra[0].rows = 24
rra[0].cur_row = 3
rra[0].pdp_per_row = 1
rra[0].xff = 5.0000000000e-01
rra[0].cdp_prep[0].value = NaN
rra[0].cdp_prep[0].unknown_datapoints = 0
rra[1].cf = "AVERAGE"
rra[1].rows = 10
rra[1].cur_row = 2
rra[1].pdp_per_row = 6
rra[1].xff = 5.0000000000e-01
rra[1].cdp_prep[0].value = NaN
rra[1].cdp_prep[0].unknown_datapoints = 0
"""
        # Re-open the file read-only through the bindings backend.
        rrd = RRD(filename=self.rrdfile.name, mode="r", backend=bindings)
        output = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        rrd.info(useBindings=True, stream=output)
        for obtained, expected in zip(output.getvalue().split("\n"),
                                      expectedOutput):
            print(("obtained:", obtained))
            print(("expected:", expected))
            if obtained.startswith("filename"):
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEqual(obtained.strip(), expected.strip())
        # NOTE(review): `originalStdout` is not defined in this class —
        # presumably saved at module level; verify.
        sys.stdout = originalStdout
class RRDManip(object):
    """Thin convenience wrapper around a single pyrrd RRD file.

    Stores DS/RRA definitions as plain kwargs dicts and handles
    creation, updating, and fetching of the underlying database.
    """

    def __init__(self, filename, step=None, dataSources=None,
                 roundRobinArchives=None):
        """Instantiate an RRDManip object.

        :param filename: the name of the RRD file to manipulate
        :param step: base interval (seconds) between data points
        :param dataSources: a DataSource kwargs dict, or a list/tuple of
            them; a single item is normalized to a one-element list
        :param roundRobinArchives: an RRA kwargs dict, or a list/tuple
            of them; a single item is normalized to a one-element list
        """
        # Normalize scalar arguments to sequences so the rest of the
        # class can always iterate over them.
        if not isinstance(dataSources, (list, tuple)):
            dataSources = [dataSources]
        if not isinstance(roundRobinArchives, (list, tuple)):
            roundRobinArchives = [roundRobinArchives]
        self.dataSources = dataSources
        self.roundRobinArchives = roundRobinArchives
        self.filename = filename
        self.step = step
        self.rrd = None

    def ensure_rrd(self):
        """
        Ensure that an RRD file exists, creating it if necessary.
        """
        if os.path.isfile(self.filename):
            # The rrd file already exists; just open it.
            self.rrd = RRD(self.filename)
        else:
            self.create_rrd()

    def create_rrd(self):
        """
        Create an RRD file from the stored DS/RRA definitions.
        """
        dataSources = [DataSource(**ds) for ds in self.dataSources]
        roundRobinArchives = [RRA(**rra) for rra in self.roundRobinArchives]
        # Start the archive one day in the past (86400 seconds in a day)
        # so the first real samples fall inside the archive window.
        past_one_day = int(time.time()) - 86400
        self.rrd = RRD(self.filename, start=past_one_day, step=self.step,
                       ds=dataSources, rra=roundRobinArchives)
        self.rrd.create()

    def update(self, timestamp, values):
        """
        Feed data values into the RRD.

        :param timestamp: sample time, seconds since the epoch
        :param values: one value or a list/tuple of values, one per DS
        """
        timestamp = int(timestamp)
        if not isinstance(values, (list, tuple)):
            values = [values]
        self.rrd.bufferValue(timestamp, *values)
        try:
            self.rrd.update()
        except Exception:
            # Discard buffered values so bad data does not pollute later
            # updates.  (Was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            self.rrd.values = []

    def fetch(self, cf='AVERAGE', resolution=None, start=None, end=None,
              returnStyle="ds"):
        """
        Fetch data values from the RRD.

        :param cf: consolidation function to read (e.g. 'AVERAGE')
        :param resolution: fetch resolution in seconds
        :param start: start of the fetch window
        :param end: end of the fetch window
        :param returnStyle: result layout, either 'ds' or 'time'
        """
        return self.rrd.fetch(cf, resolution, start, end, returnStyle)
from pyrrd.graph import DEF, CDEF, VDEF
from pyrrd.graph import LINE, AREA, GPRINT
from pyrrd.graph import ColorAttributes, Graph

# Name the RRD file after this script.
filename = '%s.rrd' % os.path.splitext(os.path.basename(__file__))[0]

# Build an RRD file with one COUNTER source and two AVERAGE archives,
# then dump some sample data into it.
dss = [DS(dsName='speed', dsType='COUNTER', heartbeat=600)]
rras = [
    RRA(cf='AVERAGE', xff=0.5, steps=1, rows=24),
    RRA(cf='AVERAGE', xff=0.5, steps=6, rows=10),
]

myRRD = RRD(filename, ds=dss, rra=rras, start=920804400)
myRRD.create()

# Buffer one sample every 300 seconds, then flush in a single update.
samples = [
    ('920805600', '12363'), ('920805900', '12363'),
    ('920806200', '12373'), ('920806500', '12383'),
    ('920806800', '12393'), ('920807100', '12399'),
    ('920807400', '12405'), ('920807700', '12411'),
    ('920808000', '12415'), ('920808300', '12420'),
    ('920808600', '12422'), ('920808900', '12423'),
]
for stamp, reading in samples:
    myRRD.bufferValue(stamp, reading)
myRRD.update()
class SystemCounts(object):
    """Handle the rrd for the system counts"""
    # File names; joined with data_root/output_root via the properties.
    _datafile = 'systemcount.rrd'
    _outputfile = 'systemcount.png'

    @property
    def datafile(self):
        # Full path of the .rrd database file.
        return os.path.join(self.data_root, self._datafile)

    @property
    def outputfile(self):
        # Full path of the rendered .png graph.
        return os.path.join(self.output_root, self._outputfile)

    def _boostrap(self):
        """Put together out bits: build the DS/RRA definitions and the
        RRD object, and create the database file (plus its directory)
        on first run.

        NOTE(review): name looks like a typo for ``_bootstrap``; renaming
        would need a matching change in ``__init__``.
        """
        self.dss = []
        # Three GAUGE sources; ``hour`` (heartbeat) is defined elsewhere
        # in this module.
        self.ds1 = DS(dsName='bookmark_count', dsType='GAUGE', heartbeat=hour)
        self.ds2 = DS(dsName='unique_count', dsType='GAUGE', heartbeat=hour)
        self.ds3 = DS(dsName='tag_count', dsType='GAUGE', heartbeat=hour)
        self.dss.extend([self.ds1, self.ds2, self.ds3])
        self.rras = []
        # A single AVERAGE archive: 24-step consolidation, 8760 rows.
        rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=8760)
        self.rras.append(rra1)
        # ``start_date`` comes from module scope — TODO confirm.
        self.myRRD = RRD(self.datafile, ds=self.dss, rra=self.rras,
                         start=int(time.mktime(start_date.timetuple())))
        if not os.path.exists(self.datafile):
            # make sure we create the directory
            if not os.path.exists(os.path.dirname(self.datafile)):
                os.makedirs(os.path.dirname(self.datafile))
            self.myRRD.create()

    def __init__(self, data_root, output_root):
        """Bootstrap, does the data file exist, etc"""
        self.data_root = data_root
        self.output_root = output_root
        self._boostrap()

    def output(self, months=3):
        """Render out the image of the rrd

        NOTE(review): ``months`` is unused; the graph window below is a
        fixed 28 weeks ending today.
        """
        # One DEF per data source, drawn as stacked lines.
        def1 = DEF(rrdfile=self.datafile, vname='bookmark_count',
                   dsName=self.ds1.name)
        def2 = DEF(rrdfile=self.datafile, vname='unique_count',
                   dsName=self.ds2.name)
        def3 = DEF(rrdfile=self.datafile, vname='tag_count',
                   dsName=self.ds3.name)
        line1 = LINE(defObj=def1, color='#01FF13', legend='Bookmarks',
                     stack=True)
        line2 = LINE(defObj=def2, color='#DA7202', legend='Unique',
                     stack=True)
        line3 = LINE(defObj=def3, color='#BD4902', legend='Tags',
                     stack=True)
        # area1 = AREA(defObj=def1, color='#FFA902', legend='Bookmarks')
        # area2 = AREA(defObj=def2, color='#DA7202', legend='Unique')
        # area3 = AREA(defObj=def3, color='#BD4902', legend='Tags')
        # Let's configure some custom colors for the graph
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'
        # Now that we've got everything set up, let's make a graph
        # (``today``/``timedelta`` come from module scope).
        start_date = time.mktime((today - timedelta(weeks=28)).timetuple())
        end_date = time.mktime(today.timetuple())
        g = Graph(self.outputfile, start=int(start_date), end=int(end_date),
                  vertical_label='count', color=ca)
        g.data.extend([def1, def2, def3, line3, line2, line1])
        if not os.path.exists(os.path.dirname(self.outputfile)):
            os.makedirs(os.path.dirname(self.outputfile))
        g.write()

    def mark(self, tstamp, bmarks, uniques, tags):
        """Update the database with some data"""
        # Buffer one sample (three values, one per DS) at the given time.
        timestamp = time.mktime(tstamp.timetuple())
        self.myRRD.bufferValue(int(timestamp), bmarks, uniques, tags)

    def update(self):
        """Update the underlying rrd data"""
        try:
            self.myRRD.update(debug=False)
        except ExternalCommandError, exc:
            # Best-effort: report the rrdtool failure and carry on.
            print "ERROR", str(exc)
class Simulator(object):
    """Discrete-event simulator of a pool of storage nodes.

    Files are added/deleted at random (exponentially-distributed)
    intervals; utilization samples are logged into an RRD and later
    rendered as a PNG graph.
    """

    NUM_NODES = 1000
    EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
    # Event rates (events per simulated second) for the Poisson process.
    RATE_ADDFILE = 1.0 / 10
    RATE_DELFILE = 1.0 / 20
    RATE_ADDNODE = 1.0 / 3000
    RATE_DELNODE = 1.0 / 4000
    P_NODEAVAIL = 1.0

    def __init__(self):
        self.time = 1164783600 # small numbers of seconds since the epoch confuse rrdtool
        self.prevstamptime = int(self.time)

        # One GAUGE source sampled every second, 1200 rows of 1-step
        # averages, written to a fixed temp path.
        ds = DataSource(ds_name='utilizationds', ds_type='GAUGE', heartbeat=1)
        rra = RRA(cf='AVERAGE', xff=0.1, steps=1, rows=1200)
        self.rrd = RRD("/tmp/utilization.rrd", ds=[ds], rra=[rra],
                       start=self.time)
        self.rrd.create()

        # ``Introducer``/``Node``/``randomid`` come from module scope.
        self.introducer = q = Introducer(self)
        self.all_nodes = [
            Node(randomid(), q, self)
            for i in range(self.NUM_NODES)
        ]
        q.all_nodes = self.all_nodes
        self.next = []  # pending (timestamp, event-type) pairs, kept sorted
        self.schedule_events()
        self.verbose = False
        # Aggregate statistics.
        self.added_files = 0
        self.added_data = 0
        self.deleted_files = 0
        self.published_files = []
        self.failed_files = 0
        self.lost_data_bytes = 0 # bytes deleted to make room for new shares

    def stamp_utilization(self, utilization):
        # Record at most one utilization sample per simulated second.
        if int(self.time) > (self.prevstamptime + 1):
            self.rrd.bufferValue(self.time, utilization)
            self.prevstamptime = int(self.time)

    def write_graph(self):
        """Flush buffered samples and render the utilization graph."""
        self.rrd.update()
        self.rrd = None
        import gc
        gc.collect()

        def1 = graph.DataDefinition(vname="a",
                                    rrdfile='/tmp/utilization.rrd',
                                    ds_name='utilizationds')
        area1 = graph.Area(value="a", color="#990033",
                           legend='utilizationlegend')
        g = graph.Graph('/tmp/utilization.png',
                        imgformat='PNG', width=540, height=100,
                        vertical_label='utilizationverticallabel',
                        title='utilizationtitle', lower_limit=0)
        g.data.append(def1)
        g.data.append(area1)
        g.write()

    def add_file(self):
        """Publish a random-size file from a random node."""
        size = random.randrange(1000)
        n = random.choice(self.all_nodes)
        if self.verbose:
            print("add_file(size=%d, from node %s)" % (size, n))
        fileid = randomid()
        able = n.publish_file(fileid, size)
        if able:
            able, tried = able
            self.added_files += 1
            self.added_data += size
            self.published_files.append(tried)
        else:
            self.failed_files += 1

    def lost_data(self, size):
        # Callback from nodes when shares are evicted to make room.
        self.lost_data_bytes += size

    def delete_file(self):
        """Delete one file from the first (shuffled) node that has one."""
        all_nodes = self.all_nodes[:]
        random.shuffle(all_nodes)
        for n in all_nodes:
            if n.delete_file():
                self.deleted_files += 1
                return
        print("no files to delete")

    def _add_event(self, etype):
        # Schedule the next occurrence of ``etype`` at an exponentially
        # distributed interval, keeping self.next time-sorted.
        rate = getattr(self, "RATE_" + etype)
        next = self.time + random.expovariate(rate)
        self.next.append((next, etype))
        self.next.sort()

    def schedule_events(self):
        # Ensure every event type has exactly one pending occurrence.
        types = set([e[1] for e in self.next])
        for etype in self.EVENTS:
            if not etype in types:
                self._add_event(etype)

    def do_event(self):
        """Pop and execute the earliest pending event, advancing time."""
        time, etype = self.next.pop(0)
        assert time > self.time
        # current_time = self.time
        self.time = time
        self._add_event(etype)  # reschedule the same event type
        if etype == "ADDFILE":
            self.add_file()
        elif etype == "DELFILE":
            self.delete_file()
        elif etype == "ADDNODE":
            pass
            #self.add_node()
        elif etype == "DELNODE":
            #self.del_node()
            pass
        # self.print_stats(current_time, etype)

    def print_stats_header(self):
        print("time: added   failed   lost  avg_tried")

    def print_stats(self, time, etype):
        if not self.published_files:
            avg_tried = "NONE"
        else:
            avg_tried = sum(self.published_files) / len(self.published_files)
        print(time, etype, self.added_data, self.failed_files,
              self.lost_data_bytes, avg_tried,
              len(self.introducer.living_files), self.introducer.utilization)
class Simulator:
    """Discrete-event simulator of a pool of storage nodes (Python 2
    variant of the class above — print statements, old except syntax).

    Files are added/deleted at random (exponentially-distributed)
    intervals; utilization samples are logged into an RRD and later
    rendered as a PNG graph.
    """

    NUM_NODES = 1000
    EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
    # Event rates (events per simulated second) for the Poisson process.
    RATE_ADDFILE = 1.0 / 10
    RATE_DELFILE = 1.0 / 20
    RATE_ADDNODE = 1.0 / 3000
    RATE_DELNODE = 1.0 / 4000
    P_NODEAVAIL = 1.0

    def __init__(self):
        self.time = 1164783600 # small numbers of seconds since the epoch confuse rrdtool
        self.prevstamptime = int(self.time)

        # One GAUGE source sampled every second, 1200 rows of 1-step
        # averages, written to a fixed temp path.
        ds = DataSource(ds_name='utilizationds', ds_type='GAUGE', heartbeat=1)
        rra = RRA(cf='AVERAGE', xff=0.1, steps=1, rows=1200)
        self.rrd = RRD("/tmp/utilization.rrd", ds=[ds], rra=[rra],
                       start=self.time)
        self.rrd.create()

        # ``Introducer``/``Node``/``randomid`` come from module scope.
        self.introducer = q = Introducer(self)
        self.all_nodes = [Node(randomid(), q, self)
                          for i in range(self.NUM_NODES)]
        q.all_nodes = self.all_nodes
        self.next = []  # pending (timestamp, event-type) pairs, kept sorted
        self.schedule_events()
        self.verbose = False
        # Aggregate statistics.
        self.added_files = 0
        self.added_data = 0
        self.deleted_files = 0
        self.published_files = []
        self.failed_files = 0
        self.lost_data_bytes = 0 # bytes deleted to make room for new shares

    def stamp_utilization(self, utilization):
        # Record at most one utilization sample per simulated second.
        if int(self.time) > (self.prevstamptime+1):
            self.rrd.bufferValue(self.time, utilization)
            self.prevstamptime = int(self.time)

    def write_graph(self):
        """Flush buffered samples and render the utilization graph."""
        self.rrd.update()
        self.rrd = None
        import gc
        gc.collect()

        def1 = graph.DataDefinition(vname="a",
                                    rrdfile='/tmp/utilization.rrd',
                                    ds_name='utilizationds')
        area1 = graph.Area(value="a", color="#990033",
                           legend='utilizationlegend')
        g = graph.Graph('/tmp/utilization.png',
                        imgformat='PNG', width=540, height=100,
                        vertical_label='utilizationverticallabel',
                        title='utilizationtitle', lower_limit=0)
        g.data.append(def1)
        g.data.append(area1)
        g.write()

    def add_file(self):
        """Publish a random-size file from a random node."""
        size = random.randrange(1000)
        n = random.choice(self.all_nodes)
        if self.verbose:
            print "add_file(size=%d, from node %s)" % (size, n)
        fileid = randomid()
        able = n.publish_file(fileid, size)
        if able:
            able, tried = able
            self.added_files += 1
            self.added_data += size
            self.published_files.append(tried)
        else:
            self.failed_files += 1

    def lost_data(self, size):
        # Callback from nodes when shares are evicted to make room.
        self.lost_data_bytes += size

    def delete_file(self):
        """Delete one file from the first (shuffled) node that has one."""
        all_nodes = self.all_nodes[:]
        random.shuffle(all_nodes)
        for n in all_nodes:
            if n.delete_file():
                self.deleted_files += 1
                return
        print "no files to delete"

    def _add_event(self, etype):
        # Schedule the next occurrence of ``etype`` at an exponentially
        # distributed interval, keeping self.next time-sorted.
        rate = getattr(self, "RATE_" + etype)
        next = self.time + random.expovariate(rate)
        self.next.append((next, etype))
        self.next.sort()

    def schedule_events(self):
        # Ensure every event type has exactly one pending occurrence.
        types = set([e[1] for e in self.next])
        for etype in self.EVENTS:
            if not etype in types:
                self._add_event(etype)

    def do_event(self):
        """Pop and execute the earliest pending event, advancing time."""
        time, etype = self.next.pop(0)
        assert time > self.time
        # current_time = self.time
        self.time = time
        self._add_event(etype)  # reschedule the same event type
        if etype == "ADDFILE":
            self.add_file()
        elif etype == "DELFILE":
            self.delete_file()
        elif etype == "ADDNODE":
            pass
            #self.add_node()
        elif etype == "DELNODE":
            #self.del_node()
            pass
        # self.print_stats(current_time, etype)

    def print_stats_header(self):
        print "time: added   failed   lost  avg_tried"

    def print_stats(self, time, etype):
        if not self.published_files:
            avg_tried = "NONE"
        else:
            avg_tried = sum(self.published_files) / len(self.published_files)
        print time, etype, self.added_data, self.failed_files, self.lost_data_bytes, avg_tried, len(self.introducer.living_files), self.introducer.utilization