Example #1
    def rrdtool_log(self, count, category, key):
        """ Log a message to a category's corresponding rrdtool database """

        # rrdtool doesn't like spaces
        key = key.replace(' ', '_')

        filename = rrd_dir + '/' + category + '/' + key + '.rrd'

        if category not in rrd_categories:
            raise ValueError("Invalid category %s" % category)

        if not os.path.isfile(filename):
            self.rrdtool_create(filename)
            # rrdtool complains if you stuff data into a freshly created
            # database less than one second after you created it.  We could do a
            # number of things to mitigate this:
            #   - sleep for 1 second here
            #   - return from this function and not log anything only on the
            #     first time we see a new data key (a new country, a new
            #     filename).
            #   - pre-create our databases at startup based on magical knowledge
            #     of what keys we're going to see coming over the AMQP line
            #
            # For now, we're just going to return.
            return

        # TODO -- Is this an expensive operation (opening the RRD)?  Can we make
        # this happen less often?
        rrd = RRD(filename)

        rrd.bufferValue(str(int(time.time())), str(count))

        # This flushes the values to file.
        # TODO -- Can we make this happen less often?
        rrd.update()
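
One of the mitigations listed in the comment above is to pre-create the databases at startup. A minimal sketch of that idea is shown below; it reuses the example's rrd_dir, rrd_categories and rrdtool_create, while the expected_keys mapping (category -> iterable of keys) is a hypothetical input.

    def rrdtool_precreate(self, expected_keys):
        """ Pre-create one RRD per (category, key) pair so the first real
        data point for a key is never dropped.  expected_keys is a
        hypothetical dict mapping each category to the keys we expect. """
        for category, keys in expected_keys.items():
            if category not in rrd_categories:
                raise ValueError("Invalid category %s" % category)
            for key in keys:
                key = key.replace(' ', '_')  # rrdtool doesn't like spaces
                filename = rrd_dir + '/' + category + '/' + key + '.rrd'
                if not os.path.isfile(filename):
                    self.rrdtool_create(filename)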
Example #2
    def main(self, argv):
        """
        Create an RRD file with a series of values entered at 10-second
        intervals from 1980-01-01 00:00:00 (the first date that rrdtool
        allows), plus one extra value one second after the last step
        """
        from pyrrd.rrd import DataSource, RRA, RRD
        start = int(datetime(1980, 1, 1, 0, 0).strftime('%s'))
        dss = []
        rras = []
        filename = os.path.join(self.build_dir, 'test.rrd')

        rows = 12
        step = 10

        dss.append(
            DataSource(dsName='speed', dsType='GAUGE', heartbeat=2 * step))
        rras.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=rows))
        rras.append(RRA(cf='AVERAGE', xff=0.5, steps=12, rows=rows))
        my_rrd = RRD(filename, ds=dss, rra=rras, start=start, step=step)
        my_rrd.create()

        for i, t in enumerate(
            range(start + step, start + step + (rows * step), step)):
            self.log.debug(
                'DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
            my_rrd.bufferValue(t, i)

        # Add further data 1 second later to demonstrate that the rrd
        # lastupdatetime does not necessarily fall on a step boundary
        t += 1
        i += 1
        self.log.debug('DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
        my_rrd.bufferValue(t, i)

        my_rrd.update()
Example #3
    def main(self, argv):
        """
        Create an RRD file with a series of values entered at 10-second
        intervals from 1980-01-01 00:00:00 (the first date that rrdtool
        allows), plus one extra value one second after the last step
        """
        from pyrrd.rrd import DataSource, RRA, RRD
        start = int(datetime(1980, 1, 1, 0, 0).strftime('%s'))
        dss = []
        rras = []
        filename = os.path.join(self.build_dir, 'test.rrd')

        rows = 12
        step = 10

        dss.append(
            DataSource(dsName='speed', dsType='GAUGE', heartbeat=2 * step))
        rras.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=rows))
        rras.append(RRA(cf='AVERAGE', xff=0.5, steps=12, rows=rows))
        my_rrd = RRD(filename, ds=dss, rra=rras, start=start, step=step)
        my_rrd.create()

        for i, t in enumerate(
                range(start + step, start + step + (rows * step), step)):
            self.log.debug('DATA: %s %s (%s)' %
                           (t, i, datetime.fromtimestamp(t)))
            my_rrd.bufferValue(t, i)

        # Add further data 1 second later to demonstrate that the rrd
        # lastupdatetime does not necessarily fall on a step boundary
        t += 1
        i += 1
        self.log.debug('DATA: %s %s (%s)' % (t, i, datetime.fromtimestamp(t)))
        my_rrd.bufferValue(t, i)

        my_rrd.update()
Example #4
    def _rrdtool_log(self, count, filename):
        """ Workhorse for rrdtool logging.  Shouldn't be called directly. """

        if not os.path.isfile(filename):
            self.rrdtool_create(filename)
            # rrdtool complains if you stuff data into a freshly created
            # database less than one second after you created it.  We could do a
            # number of things to mitigate this:
            #   - sleep for 1 second here
            #   - return from this function and not log anything only on the
            #     first time we see a new data key (a new country, a new
            #     filename).
            #   - pre-create our databases at startup based on magical knowledge
            #     of what keys we're going to see coming over the AMQP line
            #
            # For now, we're just going to return.
            return

        # TODO -- Is this an expensive operation (opening the RRD)?  Can we make
        # this happen less often?
        rrd = RRD(filename)

        rrd.bufferValue(str(int(time.time())), str(count))

        # This flushes the values to file.
        # TODO -- Can we make this happen less often?
        rrd.update()
Example #5
class RRDB(object):

  def __init__(self, filename):
    self.db = RRD(filename)

  def store(self, values):
    self.db.bufferValue(int(time.time()), *values)
    self.db.update()

  @classmethod
  def generate_archives(cls, step, rows=1440,
                        day_periods=[2, 14, 60, 180, 720]):
    rras = []
    for days in day_periods:
      # how many primary data points (we get one each step)
      # go into a consolidated data point
      PDPs = 86400 * days // step // rows
      rras.extend([
        RRA(cf='AVERAGE', xff=0.1, rows=rows, steps=PDPs),
        RRA(cf='MIN', xff=0.1, rows=rows, steps=PDPs),
        RRA(cf='MAX', xff=0.1, rows=rows, steps=PDPs),
      ])
    return rras

  @classmethod
  def create_db(cls):
    raise NotImplementedError("Create DB is not implemented")

  def graph(self, outfile):
    raise NotImplementedError("graph method should be overridden")
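
The create_db classmethod above is intentionally left to subclasses. A minimal sketch of a concrete subclass, assuming DataSource, RRD and time are imported as in the other examples, with an illustrative single GAUGE data source and a 60-second step:

class TemperatureDB(RRDB):
  """ Hypothetical subclass with a single 'value' GAUGE data source. """

  @classmethod
  def create_db(cls, filename, step=60):
    ds = [DataSource(dsName='value', dsType='GAUGE', heartbeat=2 * step)]
    rras = cls.generate_archives(step)
    rrd = RRD(filename, ds=ds, rra=rras, step=step, start=int(time.time()))
    rrd.create()
    return cls(filename)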
Example #6
    def _rrdtool_log(self, count, filename):
        """ Workhorse for rrdtool logging.  Shouldn't be called directly. """

        if not os.path.isfile(filename):
            self.rrdtool_create(filename)
            # rrdtool complains if you stuff data into a freshly created
            # database less than one second after you created it.  We could do a
            # number of things to mitigate this:
            #   - sleep for 1 second here
            #   - return from this function and not log anything only on the
            #     first time we see a new data key (a new country, a new
            #     filename).
            #   - pre-create our databases at startup based on magical knowledge
            #     of what keys we're going to see coming over the AMQP line
            #
            # For now, we're just going to return.
            return

        # TODO -- Is this an expensive operation (opening the RRD)?  Can we make
        # this happen less often?
        rrd = RRD(filename)

        rrd.bufferValue(str(int(time.time())), str(count))

        # This flushes the values to file.
        # TODO -- Can we make this happen less often?
        rrd.update()
Example #7
def create(stringName, key):
    if debug: print "Enter Function create(stringName, key)"
    # Let's create an RRD file and dump some data in it
    dss = []
    ds1 = DS(dsName='kW', dsType='GAUGE', heartbeat=600)  # one value every 10 minutes
    dss.append(ds1)

    # round robin archives; with xff=0.5, if no value arrives for 20 minutes the slot is shown as empty:
    rras = []
    rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=144)  # one value every 10 minutes
    rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=24)   # 24 hours at 1-hour steps
    rra3 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=30)  # 30 days at 24-hour steps
    rra4 = RRA(cf='AVERAGE', xff=0.5, steps=30, rows=12)  # 12 months at 30-day steps
    rra5 = RRA(cf='AVERAGE', xff=0.5, steps=12, rows=10)  # 10 years at 12-month steps
    rras.append(rra1)
    rras.append(rra2)
    rras.append(rra3)
    rras.append(rra4)
    rras.append(rra5)

    # create the round robin database file with the start time startTime (now)
    #myRRD = RRD(baseDir + stringName + "_" + key + ".rrd", ds=dss, rra=rras, start=startTime)
    myRRD = RRD(baseDir + stringName + "_" + key + ".rrd", ds=dss, rra=rras, start=1483228800)
    myRRD.create()

    myRRD.update()
    if debug: myRRD.info()
Example #8
class ExternalBackendTestCase(TestCase):
    def setUp(self):
        ds = [DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)
        ]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        expected = ("illegal attempt to update using time 1261214678 "
                    "when last update time is 1261214678 (minimum one second "
                    "step)")
        try:
            self.rrd.update()
        except ExternalCommandError as error:
            self.assertTrue(str(error).startswith("ERROR:"))
            self.assertTrue(str(error).endswith(expected))

    def test_infoWriteMode(self):
        expectedOutput = """
            rra = [{'rows': 24, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 1, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}, {'rows': 10, 'database': None, 'cf': 'AVERAGE', 'cdp_prep': None, 'beta': None, 'seasonal_period': None, 'steps': 6, 'window_length': None, 'threshold': None, 'alpha': None, 'pdp_per_row': None, 'xff': 0.5, 'ds': [], 'gamma': None, 'rra_num': None}]
            filename = /tmp/tmpQCLRj0
            start = 920804400
            step = 300
            values = []
            ds = [{'name': 'speed', 'min': 'U', 'max': 'U', 'unknown_sec': None, 'minimal_heartbeat': 600, 'value': None, 'rpn': None, 'type': 'COUNTER', 'last_ds': None}]
            ds[speed].name = speed
            ds[speed].min = U
            ds[speed].max = U
            ds[speed].minimal_heartbeat = 600
            ds[speed].type = COUNTER
            rra[0].rows = 24
            rra[0].cf = AVERAGE
            rra[0].steps = 1
            rra[0].xff = 0.5
            rra[0].ds = []
            rra[1].rows = 10
            rra[1].cf = AVERAGE
            rra[1].steps = 6
            rra[1].xff = 0.5
            rra[1].ds = []
            """.strip().split("\n")
        originalStdout = sys.stdout
        sys.stdout = StringIO()
        self.assertTrue(os.path.exists(self.rrdfile.name))
        self.rrd.info()
        for obtained, expected in zip(sys.stdout.getvalue().split("\n"),
                                      expectedOutput):
            if obtained.startswith("filename"):
                self.assertTrue(expected.strip().startswith("filename"))
            else:
                self.assertEquals(obtained.strip(), expected.strip())
        sys.stdout = originalStdout
Example #9
    def insert(self):
        """
        Inserts new data in the RRD database
        """
        rrd = RRD(os.path.join("history/", "%s.rrd" % self.value_id))
        rrd.bufferValue(self.time, self.value_value)
        rrd.update()
        print self.time, self.value_value
Example #10
    def insert(self):
        """
        Inserts new data in the RRD database
        """
        rrd = RRD(os.path.join("history/", "%s.rrd" % self.value_id))
        rrd.bufferValue(self.time, self.value_value)
        rrd.update()
        print self.time, self.value_value
Example #11
def RrdProcess(rrdfile, samples):
    '''Reads given samples and stores them in the RRD database.'''
    # TODO: Optionally update the database only periodically.
    rrd = RRD(rrdfile)
    for sample in samples:
        logging.debug("Saving sample %s", sample)
        rrd.bufferValue(sample.time, sample.temperature, sample.humidity,
                        sample.mq9, sample.dust_pc, sample.dust_raw)
        rrd.update(debug=True)
        # Flush the print statements executed so far.
        sys.stdout.flush()
Example #12
def RrdProcess(rrdfile, samples):
    '''Reads given samples and stores them in the RRD database.'''
    # TODO: Optionally update the database only periodically.
    rrd = RRD(rrdfile)
    for sample in samples:
        logging.debug("Saving sample %s", sample)
        rrd.bufferValue(sample.time, sample.temperature, sample.humidity,
                        sample.mq9, sample.dust_pc, sample.dust_raw)
        rrd.update(debug=True)
        # Flush the print statements executed so far.
        sys.stdout.flush()
Example #13
def main(args):

    filename = 'test.rrd'

    if not os.path.exists(filename):
        dataSources = []
        roundRobinArchives = []

        dataSource = DataSource(dsName='speed',
                                dsType='COUNTER',
                                heartbeat=600)
        print "dataSource.name:", dataSource.name
        dataSources.append(dataSource)

        roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=24))
        roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=6, rows=10))

        myRRD = RRD(filename,
                    ds=dataSources,
                    rra=roundRobinArchives,
                    start=time.time())
        myRRD.create()
        #myRRD.update()
        #write_2_file(myRRD)

    else:

        import random

        myRRD = RRD(filename)
        myRRD.bufferValue(time.time(), random.randrange(12393, 12423))

        #=======================================================================
        # myRRD.bufferValue('920805900', '12363')
        # myRRD.bufferValue('920806200', '12373')
        # myRRD.bufferValue('920806500', '12383')
        # myRRD.update()
        #
        # myRRD.bufferValue('920806800', '12393')
        # myRRD.bufferValue('920807100', '12399')
        # myRRD.bufferValue('920807400', '12405')
        # myRRD.bufferValue('920807700', '12411')
        # myRRD.bufferValue('920808000', '12415')
        # myRRD.bufferValue('920808300', '12420')
        # myRRD.bufferValue('920808600', '12422')
        # myRRD.bufferValue('920808900', '12423')
        #=======================================================================
        myRRD.update()

        #write_2_file(myRRD)

        print os.path.isfile(filename)
        print len(open(filename).read())
Example #14
def update(stringName, timestamp, key, value):
    if debug: print "Enter Function update(stringName, timestamp, key, value)"

    # open the round robin database file
    myRRD = RRD(baseDir + stringName + "_" + key + ".rrd")

    # write the value into the round robin database
    try:
        myRRD.bufferValue(timestamp, value)
        myRRD.update()
    except:
        print "value in the past"

    if debug: myRRD.info()
Example #15
class StatsRecorder:
  def __init__(self, filename):
    if os.path.isfile(filename):
      self.rrd = RRD(filename)
    else:
      dataSources = []
      dataSources.append( DataSource(dsName='q1', dsType='GAUGE', heartbeat=600, minval=0) )
      dataSources.append( DataSource(dsName='q2', dsType='GAUGE', heartbeat=600, minval=0) )
      dataSources.append( DataSource(dsName='q3', dsType='GAUGE', heartbeat=600, minval=0) )
      dataSources.append( DataSource(dsName='lo', dsType='GAUGE', heartbeat=600, minval=0) )
      dataSources.append( DataSource(dsName='hi', dsType='GAUGE', heartbeat=600, minval=0) )
      dataSources.append( DataSource(dsName='total', dsType='GAUGE', heartbeat=600, minval=0) )

      roundRobinArchives = []
      roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=8640)) # 24h at 1 sample per 10 secs
      roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=90, rows=2880)) # 1 month at 1 sample per 15 mins
      roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=2880, rows=5475)) # 5 years at 1 sample per 8 hours

      self.rrd = RRD(filename, step=10, ds=dataSources, rra=roundRobinArchives, start=int(time.time()))
      self.rrd.create()

    self.bucket = { 'a': [], 'b': [] }
    self.current_bucket = 'a'

  def add(self, value):
    self.bucket[self.current_bucket].append(value)

  def save(self):
    bucket = self.current_bucket

    if self.current_bucket == 'a':
      self.current_bucket = 'b'
    else:
      self.current_bucket = 'a'

    stats = corestats.Stats(self.bucket[bucket])

    q1 = stats.percentile(25)
    q2 = stats.percentile(50)
    q3 = stats.percentile(75)
    lo = stats.min()
    hi = stats.max()
    total = stats.count()

    self.bucket[bucket] = []

    self.rrd.bufferValue(str(int(time.time())), q1, q2, q3, lo, hi, total)
    self.rrd.update()
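
Driven from a periodic sampling loop, the recorder above could be used roughly as follows; the filename, the hypothetical measure_response_time() helper and the one-second sample / ten-second flush cadence (matching the RRD's step=10) are illustrative:

recorder = StatsRecorder('response_times.rrd')
for second in range(60):
  recorder.add(measure_response_time())  # hypothetical sampling function
  if second % 10 == 9:
    recorder.save()  # consolidate one bucket and flush it to the RRD
  time.sleep(1)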
Example #16
class Storage:
    # our storage object
    _rrd = None

    def __init__(self, filename="heatpumpMonitor.rrd"):
        if not os.path.isfile(filename):
            self._rrd = self._createRRD(filename)
        else:
            self._rrd = RRD(filename)

    def _createRRD(self, filename):
        """ create an rrd file which fits our requirements """

        # Let's set up some data sources for our RRD
        dss = []
        for source in dataSources:
            dss.append(DS(dsName=source, dsType='GAUGE', heartbeat=900))

        # And now let's set up how our RRD will archive the data
        rras = []
        # 1 days-worth of one-minute samples --> 60/1 * 24
        rra1 = RRA(cf='AVERAGE', xff=0, steps=1, rows=1440)
        # 7 days-worth of five-minute samples --> 60/5 * 24 * 7
        rra2 = RRA(cf='AVERAGE', xff=0, steps=5, rows=2016)
        # 30 days-worth of one hour samples --> 60/60 * 24 * 30
        rra3 = RRA(cf='AVERAGE', xff=0, steps=60, rows=720)
        # 1 year-worth of half day samples --> 60/60 * 24/12 * 365
        rra4 = RRA(cf='AVERAGE', xff=0, steps=720, rows=730)
        rras.extend([rra1, rra2, rra3, rra4])

        # With those set up, we can now create the RRD
        myRRD = RRD(filename,
                    step=step,
                    ds=dss,
                    rra=rras,
                    start=int(time.time()))
        myRRD.create(debug=False)
        return myRRD

    def add(self, aDict):
        """ adds the provided values to the rrd database with the current datetime """
        # we need to put the dict values into the correct order
        tmp = []
        for source in dataSources:
            tmp.append(aDict.get(source) or "U")
        self._rrd.bufferValue(int(time.time()), *tmp)
        self._rrd.update(debug=False)
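
The Storage class relies on module-level dataSources (a list of data source names) and step values; a minimal usage sketch with illustrative values:

dataSources = ['outdoorTemp', 'flowTemp', 'returnTemp']  # illustrative names
step = 60

store = Storage('heatpumpMonitor.rrd')
store.add({'outdoorTemp': 4.2, 'flowTemp': 35.0})  # missing sources are stored as "U"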
Example #17
class Storage:
    # our storage object
    _rrd = None

    def __init__(self, filename="heatpumpMonitor.rrd"):
        if not os.path.isfile(filename):
            self._rrd = self._createRRD(filename)
        else:
            self._rrd = RRD(filename)

    def _createRRD(self, filename):
        """ create an rrd file which fits our requirements """

        # Let's set up some data sources for our RRD
        dss = []
        for source in dataSources:
            dss.append(DS(dsName=source, dsType='GAUGE', heartbeat=900))

        # And now let's set up how our RRD will archive the data
        rras = []
        # 1 days-worth of one-minute samples --> 60/1 * 24
        rra1 = RRA(cf='AVERAGE', xff=0, steps=1, rows=1440)
        # 7 days-worth of five-minute samples --> 60/5 * 24 * 7
        rra2 = RRA(cf='AVERAGE', xff=0, steps=5, rows=2016)
        # 30 days-worth of one hour samples --> 60/60 * 24 * 30
        rra3 = RRA(cf='AVERAGE', xff=0, steps=60, rows=720)
        # 1 year-worth of half day samples --> 60/60 * 24/12 * 365
        rra4 = RRA(cf='AVERAGE', xff=0, steps=720, rows=730)
        rras.extend([rra1, rra2, rra3, rra4])

        # With those set up, we can now create the RRD
        myRRD = RRD(filename, step=step, ds=dss, rra=rras, start=int(time.time()))
        myRRD.create(debug=False)
        return myRRD

    def add(self, aDict):
        """ adds the provided values to the rrd database with the current datetime """
        # we need to put the dict values into the correct order
        tmp = []
        for source in dataSources:
            tmp.append(aDict.get(source) or "U")
        self._rrd.bufferValue(int(time.time()), *tmp)
        self._rrd.update(debug=False)
Example #18
def update(namerrd,vals,updatedtime):
    try:
        myRRD = RRD(namerrd)
        countitem = len(vals)
        #myRRD.update(debug=False)
        if (countitem == 1):
            var1 = vals[0]   
            myRRD.bufferValue(updatedtime ,var1)
            myRRD.update() 
        elif (countitem == 2):
            var1 = vals[0]
            var2 = vals[1]
            myRRD.bufferValue(updatedtime,var1,var2)
            myRRD.update()  
        elif (countitem == 3):
            var1 = vals[0]
            var2 = vals[1]
            var3 = vals[2]
            myRRD.bufferValue(updatedtime,var1,var2,var3)
            myRRD.update()   
             
        return (True, 'Update is successful.')
    
    except Exception as e:
        return (False, str(e))
Example #19
class ExternalBackendTestCase(TestCase):

    def setUp(self):
        ds = [
            DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        try:
            self.rrd.update()
        except ExternalCommandError as error:
            self.assertEquals(
                str(error),
                "ERROR: illegal attempt to update using time 1261214678 "
                "when last update time is 1261214678 (minimum one second step)")
Example #20
class ExternalBackendTestCase(TestCase):
    def setUp(self):
        ds = [DataSource(dsName="speed", dsType="COUNTER", heartbeat=600)]
        rra = [
            RRA(cf="AVERAGE", xff=0.5, steps=1, rows=24),
            RRA(cf="AVERAGE", xff=0.5, steps=6, rows=10)
        ]
        self.rrdfile = tempfile.NamedTemporaryFile()
        self.rrd = RRD(self.rrdfile.name, ds=ds, rra=rra, start=920804400)
        self.rrd.create()

    def test_updateError(self):
        self.rrd.bufferValue(1261214678, 612)
        self.rrd.bufferValue(1261214678, 612)
        self.assertRaises(ExternalCommandError, self.rrd.update)
        try:
            self.rrd.update()
        except ExternalCommandError as error:
            self.assertEquals(str(error), (
                "ERROR: illegal attempt to update using time 1261214678 "
                "when last update time is 1261214678 (minimum one second step)"
            ))
Example #21
def graph_totals(ip=None):

    graph_type = ip.split('.')[2] + "-" + ip.split('.')[3] if ip else 'network'
    
    graph_setups = [
                        ('total_bytes', 'Bytes'), ('total_pkts', 'Packets'), ('total_flows', 'Flows'),
                        ('total_log_bytes', 'logBytes'), ('total_log_pkts', 'logPackets'), ('total_log_flows', 'logFlows'),
                        ('int_ip_entropy', 'IntIPEntropy'), ('ext_ip_entropy', 'ExtIPEntropy'),
                        ('d_int_ip_entropy', 'deltaIntIPEntropy'), ('d_ext_ip_entropy', 'deltaExtIPEntropy'),
                        ('wireless_retries', 'nRetries')
                    ]

    dss = []
    
    for graph in graph_setups:
        dss.append( DS(dsName=graph[0], dsType='GAUGE', heartbeat=900) )
    
    dbl_graph_setups = [ ('ivo_bytes', 'Bytes'), ('ivo_pkts', 'Pkts'), ('ivo_flows', 'Flows')]

    for graph in dbl_graph_setups:
        dss.append( DS(dsName='in_'+graph[0], dsType='GAUGE', heartbeat=900)  )
        dss.append( DS(dsName='out_'+graph[0], dsType='GAUGE', heartbeat=900) )

    myRRD = RRD(rrd_file % graph_type, step=60, ds=dss, rra=rras, start=startTime-60)
    myRRD.create(debug=False)
    
    counter = 0
    for flow_key in keys:
        if ip:
            if ip in flows[flow_key]['internal']:
                in_bytes, out_bytes = (flows[flow_key])['internal'][ip]['in_bytes'], (flows[flow_key])['internal'][ip]['out_bytes']
                in_pkts, out_pkts = (flows[flow_key])['internal'][ip]['in_pkts'], (flows[flow_key])['internal'][ip]['out_pkts']
                in_flows, out_flows = (flows[flow_key])['internal'][ip]['in_flows'], (flows[flow_key])['internal'][ip]['out_flows']
                total_bytes = in_bytes + out_bytes
                total_pkts  = in_pkts + out_pkts
                total_flows = in_flows + out_flows
                log_bytes, log_pkts, log_flows = log(total_bytes, 2), log(total_pkts, 2), log(total_flows, 2)
                nretries = (flows[flow_key])['internal'][ip]['nretries']
            else:
                in_bytes = out_bytes = in_pkts = out_pkts = in_flows = out_flows = 'U'
                total_bytes = total_pkts = total_flows = 'U'
                log_bytes = log_pkts = log_flows = 'U'
                nretries = 'U'
            myRRD.bufferValue(  int(flow_key), 
                                total_bytes, total_pkts, total_flows,
                                log_bytes, log_pkts, log_flows,
                                flow_entropies[flow_key]['external'][ip] if ip in flow_entropies[flow_key]['external'] else 0, 0,
                                delta_flow_entropies[flow_key]['external'][ip] if ip in flow_entropies[flow_key]['external'] else 0, 0,#delta_flow_entropies[flow_key]['internal'][ip],
                                nretries,
                                in_bytes, out_bytes, in_pkts, out_pkts, in_flows, out_flows,
                                )
        else:                
            in_bytes, out_bytes = (flows[flow_key])['in_bytes'], (flows[flow_key])['out_bytes']
            in_pkts, out_pkts = (flows[flow_key])['in_pkts'], (flows[flow_key])['out_pkts']
            in_flows, out_flows = (flows[flow_key])['in_flows'], (flows[flow_key])['out_flows']
            total_bytes = in_bytes + out_bytes
            total_pkts  = in_pkts + out_pkts
            total_flows = in_flows + out_flows
            log_bytes = log(total_bytes, 2) if total_bytes else 0
            log_pkts = log(total_pkts, 2) if total_pkts else 0
            log_flows = log(total_flows, 2) if total_flows else 0
            nretries = (flows[flow_key])['nretries']

            myRRD.bufferValue(  int(flow_key), 
                                total_bytes, total_pkts, total_flows,
                                log_bytes, log_pkts, log_flows,
                                flow_entropies[flow_key]['global_external'], 0,#flow_entropies[flow_key]['global_internal'],
                                delta_flow_entropies[flow_key]['global_external'], 0,#delta_flow_entropies[flow_key]['global_internal'],
                                nretries,
                                in_bytes, out_bytes, in_pkts, out_pkts, in_flows, out_flows,
                                )

        counter += 1
        if counter % 10 == 0:
            myRRD.update()
    
    myRRD.update()
    
    for idx, (feature, label) in enumerate(graph_setups[:-1]):
        
        def1 = DEF(rrdfile=myRRD.filename, vname=label, dsName=dss[idx].name)
        
        vdef1 = VDEF(vname='avg', rpn='%s,AVERAGE' % def1.vname)
        vdef2 = VDEF(vname='min', rpn='%s,MINIMUM' % def1.vname)
        vdef3 = VDEF(vname='max', rpn='%s,MAXIMUM' % def1.vname)
        vdef4 = VDEF(vname='stdev', rpn='%s,STDEV' % def1.vname)
        
        cdef1 = CDEF(vname='slightlyhigh', rpn='%s,avg,stdev,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef2 = CDEF(vname='abnormallyhigh', rpn='%s,avg,stdev,1.5,*,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef3 = CDEF(vname='vhigh', rpn='%s,avg,stdev,2.0,*,+,GE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef4 = CDEF(vname='slightlylow', rpn='%s,avg,stdev,-,LE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef5 = CDEF(vname='abnormallylow', rpn='%s,avg,stdev,1.5,*,-,LE,%s,UNKN,IF' % (def1.vname, def1.vname))
        cdef6 = CDEF(vname='vlow', rpn='%s,avg,stdev,2.0,*,-,LE,%s,UNKN,IF' % (def1.vname, def1.vname))
        
        area1 = AREA(defObj=def1, color='#00FF00')
        area2 = AREA(defObj=cdef1, color='#FFFF00')
        area3 = AREA(defObj=cdef2, color='#FF9900')
        area4 = AREA(defObj=cdef3, color='#FF0000')
        area5 = AREA(defObj=cdef4, color='#FFFF00')
        area6 = AREA(defObj=cdef5, color='#FF9900')
        area7 = AREA(defObj=cdef6, color='#FF0000')
        
        gprint1 = GPRINT(vdef1, 'Average %.2lf')
        gprint2 = GPRINT(vdef2, 'Min %.2lf')
        gprint3 = GPRINT(vdef3, 'Max %.2lf')
        gprint4 = GPRINT(vdef4, 'Stdev %.2lf')

        g = Graph(graph_file % (graph_type, feature), start=int(keys[0]), end=int(keys[-1]) )
        g.data.extend([def1, vdef1, vdef2, vdef3, vdef4,
                        cdef1, cdef2, cdef3, cdef4, cdef5, cdef6, 
                        area1, area2, area3, area4, area5, area6, area7, 
                        gprint1, gprint2, gprint3, gprint4
                        ])
        if idx > 5:
            g.width = 380
        else:
            g.width = 540
        g.height = 100
        g.write()

    wireless_index = len(graph_setups) - 1
    wireless_feature, wireless_label = graph_setups[wireless_index]
    def1 = DEF(rrdfile=myRRD.filename, vname=wireless_label, dsName=dss[wireless_index].name)
    line1 = LINE(defObj=def1, color='#FF0000')
    g = Graph(graph_file % (graph_type, wireless_feature), start=int(keys[0]), end=int(keys[-1]) )
    g.data.extend([def1, line1])
    g.width = 1800
    g.height = 80
    g.write() 
    
    for idx, (feature, label) in enumerate(dbl_graph_setups):
        def1 = DEF(rrdfile=myRRD.filename, vname=label+'IN', dsName=(dss[len(dss)-(len(dbl_graph_setups)*2)+(idx*2)]).name)
        def2 = DEF(rrdfile=myRRD.filename, vname=label+'OUT', dsName=(dss[len(dss)-(len(dbl_graph_setups)*2)+(idx*2)+1]).name)
        cdef1 = CDEF(vname=label[0]+'IN', rpn='%s' % def1.vname)
        cdef2 = CDEF(vname=label[0]+'OUT', rpn='%s,-1,*' % def2.vname)
        area1 = AREA(defObj=cdef1, color='#FF0000')
        area2 = AREA(defObj=cdef2, color='#00FF00')
        g = Graph(graph_file % (graph_type, feature), start=int(keys[0]), end=int(keys[-1]))
        g.data.extend([def1, def2, cdef1, cdef2, area1, area2])
        g.width = 380
        g.height = 100
        g.write()
Example #22
dataSources.append(dataSource)
roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=1, rows=3600*24*7))
roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=60, rows=4))
roundRobinArchives.append(RRA(cf='AVERAGE', xff=0.5, steps=60, rows=12))
rrd = RRD(filename, ds=dataSources, rra=roundRobinArchives, start=timestamp-1)

if not os.path.isfile(rrd.filename):
    rrd.create()

    i = 0
    for rtt in parse_rtt(lines):
        print i, rtt
        rrd.bufferValue(timestamp+i, int(1000 * rtt))
        i += 1
        if i % 100 == 0:
            rrd.update()
    rrd.update()

from pyrrd.graph import DEF, CDEF, VDEF, LINE, AREA, GPRINT, COMMENT, Graph
comment = COMMENT("RTT from SMO to Creagan Dearga")
rttus = DEF(rrdfile=rrd.filename, vname="rttus", dsName="rtt")
rttms = CDEF(vname="rttms", rpn="%s,1000,/" % rttus.vname)
rtt = LINE(defObj=rttms, color="#2299ff", legend="RTT")
rttmax = VDEF(vname="rttmax", rpn="%s,MAXIMUM" % rttms.vname)
rttavg = VDEF(vname="rttavg", rpn="%s,AVERAGE" % rttms.vname)
rttmaxp = GPRINT(rttmax, "Maximum: %6.2lf")
rttavgp = GPRINT(rttavg, "Average: %6.2lf")

imgfile = "/tmp/testgraph.png"
g = Graph(imgfile, start=timestamp, end=timestamp+263342,
          vertical_label="ms")
Example #23
File: example5.py  Project: pavalx/pyrrd
        data[time].setdefault(dsName, datum)

# Sort everything by time
counter = 0
sortedData = [(i, data[i]) for i in sorted(data.keys())]
for time, dsNames in sortedData:
    counter += 1
    val1 = dsNames.get(ds1.name) or "U"
    val2 = dsNames.get(ds2.name) or "U"
    val3 = dsNames.get(ds3.name) or "U"
    val4 = dsNames.get(ds4.name) or "U"
    # Add the values
    myRRD.bufferValue(time, val1, val2, val3, val4)
    # Let's update the RRD / purge the buffer every 100 entries
    if counter % 100 == 0:
        myRRD.update(debug=False)

# Add anything remaining in the buffer
myRRD.update(debug=False)

# Let's set up the objects that will be added to the graph
def1 = DEF(rrdfile=myRRD.filename, vname="in", dsName=ds1.name)
def2 = DEF(rrdfile=myRRD.filename, vname="out", dsName=ds2.name)
# Here we're just going to multiply the in bits by 100, solely for
# the purpose of display
cdef1 = CDEF(vname="hundredin", rpn="%s,%s,*" % (def1.vname, 100))
cdef2 = CDEF(vname="negout", rpn="%s,-1,*" % def2.vname)
area1 = AREA(defObj=cdef1, color="#FFA902", legend="Bits In")
area2 = AREA(defObj=cdef2, color="#A32001", legend="Bits Out")

# Let's configure some custom colors for the graph
Example #24
myRRD.create()
myRRD.bufferValue('920805600', '12363')
myRRD.bufferValue('920805900', '12363')
myRRD.bufferValue('920806200', '12373')
myRRD.bufferValue('920806500', '12383')
myRRD.bufferValue('920806800', '12393')
myRRD.bufferValue('920807100', '12399')
myRRD.bufferValue('920807400', '12405')
myRRD.bufferValue('920807700', '12411')
myRRD.bufferValue('920808000', '12415')
myRRD.bufferValue('920808300', '12420')
myRRD.bufferValue('920808600', '12422')
myRRD.bufferValue('920808900', '12423')
#An example of how to use the RRDCached attribute on Update.
#Note you can only use it on updates for the time being.
myRRD.update(rrdcached="unix:/tmp/rrdcached.sock")

# Let's set up the objects that will be added to the graph
def1 = DEF(rrdfile=myRRD.filename, vname='myspeed', dsName=ds1.name)
cdef1 = CDEF(vname='kmh', rpn='%s,3600,*' % def1.vname)
cdef2 = CDEF(vname='fast', rpn='kmh,100,GT,kmh,0,IF')
cdef3 = CDEF(vname='good', rpn='kmh,100,GT,0,kmh,IF')
vdef1 = VDEF(vname='mymax', rpn='%s,MAXIMUM' % def1.vname)
vdef2 = VDEF(vname='myavg', rpn='%s,AVERAGE' % def1.vname)
line1 = LINE(value=100, color='#990000', legend='Maximum Allowed')
area1 = AREA(defObj=cdef3, color='#006600', legend='Good Speed')
area2 = AREA(defObj=cdef2, color='#CC6633', legend='Too Fast')
line2 = LINE(defObj=vdef2, color='#000099', legend='My Average', stack=True)
gprint1 = GPRINT(vdef2, '%6.2lf kph')

# Now that we've got everything set up, let's make a graph
Example #25
class RRDManip(object):

    def __init__(self, filename, step=None,
                 dataSources=None, roundRobinArchives=None):
        """
        Instantiate an RRDManip object.

        :param filename: the name of the RRD file to manipulate
        :param dataSources: the list of data source definitions
        :param roundRobinArchives: the list of RRA definitions
        """
        if not isinstance(dataSources, list) and \
                not isinstance(dataSources, tuple):
            dataSources = [dataSources]
        if not isinstance(roundRobinArchives, list) and \
                not isinstance(roundRobinArchives, tuple):
            roundRobinArchives = [roundRobinArchives]

        self.dataSources = dataSources
        self.roundRobinArchives = roundRobinArchives
        self.filename = filename
        self.step = step
        self.rrd = None

    def ensure_rrd(self):
        """
        Ensures that an RRD file is created.
        """
        if os.path.isfile(self.filename):
            # the rrd file already exists
            self.rrd = RRD(self.filename)
        else:
            self.create_rrd()

    def create_rrd(self):
        """
        Creates an RRD file.
        """
        dataSources = [DataSource(**ds) for ds in self.dataSources]
        roundRobinArchives = [RRA(**rra) for rra in self.roundRobinArchives]
        # set start to one day before the current time; 86400 is the number of seconds in a day
        past_one_day = int(time.time()) - 86400
        self.rrd = RRD(self.filename, start=past_one_day, step=self.step,
                       ds=dataSources, rra=roundRobinArchives)
        self.rrd.create()

    def update(self, timestamp, values):
        """
        Feeds data values into an RRD.
        """
        timestamp = int(timestamp)
        if not isinstance(values, list) and not isinstance(values, tuple):
            values = [values]
        self.rrd.bufferValue(timestamp, *values)
        try:
            self.rrd.update()
        except:
            # discard the buffered values so dirty data doesn't pollute later updates
            self.rrd.values = []

    def fetch(self, cf='AVERAGE', resolution=None, start=None, end=None, returnStyle="ds"):
        """
        Fetch data values from an RRD.

        :param returnStyle: the format of the returned data, either 'ds' or 'time'
        """
        return self.rrd.fetch(cf, resolution, start, end, returnStyle)
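
A minimal usage sketch for RRDManip, with an illustrative data source and archive passed as keyword dicts (create_rrd expands them with DataSource(**ds) and RRA(**rra)):

manip = RRDManip('cpu.rrd', step=300,
                 dataSources=[dict(dsName='cpu', dsType='GAUGE', heartbeat=600)],
                 roundRobinArchives=[dict(cf='AVERAGE', xff=0.5, steps=1, rows=288)])
manip.ensure_rrd()
manip.update(int(time.time()), 42.0)
data = manip.fetch(cf='AVERAGE')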
Example #26
class Simulator:
    NUM_NODES = 1000
    EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
    RATE_ADDFILE = 1.0 / 10
    RATE_DELFILE = 1.0 / 20
    RATE_ADDNODE = 1.0 / 3000
    RATE_DELNODE = 1.0 / 4000
    P_NODEAVAIL = 1.0

    def __init__(self):
        self.time = 1164783600 # small numbers of seconds since the epoch confuse rrdtool
        self.prevstamptime = int(self.time)

        ds = DataSource(ds_name='utilizationds', ds_type='GAUGE', heartbeat=1)
        rra = RRA(cf='AVERAGE', xff=0.1, steps=1, rows=1200)
        self.rrd = RRD("/tmp/utilization.rrd", ds=[ds], rra=[rra], start=self.time)
        self.rrd.create()

        self.introducer = q = Introducer(self)
        self.all_nodes = [Node(randomid(), q, self)
                          for i in range(self.NUM_NODES)]
        q.all_nodes = self.all_nodes
        self.next = []
        self.schedule_events()
        self.verbose = False

        self.added_files = 0
        self.added_data = 0
        self.deleted_files = 0
        self.published_files = []
        self.failed_files = 0
        self.lost_data_bytes = 0 # bytes deleted to make room for new shares

    def stamp_utilization(self, utilization):
        if int(self.time) > (self.prevstamptime+1):
            self.rrd.bufferValue(self.time, utilization)
            self.prevstamptime = int(self.time)

    def write_graph(self):
        self.rrd.update()
        self.rrd = None
        import gc
        gc.collect()

        def1 = graph.DataDefinition(vname="a", rrdfile='/tmp/utilization.rrd', ds_name='utilizationds')
        area1 = graph.Area(value="a", color="#990033", legend='utilizationlegend')
        g = graph.Graph('/tmp/utilization.png', imgformat='PNG', width=540, height=100, vertical_label='utilizationverticallabel', title='utilizationtitle', lower_limit=0)
        g.data.append(def1)
        g.data.append(area1)
        g.write()

    def add_file(self):
        size = random.randrange(1000)
        n = random.choice(self.all_nodes)
        if self.verbose:
            print "add_file(size=%d, from node %s)" % (size, n)
        fileid = randomid()
        able = n.publish_file(fileid, size)
        if able:
            able, tried = able
            self.added_files += 1
            self.added_data += size
            self.published_files.append(tried)
        else:
            self.failed_files += 1

    def lost_data(self, size):
        self.lost_data_bytes += size

    def delete_file(self):
        all_nodes = self.all_nodes[:]
        random.shuffle(all_nodes)
        for n in all_nodes:
            if n.delete_file():
                self.deleted_files += 1
                return
        print "no files to delete"

    def _add_event(self, etype):
        rate = getattr(self, "RATE_" + etype)
        next = self.time + random.expovariate(rate)
        self.next.append((next, etype))
        self.next.sort()

    def schedule_events(self):
        types = set([e[1] for e in self.next])
        for etype in self.EVENTS:
            if not etype in types:
                self._add_event(etype)

    def do_event(self):
        time, etype = self.next.pop(0)
        assert time > self.time
        # current_time = self.time
        self.time = time
        self._add_event(etype)
        if etype == "ADDFILE":
            self.add_file()
        elif etype == "DELFILE":
            self.delete_file()
        elif etype == "ADDNODE":
            pass
            #self.add_node()
        elif etype == "DELNODE":
            #self.del_node()
            pass
        # self.print_stats(current_time, etype)

    def print_stats_header(self):
        print "time:  added   failed   lost  avg_tried"

    def print_stats(self, time, etype):
        if not self.published_files:
            avg_tried = "NONE"
        else:
            avg_tried = sum(self.published_files) / len(self.published_files)
        print time, etype, self.added_data, self.failed_files, self.lost_data_bytes, avg_tried, len(self.introducer.living_files), self.introducer.utilization
Example #27
dss.extend([ds1, ds2, ds3, ds4])

rras = []
rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=1460)
rras.append(rra1)

myRRD = RRD(filename, ds=dss, rra=rras, start=startTime)
myRRD.create()

# let's generate some data...
currentTime = startTime
for i in xrange(maxSteps):
    currentTime += step
    # let's update the RRD / purge the buffer every 100 entries
    if i % 100 == 0:
        myRRD.update(debug=False)
    # let's do two different sets of periodic values
    value1 = int(sin(i % 200) * 1000)
    value2 = int(sin((i % 2000) / (200 * random())) * 200)
    value3 = int(sin((i % 4000) / (400 * random())) * 400)
    value4 = int(sin((i % 6000) / (600 * random())) * 600)
    # when you pass more than one value to update buffer like this,
    # they get applied to the DSs in the order that the DSs were
    # "defined" or added to the RRD object.
    myRRD.bufferValue(currentTime, value1, value2, value3, value4)
# add anything remaining in the buffer
myRRD.update()

# Let's set up the objects that will be added to the graph
def1 = DEF(rrdfile=myRRD.filename, vname='myspeed', dsName=ds1.name)
def2 = DEF(rrdfile=myRRD.filename, vname='mysilliness', dsName=ds2.name)
Example #28
def draw_graph(data, group):
  ## Graph bytes_in, bytes_out, request, by time+group
  filename = 'network.rrd'
  graphfile_traffic = 'traffic%s.png' %group
#  graphfileLg_traffic = 'traffic-large.png'
  graphfile_request = 'request%s.png' %group
#  graphfileLg_request = 'request-large'
  
  #define times
  hour = 60 * 60
  day = 24 * 60 * 60
  week = 7 * day
  month = day * 30
  quarter = month * 3
  half = 365 * day / 2
  year = 365 * day
  delta = settings.DELTA * hour
  step = 1
  endTime = int(time.time()) - 600
  startTime = endTime - 360000
  maxSteps = int((endTime-startTime)/step)
  
  # create RRD file
 
#  DSTYPE
#  COUNTER: use this type for values read from SNMP MIBs, such as traffic
#  or packet counters on an interface.
#  GAUGE: use this type for readings like temperature or pressure.
#  DERIVE: use this type if you want the variation (delta) between one
#  moment and another, e.g. the rate of people entering or leaving a room;
#  DERIVE works exactly like COUNTER but without overflow checks.
#  ABSOLUTE: use this type when you count events, e.g. the number of mails
#  since the last alert.
#
#  HEARTBEAT
#  Defines the maximum time between two updates before the stored value is
#  considered UNKNOWN.
#  MIN and MAX are optional parameters which define the range of your data
#  source (DS). If a value falls outside that range it is stored as UNKNOWN.
#  If you do not know the exact range of your values you can set MIN and
#  MAX to U for unknown.

  dss = []
  ds1 = DS(dsName='bytes_out', dsType='ABSOLUTE', heartbeat=200)
  ds2 = DS(dsName='bytes_in', dsType='ABSOLUTE', heartbeat=200)
  ds3 = DS(dsName='request', dsType='COUNTER', heartbeat=200)
  dss.extend([ds1, ds2, ds3])
  
  rras1 = []
  rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)
  rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=2016)
  rra3 = RRA(cf='AVERAGE', xff=0.5, steps=60, rows=720)
  rras1.extend([rra1, rra2, rra3])
  
  myRRD = RRD(filename, step=step, ds=dss, rra=rras1, start=startTime)
  myRRD.create(debug=False)
  
  ## RRD update
  
  counter = 0
  for i in data:
    counter += 1
    bytes_in = i['bytes_in'] 
    bytes_out = i['bytes_out'] 
    requests = i['request'] 
    times = i['time'] 
    print bytes_out/1000000
    myRRD.bufferValue(times, bytes_out, bytes_in, requests)
    if counter % 100 == 0:
      myRRD.update(debug=True)
  myRRD.update(debug=True)
  
  ## RRD graph
  
  def1 = DEF(rrdfile=myRRD.filename, vname='output', dsName=ds1.name)
  def2 = DEF(rrdfile=myRRD.filename, vname='input', dsName=ds2.name)
  def3 = DEF(rrdfile=myRRD.filename, vname='request', dsName=ds3.name)
  vdef11 = VDEF(vname='max_out', rpn='%s,MAXIMUM' % def1.vname)
  vdef12 = VDEF(vname='avg_out', rpn='%s,AVERAGE' % def1.vname)
  vdef21 = VDEF(vname='max_in', rpn='%s,MAXIMUM' % def2.vname)
  vdef22 = VDEF(vname='avg_in', rpn='%s,AVERAGE' % def2.vname)
  vdef31 = VDEF(vname='max_request', rpn='%s,MAXIMUM' % def3.vname)
  vdef32 = VDEF(vname='avg_request', rpn='%s,AVERAGE' % def3.vname)
  
  line1 = LINE(2, defObj=def1, color='#2029CC', legend='Out')
  line2 = LINE(2, defObj=def2, color='#00FF00', legend='In')
  line3 = LINE(2, defObj=def3, color='#FF0000', legend='Request')
  gprint11 = GPRINT(vdef11, 'max\\: %5.1lf %Sbps')
  gprint12 = GPRINT(vdef12, 'avg\\: %5.1lf %Sbps\\n')
  gprint21 = GPRINT(vdef21, 'max\\: %5.1lf %Sbps')
  gprint22 = GPRINT(vdef22, 'avg\\: %5.1lf %Sbps\\n')
  gprint31 = GPRINT(vdef31, 'max\\: %5.1lf %S')
  gprint32 = GPRINT(vdef32, 'avg\\: %5.1lf %S\\n')
  
  
  # ColorAttributes
  ca = ColorAttributes()
  ca.back = '#CCCDE2'  #background
  ca.canvas = '#FFFFFF'#the background of the actual graph
  ca.shadea = '#000000'#left and top border
  ca.shadeb = '#111111'#right and bottom border
  ca.mgrid = '#6666CC' #major grid
  ca.axis = '#000000'  #axis of the graph
  ca.frame = '#CCCDE2' #line around the color spots
  ca.font = '#000000'  #color of the font
  ca.arrow = '#CC0000' # arrow head pointing up and forward
  
## graph traffic
  g = Graph(graphfile_traffic, end=endTime, vertical_label='Bytes/s', color=ca)
  g.data.extend([def1, def2, vdef11, vdef12, vdef21, vdef22, line1, gprint11, gprint12, line2, gprint21, gprint22])
  g.title = '"report traffic %s"'%group
  
  g.start=endTime - delta
  g.step = step
  g.width = 397
  g.height = 182
  g.write(debug=True)
  
#  g.filename = graphfileLg_traffic
#  g.width = 800
#  g.height = 400
#  g.write()

## graph request
  g1 = Graph(graphfile_request, end=endTime, vertical_label='Request/s', color=ca)
  g1.data.extend([def3, vdef31, vdef32, line3, gprint31, gprint32])
  g1.title = '"report request %s"'%group

  g1.start=endTime - settings.DELTA
  g1.step = step
  g1.width = 397
  g1.height = 182
  g1.write(debug=False)
Example #29
def draw_total(res):
  ## graph total(bytes_out, bytes_in, request) by time
  
  # define name
  filename = 'total.rrd'
  graphfile_total_traffic = 'total_traffic.png' 
#  graphfileLg_total_traffic = 'total_traffic-large.png'
  graphfile_total_request = 'total_request.png'
#  graphfileLg_total_request = 'total_request-large'
  
  #define times
  hour = 60 * 60
  day = 24 * 60 * 60
  week = 7 * day
  month = day * 30
  quarter = month * 3
  half = 365 * day / 2
  year = 365 * day
  delta = settings.DELTA * hour
  step = 1
  endTime = int(time.time()) - 600
  startTime = endTime - 360000
  maxSteps = int((endTime-startTime)/step)
  
  ## Create RRD 
  dss = []
  ds1 = DS(dsName='total_bytes_out', dsType='ABSOLUTE', heartbeat=200)
  ds2 = DS(dsName='total_bytes_in', dsType='ABSOLUTE', heartbeat=200)
  ds3 = DS(dsName='total_request', dsType='ABSOLUTE', heartbeat=200)
  dss.extend([ds1, ds2, ds3])

  rras1 = []
  rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)
  rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=2016)
  rra3 = RRA(cf='AVERAGE', xff=0.5, steps=60, rows=720)
  rras1.extend([rra1, rra2, rra3])
  
  myRRD = RRD(filename, step=step, ds=dss, rra=rras1, start=startTime)
  myRRD.create(debug=False)
  
  ## RRD update
  counter = 0
  for i in res:
    counter += 1
    total_bytes_in = int(i['total_bytes_in']) 
    total_bytes_out = int(i['total_bytes_out']) 
    total_requests = int(i['total_request']) 
    t_times = int(i['time']) 
    print total_bytes_out/1000000
    myRRD.bufferValue(t_times, total_bytes_out, total_bytes_in, total_requests)
    if counter % 100 == 0:
      myRRD.update(debug=True)
  myRRD.update(debug=True)
  
  
  ## RRD graph
  def1 = DEF(rrdfile=myRRD.filename, vname='output', dsName=ds1.name, cdef='AVERAGE')
  def2 = DEF(rrdfile=myRRD.filename, vname='input', dsName=ds2.name, cdef='AVERAGE')
  def3 = DEF(rrdfile=myRRD.filename, vname='request', dsName=ds3.name, cdef='AVERAGE')
  
  # Out
  vdef11 = VDEF(vname='max_out', rpn='%s,MAXIMUM' % def1.vname)
  vdef12 = VDEF(vname='avg_out', rpn='%s,AVERAGE' % def1.vname)
  vdef13 = VDEF(vname='min_out', rpn='%s,MINIMUM' % def1.vname)
  
  line1 = LINE(2, defObj=def1, color='#2029CC', legend='Out')
  gprint11 = GPRINT(vdef11, 'max\\: %5.1lf %Sbps')
  gprint12 = GPRINT(vdef12, 'avg\\: %5.1lf %Sbps')
  gprint13 = GPRINT(vdef13, 'min\\: %5.1lf %Sbps\\n')
  
  # In
  vdef21 = VDEF(vname='max_in', rpn='%s,MAXIMUM' % def2.vname)
  vdef22 = VDEF(vname='avg_in', rpn='%s,AVERAGE' % def2.vname)
  
  line2 = LINE(2, defObj=def2, color='#00FF00', legend='In')
  gprint21 = GPRINT(vdef21, 'max\\: %5.1lf %Sbps')
  gprint22 = GPRINT(vdef22, 'avg\\: %5.1lf %Sbps\\n')
  
  # Request
  vdef31 = VDEF(vname='max_request', rpn='%s,MAXIMUM' % def3.vname)
  vdef32 = VDEF(vname='avg_request', rpn='%s,AVERAGE' % def3.vname)
  
  line3 = LINE(2, defObj=def3, color='#FF0000', legend='Request')
  gprint31 = GPRINT(vdef31, 'max\\: %5.1lf %S')
  gprint32 = GPRINT(vdef32, 'avg\\: %5.1lf %S\\n')
  
  # ColorAttributes
  ca = ColorAttributes()
  ca.back = '#CCCDE2'  #background
  ca.canvas = '#FFFFFF'#the background of the actual graph
  ca.shadea = '#000000'#left and top border
  ca.shadeb = '#111111'#right and bottom border
  ca.mgrid = '#6666CC' #major grid
  ca.axis = '#000000'  #axis of the graph
  ca.frame = '#CCCDE2' #line around the color spots
  ca.font = '#000000'  #color of the font
  ca.arrow = '#CC0000' # arrow head pointing up and forward
  
  
  ##  
  g = Graph(graphfile_total_traffic, end=endTime, vertical_label='Bytes/s', color=ca)
  g.data.extend([def1, def2, vdef11, vdef12, vdef13, vdef21, vdef22, line1, gprint11, gprint12, gprint13, line2, gprint21, gprint22])
  g.title = '"report total traffic"'

  g.start = endTime - delta
  
  g.step = step
  g.width = 397
  g.height = 182
  g.write(debug=True)
  
#  g.filename = graphfileLg_total_traffic
#  g.width = 800
#  g.height = 400
#  g.write()
#  
##
  g1 = Graph(graphfile_total_request, end=endTime, vertical_label='Request/s', color=ca)
  g1.data.extend([def3, vdef31, vdef32, line3, gprint31, gprint32])
  g1.title = '"report total request"'

  g1.start = endTime - settings.DELTA
  g1.step = step
  g1.width = 397
  g1.height = 182
  g1.write(debug=True)
Example #30
def main():
    ns = parser.parse_args()
    signal.signal(signal.SIGINT, sigint_handler)
    dss = [
        DataSource(dsName='vsz', dsType='GAUGE', heartbeat=2),
        DataSource(dsName='rss', dsType='GAUGE', heartbeat=2)
    ]
    rras = [
        RRA(cf='AVERAGE', xff=0.5, steps=10, rows=3000),
        #RRA(cf='AVERAGE', xff=0.5, steps=1, rows=100)
        RRA(cf='LAST', xff=0.5, steps=1, rows=300000)
    ]
    try:
        os.remove(rrd_file)
        os.remove(graph_file)
    except OSError:
        pass
    rrd = RRD(rrd_file, ds=dss, rra=rras, step=1)
    rrd.create()
    start = time.time()
    print("Starting at %d." % start)
    while KEEP_GOING:
        if ns.children:
            vsz, rss = pid_and_subs_memory(ns.pid)
        else:
            vsz, rss = pid_memory(ns.pid)
        #print("sample {} {}".format(size, rss))
        if vsz == 0 and rss == 0:
            break
        rrd.bufferValue(time.time(), vsz, rss)
        rrd.update()
        time.sleep(1)
    end = time.time()
    print("Sampling finishes: %d." % end)
    #  #rrdtool fetch foo.rrd AVERAGE --end=now --start=now-50s
    #  command = [
    #      'rrdtool',
    #  	'fetch',
    #      rrd_file,
    #      'AVERAGE',
    #      '--end',
    #      str(int(end)),
    #      '--start',
    #      str(int(start))
    #  ]
    #  ps = subprocess.Popen(command)
    #  ps.wait()
    #CDEF:mem_used_x=mem_used,1024,\* \
    #LINE2:mem_used_x#D7CC00:mem_used
    command = [
        'rrdtool',
        'graph',
        '--title',
        ns.graph_name,
        graph_file,
        '--start',
        str(int(start)),
        '--end',
        str(int(end)),
        #     'DEF:vsz={}:vsz:AVERAGE'.format(rrd_file),
        'DEF:rss={}:rss:AVERAGE'.format(rrd_file),
        #     'CDEF:vsz_k=vsz,1024,*',
        'CDEF:rss_k=rss,1024,*',
        #     'LINE:vsz_k#4287f5:Virtual',
        'LINE:rss_k#42d7f5:Resident',
    ]
    ps = subprocess.check_output(command)
    print(ps)
    sys.exit(0)
Example #31
File: rrdstats.py  Project: cambot/Bookie
class SystemCounts(object):
    """Handle the rrd for the system counts"""
    _datafile = 'systemcount.rrd'
    _outputfile = 'systemcount.png'

    @property
    def datafile(self):
        return os.path.join(self.data_root, self._datafile)

    @property
    def outputfile(self):
        return os.path.join(self.output_root, self._outputfile)

    def _boostrap(self):
        """Put together our bits"""
        self.dss = []
        self.ds1 = DS(dsName='bookmark_count', dsType='GAUGE', heartbeat=hour)
        self.ds2 = DS(dsName='unique_count', dsType='GAUGE', heartbeat=hour)
        self.ds3 = DS(dsName='tag_count', dsType='GAUGE', heartbeat=hour)
        self.dss.extend([self.ds1, self.ds2, self.ds3])

        self.rras = []
        rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=8760)
        self.rras.append(rra1)

        self.myRRD = RRD(self.datafile,
            ds=self.dss,
            rra=self.rras,
            start=int(time.mktime(start_date.timetuple())))
        if not os.path.exists(self.datafile):
            # make sure we create the directory
            if not os.path.exists(os.path.dirname(self.datafile)):
                os.makedirs(os.path.dirname(self.datafile))
            self.myRRD.create()

    def __init__(self, data_root, output_root):
        """Bootstrap, does the data file exist, etc"""
        self.data_root = data_root
        self.output_root = output_root
        self._boostrap()

    def output(self, months=3):
        """Render out the image of the rrd"""
        def1 = DEF(rrdfile=self.datafile,
            vname='bookmark_count',
            dsName=self.ds1.name)
        def2 = DEF(rrdfile=self.datafile,
            vname='unique_count',
            dsName=self.ds2.name)
        def3 = DEF(rrdfile=self.datafile,
            vname='tag_count',
            dsName=self.ds3.name)
        line1 = LINE(defObj=def1,
            color='#01FF13',
            legend='Bookmarks',
            stack=True)
        line2 = LINE(defObj=def2,
            color='#DA7202',
            legend='Unique',
            stack=True)
        line3 = LINE(defObj=def3, color='#BD4902', legend='Tags', stack=True)

        # area1 = AREA(defObj=def1, color='#FFA902', legend='Bookmarks')
        # area2 = AREA(defObj=def2, color='#DA7202', legend='Unique')
        # area3 = AREA(defObj=def3, color='#BD4902', legend='Tags')

        # Let's configure some custom colors for the graph
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'

        # Now that we've got everything set up, let's make a graph
        start_date = time.mktime((today - timedelta(weeks=28)).timetuple())
        end_date = time.mktime(today.timetuple())
        g = Graph(self.outputfile,
            start=int(start_date),
            end=int(end_date),
            vertical_label='count',
            color=ca)
        g.data.extend([def1, def2, def3, line3, line2, line1])

        if not os.path.exists(os.path.dirname(self.outputfile)):
            os.makedirs(os.path.dirname(self.outputfile))

        g.write()

    def mark(self, tstamp, bmarks, uniques, tags):
        """Update the database with some data"""
        timestamp = time.mktime(tstamp.timetuple())
        self.myRRD.bufferValue(int(timestamp), bmarks, uniques, tags)

    def update(self):
        """Update the underlying rrd data"""
        try:
            self.myRRD.update(debug=False)
        except ExternalCommandError, exc:
            print "ERROR", str(exc)
Example #32
class SystemCounts(object):
    """Handle the rrd for the system counts"""
    _datafile = 'systemcount.rrd'
    _outputfile = 'systemcount.png'

    @property
    def datafile(self):
        return os.path.join(self.data_root, self._datafile)

    @property
    def outputfile(self):
        return os.path.join(self.output_root, self._outputfile)

    def _bootstrap(self):
        """Put together our bits"""
        self.dss = []
        self.ds1 = DS(dsName='bookmark_count', dsType='GAUGE', heartbeat=hour)
        self.ds2 = DS(dsName='unique_count', dsType='GAUGE', heartbeat=hour)
        self.ds3 = DS(dsName='tag_count', dsType='GAUGE', heartbeat=hour)
        self.dss.extend([self.ds1, self.ds2, self.ds3])

        self.rras = []
        rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=8760)
        self.rras.append(rra1)

        self.myRRD = RRD(self.datafile,
                         ds=self.dss,
                         rra=self.rras,
                         start=int(time.mktime(start_date.timetuple())))
        if not os.path.exists(self.datafile):
            # make sure we create the directory
            if not os.path.exists(os.path.dirname(self.datafile)):
                os.makedirs(os.path.dirname(self.datafile))
            self.myRRD.create()

    def __init__(self, data_root, output_root):
        """Bootstrap, does the data file exist, etc"""
        self.data_root = data_root
        self.output_root = output_root
        self._bootstrap()

    def output(self, months=3):
        """Render out the image of the rrd"""
        def1 = DEF(rrdfile=self.datafile,
                   vname='bookmark_count',
                   dsName=self.ds1.name)
        def2 = DEF(rrdfile=self.datafile,
                   vname='unique_count',
                   dsName=self.ds2.name)
        def3 = DEF(rrdfile=self.datafile,
                   vname='tag_count',
                   dsName=self.ds3.name)
        line1 = LINE(defObj=def1,
                     color='#01FF13',
                     legend='Bookmarks',
                     stack=True)
        line2 = LINE(defObj=def2, color='#DA7202', legend='Unique', stack=True)
        line3 = LINE(defObj=def3, color='#BD4902', legend='Tags', stack=True)

        # area1 = AREA(defObj=def1, color='#FFA902', legend='Bookmarks')
        # area2 = AREA(defObj=def2, color='#DA7202', legend='Unique')
        # area3 = AREA(defObj=def3, color='#BD4902', legend='Tags')

        # Let's configure some custom colors for the graph
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'

        # Now that we've got everything set up, let's make a graph
        start_date = time.mktime((today - timedelta(weeks=28)).timetuple())
        end_date = time.mktime(today.timetuple())
        g = Graph(self.outputfile,
                  start=int(start_date),
                  end=int(end_date),
                  vertical_label='count',
                  color=ca)
        g.data.extend([def1, def2, def3, line3, line2, line1])

        if not os.path.exists(os.path.dirname(self.outputfile)):
            os.makedirs(os.path.dirname(self.outputfile))

        g.write()

    def mark(self, tstamp, bmarks, uniques, tags):
        """Update the database with some data"""
        timestamp = time.mktime(tstamp.timetuple())
        self.myRRD.bufferValue(int(timestamp), bmarks, uniques, tags)

    def update(self):
        """Update the underlying rrd data"""
        try:
            self.myRRD.update(debug=False)
        except ExternalCommandError, exc:
            print "ERROR", str(exc)
Example #33
import os

from pyrrd.rrd import RRD, RRA, DS
from pyrrd.graph import DEF, CDEF, VDEF
from pyrrd.graph import LINE, AREA, GPRINT
from pyrrd.graph import ColorAttributes, Graph

filename = '%s.rrd' % os.path.splitext(os.path.basename(__file__))[0]

# Let's create an RRD file and dump some data in it
dss = []
rras = []
ds1 = DS(dsName='speed', dsType='COUNTER', heartbeat=600)
dss.append(ds1)
rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=24)
rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=10)
rras.extend([rra1, rra2])
myRRD = RRD(filename, ds=dss, rra=rras, start=920804400)
myRRD.create()
myRRD.bufferValue('920805600', '12363')
myRRD.bufferValue('920805900', '12363')
myRRD.bufferValue('920806200', '12373')
myRRD.bufferValue('920806500', '12383')
myRRD.bufferValue('920806800', '12393')
myRRD.bufferValue('920807100', '12399')
myRRD.bufferValue('920807400', '12405')
myRRD.bufferValue('920807700', '12411')
myRRD.bufferValue('920808000', '12415')
myRRD.bufferValue('920808300', '12420')
myRRD.bufferValue('920808600', '12422')
myRRD.bufferValue('920808900', '12423')
myRRD.update()
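The graph classes imported at the top of this example are never used; a hedged follow-on sketch (the output filename and colour are arbitrary) of how the freshly filled speed RRD could be rendered with them:

def1 = DEF(rrdfile=myRRD.filename, vname='myspeed', dsName=ds1.name)
cdef1 = CDEF(vname='kmh', rpn='%s,3600,*' % def1.vname)  # per-second counter rate scaled to per-hour
line1 = LINE(defObj=cdef1, color='#01FF13', legend='Speed')
g = Graph('speed.png', start=920804400, end=920808900, vertical_label='km/h')
g.data.extend([def1, cdef1, line1])
g.write()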
Example #34
File: rrd.py Project: cmatsuoka/synomon
    def update(self, data):
        ''' Update the RRD '''
        my_rrd = RRD(self._rrd_file)
        my_rrd.bufferValue(time.time(), *data)
        my_rrd.update()
Example #35
import os

from pyrrd.rrd import RRD, RRA, DS
from pyrrd.graph import DEF, CDEF, VDEF
from pyrrd.graph import LINE, AREA, GPRINT
from pyrrd.graph import ColorAttributes, Graph


filename = '%s.rrd' % os.path.splitext(os.path.basename(__file__))[0]

# Let's create an RRD file and dump some data in it
dss = []
rras = []
ds1 = DS(dsName='speed', dsType='COUNTER', heartbeat=600)
dss.append(ds1)
rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=24)
rra2 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=10)
rras.extend([rra1, rra2])
myRRD = RRD(filename, ds=dss, rra=rras, start=920804400)
myRRD.create()
myRRD.bufferValue('920805600', '12363')
myRRD.bufferValue('920805900', '12363')
myRRD.bufferValue('920806200', '12373')
myRRD.bufferValue('920806500', '12383')
myRRD.bufferValue('920806800', '12393')
myRRD.bufferValue('920807100', '12399')
myRRD.bufferValue('920807400', '12405')
myRRD.bufferValue('920807700', '12411')
myRRD.bufferValue('920808000', '12415')
myRRD.bufferValue('920808300', '12420')
myRRD.bufferValue('920808600', '12422')
myRRD.bufferValue('920808900', '12423')
myRRD.update()
Example #36
def GenerateGraph():

    data = db.GetDataHumidityRrd(10000)
    #print len(data)

    filename = 'humidity.rrd'
    graphfile = 'humidity.png'
    graphfileLg = 'humidity-large.png'

    day = 24 * 60 * 60
    week = 7 * day
    month = day * 30
    quarter = month * 3
    half = 365 * day / 2
    year = 365 * day

    startTime = data[0][0] -1
    endTime   = data[-1][0]
    step = 1000
    maxSteps = int((endTime-startTime)/step)

    # Let's create an RRD file and dump some data in it
    dss = []
    ds1 = DS(dsName='humidity', dsType='GAUGE', heartbeat=60)
    dss.extend([ds1])

    #week: RRA:AVERAGE:0.5:6:336
    #For Daily Graph, every 5 minute average for 24 hours:
    #RRA:AVERAGE:0.5:1:288
    rra1 = RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)

    #For Weekly Graph, every 30 minute average for 7 days:
    #RRA:AVERAGE:0.5:6:336
    #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=6, rows=336)

    #For Monthly Graph, every 2 hour average for 30 days:
    #RRA:AVERAGE:0.5:24:360
    #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=32, rows=1080)

    #For Yearly Graph, every 1 day average for 365 days:
    #RRA:AVERAGE:0.5:288:365
    #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=96, rows=365)

    rras = []
    #rra1 = RRA(cf='AVERAGE', xff=0.5, steps=24, rows=1460)
    rras.append(rra1)

    myRRD = RRD(filename, ds=dss, rra=rras, start=startTime)
    myRRD.create()

    # let's generate some data...
    currentTime = startTime
    i = 0
    for row in data:
        timestamp = row[0]
        value1 = row[1]

        # let's update the RRD / purge the buffer every 100 entries
        i = i + 1
        if i % 100 == 0:
            myRRD.update(debug=False)

        # when you pass more than one value to the update buffer like this,
        # they get applied to the DSs in the order that the DSs were
        # "defined" or added to the RRD object.
        myRRD.bufferValue(timestamp, value1)
    # add anything remaining in the buffer
    myRRD.update()

    # Let's set up the objects that will be added to the graph
    def1 = DEF(rrdfile=myRRD.filename, vname='anturi1', dsName=ds1.name)
    vdef1 = VDEF(vname='myavg', rpn='%s,AVERAGE' % def1.vname)
    sensor1 = LINE(defObj=def1, color='#4544FC', legend='anturi1')
    line1 = LINE(defObj=vdef1, color='#01FF13', legend='Average', stack=True)

    # Let's configure some custom colors for the graph
    ca = ColorAttributes()
    ca.back = '#000000'
    ca.canvas = '#000000'
    ca.shadea = '#000000'
    ca.shadeb = '#111111'
    ca.mgrid = '#CCCCCC'
    ca.axis = '#FFFFFF'
    ca.frame = '#AAAAAA'
    ca.font = '#FFFFFF'
    ca.arrow = '#FFFFFF'

    # Now that we've got everything set up, let's make a graph
    #startTime = endTime - 3 * month
    g = Graph(graphfile, start=startTime, end=endTime, vertical_label='kosteus', color=ca)
    g.data.extend([def1, vdef1, sensor1])
    g.write()

    g.filename = graphfileLg
    g.width = 690
    g.height = 300
    g.write()
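The comment inside the loop above about value ordering matters once an RRD has more than one data source; a minimal standalone sketch (the names, step and values are made up) of how the positional ordering works:

# Two DSs, defined in this order: temperature first, humidity second.
dss = [DS(dsName='temperature', dsType='GAUGE', heartbeat=120),
       DS(dsName='humidity', dsType='GAUGE', heartbeat=120)]
rras = [RRA(cf='AVERAGE', xff=0.5, steps=1, rows=1440)]
climate = RRD('climate.rrd', ds=dss, rra=rras, start=1000000000, step=60)
climate.create()
# Values are applied positionally: 21.5 -> temperature, 48.0 -> humidity.
climate.bufferValue(1000000060, 21.5, 48.0)
climate.bufferValue(1000000120, 21.7, 47.5)
climate.update()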
Example #37
class Simulator(object):
    NUM_NODES = 1000
    EVENTS = ["ADDFILE", "DELFILE", "ADDNODE", "DELNODE"]
    RATE_ADDFILE = 1.0 / 10
    RATE_DELFILE = 1.0 / 20
    RATE_ADDNODE = 1.0 / 3000
    RATE_DELNODE = 1.0 / 4000
    P_NODEAVAIL = 1.0

    def __init__(self):
        self.time = 1164783600  # small numbers of seconds since the epoch confuse rrdtool
        self.prevstamptime = int(self.time)

        ds = DataSource(ds_name='utilizationds', ds_type='GAUGE', heartbeat=1)
        rra = RRA(cf='AVERAGE', xff=0.1, steps=1, rows=1200)
        self.rrd = RRD("/tmp/utilization.rrd",
                       ds=[ds],
                       rra=[rra],
                       start=self.time)
        self.rrd.create()

        self.introducer = q = Introducer(self)
        self.all_nodes = [
            Node(randomid(), q, self) for i in range(self.NUM_NODES)
        ]
        q.all_nodes = self.all_nodes
        self.next = []
        self.schedule_events()
        self.verbose = False

        self.added_files = 0
        self.added_data = 0
        self.deleted_files = 0
        self.published_files = []
        self.failed_files = 0
        self.lost_data_bytes = 0  # bytes deleted to make room for new shares

    def stamp_utilization(self, utilization):
        if int(self.time) > (self.prevstamptime + 1):
            self.rrd.bufferValue(self.time, utilization)
            self.prevstamptime = int(self.time)

    def write_graph(self):
        self.rrd.update()
        self.rrd = None
        import gc
        gc.collect()

        def1 = graph.DataDefinition(vname="a",
                                    rrdfile='/tmp/utilization.rrd',
                                    ds_name='utilizationds')
        area1 = graph.Area(value="a",
                           color="#990033",
                           legend='utilizationlegend')
        g = graph.Graph('/tmp/utilization.png',
                        imgformat='PNG',
                        width=540,
                        height=100,
                        vertical_label='utilizationverticallabel',
                        title='utilizationtitle',
                        lower_limit=0)
        g.data.append(def1)
        g.data.append(area1)
        g.write()

    def add_file(self):
        size = random.randrange(1000)
        n = random.choice(self.all_nodes)
        if self.verbose:
            print("add_file(size=%d, from node %s)" % (size, n))
        fileid = randomid()
        able = n.publish_file(fileid, size)
        if able:
            able, tried = able
            self.added_files += 1
            self.added_data += size
            self.published_files.append(tried)
        else:
            self.failed_files += 1

    def lost_data(self, size):
        self.lost_data_bytes += size

    def delete_file(self):
        all_nodes = self.all_nodes[:]
        random.shuffle(all_nodes)
        for n in all_nodes:
            if n.delete_file():
                self.deleted_files += 1
                return
        print("no files to delete")

    def _add_event(self, etype):
        rate = getattr(self, "RATE_" + etype)
        next = self.time + random.expovariate(rate)
        self.next.append((next, etype))
        self.next.sort()

    def schedule_events(self):
        types = set([e[1] for e in self.next])
        for etype in self.EVENTS:
            if not etype in types:
                self._add_event(etype)

    def do_event(self):
        time, etype = self.next.pop(0)
        assert time > self.time
        # current_time = self.time
        self.time = time
        self._add_event(etype)
        if etype == "ADDFILE":
            self.add_file()
        elif etype == "DELFILE":
            self.delete_file()
        elif etype == "ADDNODE":
            pass
            #self.add_node()
        elif etype == "DELNODE":
            #self.del_node()
            pass
        # self.print_stats(current_time, etype)

    def print_stats_header(self):
        print("time:  added   failed   lost  avg_tried")

    def print_stats(self, time, etype):
        if not self.published_files:
            avg_tried = "NONE"
        else:
            avg_tried = sum(self.published_files) / len(self.published_files)
        print(time, etype, self.added_data,
              self.failed_files, self.lost_data_bytes, avg_tried,
              len(self.introducer.living_files), self.introducer.utilization)
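A driver loop for this simulator is not shown; a minimal sketch of how it could be run (the event count is arbitrary, and Introducer, Node, randomid, graph, DataSource, RRA and RRD are assumed to be imported elsewhere in the file):

sim = Simulator()
sim.print_stats_header()
for _ in range(100000):                       # arbitrary number of simulated events
    sim.do_event()
    sim.stamp_utilization(sim.introducer.utilization)
sim.write_graph()                             # flush the RRD and render /tmp/utilization.png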
Example #38
class RRDController(object):

    def __init__(self, rrdfile, static_path):
        
        self.rrdfile = rrdfile
        self.static_path = static_path
        
    def delete(self):
        os.unlink(self.rrdfile)
           
    def create(self):

        if os.path.exists(self.rrdfile):
            self.rrd = RRD(self.rrdfile)
            return
        
        dss = []
        
        ds1 = DS(dsName="requests", dsType="COUNTER",  heartbeat=120, minval=0, maxval=100000000)
        ds2 = DS(dsName="connections", dsType="ABSOLUTE",  heartbeat=120, minval=0, maxval=60000)
        ds3 = DS(dsName="reading", dsType="ABSOLUTE",  heartbeat=120, minval=0, maxval=60000)
        ds4 = DS(dsName="writing", dsType="ABSOLUTE",  heartbeat=120, minval=0, maxval=60000)
        ds5 = DS(dsName="waiting", dsType="ABSOLUTE",  heartbeat=120, minval=0, maxval=60000)
        dss.extend([ds1,ds2,ds3,ds4,ds5])
        
        rras = []
        rra1 = RRA(cf="AVERAGE", xff=0.5, steps=1, rows=2880)    	
        rra2 = RRA(cf="AVERAGE", xff=0.5, steps=30, rows=672)
        rra3 = RRA(cf="AVERAGE", xff=0.5, steps=120, rows=732)
        rra4 = RRA(cf="AVERAGE", xff=0.5, steps=720, rows=1460)
        rras.extend([rra1, rra2, rra3, rra4])
        
        self.rrd = RRD(self.rrdfile, step=60, ds=dss, rra=rras)
        self.rrd.create(debug=False)
        time.sleep(2)
        
    def update(self, connections, requests, reading, writing, waiting):
        self.rrd.bufferValue("%d:%d:%d:%d:%d:%d" % (time.time(), connections, requests, reading, writing, waiting))
        self.rrd.update(template="connections:requests:reading:writing:waiting", debug=True)
    
    def graph_request(self, period='day'):
        def1 = DEF(rrdfile=self.rrdfile, vname='request', dsName="requests", cdef="AVERAGE")
        
        vdef1 = VDEF(vname='max', rpn='request,MAXIMUM')
        vdef2 = VDEF(vname='avg', rpn='request,AVERAGE')
        vdef3 = VDEF(vname='last', rpn='request,LAST')
        
        area1 = AREA(defObj=def1, color='#336600', legend='Requests')
        gprint1 = GPRINT(vdef1, "Max\\: %5.1lf %S")
        gprint2 = GPRINT(vdef2, "Avg\\: %5.1lf %S")
        gprint3 = GPRINT(vdef3, "Current\\: %5.1lf %Sreq/sec")
        
        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'
        
        img = "request-%s.png" % period
        imgname = self.static_path +"/"+ img
        start = '-1'+period
        
        g = Graph(imgname, imgformat='PNG', start=start, vertical_label='request/sec', color=ca, width=700, height=150)
        g.data.extend([def1, vdef1, vdef2, vdef3, area1, gprint1, gprint2, gprint3])
        g.write()

    def graph_connection(self, period='day'):
        def1 = DEF(rrdfile=self.rrdfile, vname='connections', dsName="connections", cdef="AVERAGE")
        def2 = DEF(rrdfile=self.rrdfile, vname='reading', dsName="reading", cdef="AVERAGE")
        def3 = DEF(rrdfile=self.rrdfile, vname='writing', dsName="writing", cdef="AVERAGE")
        def4 = DEF(rrdfile=self.rrdfile, vname='waiting', dsName="waiting", cdef="AVERAGE")

        # TOTAL
        vdef1 = VDEF(vname='max', rpn='connections,MAXIMUM')
        vdef2 = VDEF(vname='avg', rpn='connections,AVERAGE')
        vdef3 = VDEF(vname='last', rpn='connections,LAST')
        vdef4 = VDEF(vname='min', rpn='connections,MINIMUM')

        line1 = LINE(1, defObj=def1, color='#22FF22', legend='Total')
        gprint1 = GPRINT(vdef1, "Max\\: %5.1lf %S")
        gprint2 = GPRINT(vdef2, "Avg\\: %5.1lf %S")
        gprint3 = GPRINT(vdef3, "Current\\: %5.1lf %S")
        gprint4 = GPRINT(vdef4, "Min\\: %5.1lf %S\\n")

        # READING
        reading_vdef1 = VDEF(vname='rmax', rpn='reading,MAXIMUM')
        reading_vdef2 = VDEF(vname='ravg', rpn='reading,AVERAGE')
        reading_vdef3 = VDEF(vname='rlast', rpn='reading,LAST')
        reading_vdef4 = VDEF(vname='rmin', rpn='reading,MINIMUM')

        line2 = LINE(1, defObj=def2, color='#0022FF', legend='Reading')
        reading_gprint1 = GPRINT(reading_vdef1, "Max\\: %5.1lf %S")
        reading_gprint2 = GPRINT(reading_vdef2, "Avg\\: %5.1lf %S")
        reading_gprint3 = GPRINT(reading_vdef3, "Current\\: %5.1lf %S")
        reading_gprint4 = GPRINT(reading_vdef4, "Min\\: %5.1lf %S\\n")

        # writing
        writing_vdef1 = VDEF(vname='wmax', rpn='writing,MAXIMUM')
        writing_vdef2 = VDEF(vname='wavg', rpn='writing,AVERAGE')
        writing_vdef3 = VDEF(vname='wlast', rpn='writing,LAST')
        writing_vdef4 = VDEF(vname='wmin', rpn='writing,MINIMUM')

        line3 = LINE(1, defObj=def3, color='#FF0000', legend='Writing')
        writing_gprint1 = GPRINT(writing_vdef1, "Max\\: %5.1lf %S")
        writing_gprint2 = GPRINT(writing_vdef2, "Avg\\: %5.1lf %S")
        writing_gprint3 = GPRINT(writing_vdef3, "Current\\: %5.1lf %S")
        writing_gprint4 = GPRINT(writing_vdef4, "Min\\: %5.1lf %S\\n")

        # WAITING
        waiting_vdef1 = VDEF(vname='wamax', rpn='waiting,MAXIMUM')
        waiting_vdef2 = VDEF(vname='waavg', rpn='waiting,AVERAGE')
        waiting_vdef3 = VDEF(vname='walast', rpn='waiting,LAST')
        waiting_vdef4 = VDEF(vname='wamin', rpn='waiting,MINIMUM')

        line4 = LINE(1, defObj=def4, color='#00AAAA', legend='Waiting')
        waiting_gprint1 = GPRINT(waiting_vdef1, "Max\\: %5.1lf %S")
        waiting_gprint2 = GPRINT(waiting_vdef2, "Avg\\: %5.1lf %S")
        waiting_gprint3 = GPRINT(waiting_vdef3, "Current\\: %5.1lf %S")
        waiting_gprint4 = GPRINT(waiting_vdef4, "Min\\: %5.1lf %S\\n")

        ca = ColorAttributes()
        ca.back = '#333333'
        ca.canvas = '#333333'
        ca.shadea = '#000000'
        ca.shadeb = '#111111'
        ca.mgrid = '#CCCCCC'
        ca.axis = '#FFFFFF'
        ca.frame = '#AAAAAA'
        ca.font = '#FFFFFF'
        ca.arrow = '#FFFFFF'

        img = "connection-%s.png" % period
        imgname = self.static_path +"/"+ img
        start = '-1'+period

        g = Graph(imgname, imgformat='PNG', start=start, vertical_label='connections', color=ca, width=700, height=150)
        g.data.extend([def1, vdef1, vdef2, vdef3, vdef4, line1, gprint1, gprint2, gprint3, gprint4])
        g.data.extend([def2, reading_vdef1, reading_vdef2, reading_vdef3, reading_vdef4, line2, reading_gprint1, reading_gprint2, reading_gprint3, reading_gprint4])
        g.data.extend([def3, writing_vdef1, writing_vdef2, writing_vdef3, writing_vdef4, line3, writing_gprint1, writing_gprint2, writing_gprint3, writing_gprint4])
        g.data.extend([def4, waiting_vdef1, waiting_vdef2, waiting_vdef3, waiting_vdef4, line4, waiting_gprint1, waiting_gprint2, waiting_gprint3, waiting_gprint4])
        g.write()
		
    def graph(self, period='day'):
        
        self.graph_request(period)
        self.graph_connection(period)
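A minimal sketch of how this controller might be driven from a polling loop; the paths and the status numbers (which look like nginx stub_status counters) are assumptions:

ctl = RRDController('/var/lib/stats/nginx.rrd', '/var/www/static')  # hypothetical paths
ctl.create()    # creates the file on the first run, reopens it afterwards
ctl.update(connections=42, requests=1337, reading=2, writing=5, waiting=35)
ctl.graph('day')    # writes request-day.png and connection-day.png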