Exemple #1
0
class VMStat(TimeSeriesCollection):
    """Parses `vmstat` output into per-column time series.

    Matches the two vmstat banner/header lines, then collects every
    following 16-column stats row under the vmstat column names
    (r, b, swpd, free, ...).
    """

    # First banner line printed at the top of each vmstat report.
    file_hdr_line = line(
        "^procs -----------memory---------- ---swap-- -----io---- -system-- ----cpu----$",
        [])
    # Column-name header line that follows the banner.
    stats_hdr_line = line(
        "^ r  b   swpd   free   buff  cache   si   so    bi    bo   in   cs us sy id wa$",
        [])
    # One data row: 16 whitespace-separated integers, one per column.
    stats_line = line("\s+(\d+)" * 16, [('r', 'd'), ('b', 'd'), ('swpd', 'd'),
                                        ('free', 'd'), ('buff', 'd'),
                                        ('cache', 'd'), ('si', 'd'),
                                        ('so', 'd'), ('bi', 'd'), ('bo', 'd'),
                                        ('in', 'd'), ('cs', 'd'), ('us', 'd'),
                                        ('sy', 'd'), ('id', 'd'), ('wa', 'd')])

    def parse(self, data):
        """Consume *data* (a list of raw lines) into a timeseries dict.

        The list is reversed in place first -- presumably until()/take()
        pop lines off the end of the list (TODO confirm helper semantics).
        """
        res = default_empty_timeseries_dict()
        data.reverse()
        while True:
            m = until(self.file_hdr_line, data)
            if m == False:  # no further vmstat report in the input
                break
            m = take(self.stats_hdr_line, data)
            assert m != False
            m = take_while([self.stats_line], data)
            for stat_line in m:
                # Python 2 API: each match dict yields (column, value) pairs.
                for val in stat_line.iteritems():
                    res[val[0]] += [val[1]]
        return res
Exemple #2
0
    def __init__(self, o, w, h):
        """Build a right triangle from origin *o* (a point with .x/.y),
        width *w* and height *h*, registering its three sides as lines."""
        super().__init__(o, w, h)
        left = line(o, (o.x, o.y + h))
        bottom = line(o, (o.x + w, o.y))
        hypotenuse = line((o.x, o.y + h), (o.x + w, o.y))

        self.sides.append(left)  # left side
        self.sides.append(bottom)  # bottom side
        self.sides.append(hypotenuse)  # hypotenuse (original comment said "left sides")
 def __init__(self,o,w,h):
     """Build a right triangle from origin *o*, width *w*, height *h*;
     registers its three sides as line segments."""
     super().__init__(o,w,h)
     left = line(o,(o.x,o.y+h))
     bottom = line(o,(o.x+w,o.y))
     hypotenuse = line((o.x,o.y+h),(o.x+w,o.y))

     self.sides.append(left) # left side
     self.sides.append(bottom) # bottom side
     self.sides.append(hypotenuse) # hypotenuse (original comment said "left sides")
    def __init__(self,o,w,h):
        """Build a rectangle from origin *o*, width *w*, height *h*;
        registers its four sides as line segments."""
        super().__init__(o,w,h)
        left = line(o,(o.x,o.y+h))
        bottom = line(o,(o.x+w,o.y))
        right = line((o.x+w,o.y),(o.x+w,o.y+h))
        top = line((o.x,o.y+h),(o.x+w,o.y+h))

        self.sides.append(left) # left side
        self.sides.append(bottom) # bottom side
        self.sides.append(right) # right side (original comment said "left sides")
        self.sides.append(top) # top side (original comment said "bottome sides")
Exemple #5
0
    def __init__(self, o, w, h):
        """Build a rectangle from origin *o* (a point with .x/.y), width *w*
        and height *h*, registering its four sides as line segments."""
        super().__init__(o, w, h)
        left = line(o, (o.x, o.y + h))
        bottom = line(o, (o.x + w, o.y))
        right = line((o.x + w, o.y), (o.x + w, o.y + h))
        top = line((o.x, o.y + h), (o.x + w, o.y + h))

        self.sides.append(left)  # left side
        self.sides.append(bottom)  # bottom side
        self.sides.append(right)  # right side (original comment said "left sides")
        self.sides.append(top)  # top side (original comment said "bottome sides")
Exemple #6
0
def perp_distance(point, line):
    """Return the perpendicular offset distance between *point* (x, y)
    and *line* evaluated at the point's x value.

    *line* is callable (y at a given x) and exposes a ``slope``
    attribute; the vertical gap is projected onto the line's normal
    via the 1/sqrt(1 + slope^2) factor.
    """
    vertical_offset = point.y - line(point.x)
    normalization = math.sqrt(1.0 + line.slope**2)
    return vertical_offset / normalization
Exemple #7
0
def distance(point, line):
    """Return the delta-y offset between *point* (x, y) and *line*
    evaluated at the point's x value."""
    expected_y = line(point.x)
    return point.y - expected_y
Exemple #8
0
def perp_distance(point, line):
    """Perpendicular offset distance from *point* (x, y) to *line*
    evaluated at the point's x value.

    The vertical gap at point.x is scaled by 1/sqrt(1 + slope^2) to
    project it onto the line's normal direction.
    """
    dy = point.y - line(point.x)
    return dy / math.sqrt(1.0 + line.slope ** 2)
Exemple #9
0
class QPS(TimeSeriesCollection):
    """Time-series collection of queries-per-second samples."""

    # One "tick qps" pair per input line; tick is captured but unused by parse().
    line = line("(\d+)\s+([\d]+)\n", [('tick', 'd'), ('qps', 'f')])

    def parse(self, data):
        """Parse raw qps lines into a timeseries dict holding a 'qps' series."""
        series = default_empty_timeseries_dict()
        # Loop variable renamed from `line` to avoid shadowing the class attr.
        for raw in data:
            parsed = self.line.parse_line(raw)
            series['qps'] += [parsed['qps']]
        return series
Exemple #10
0
def distance(point, line):
    """Signed delta-y between *point* (x, y) and *line* at point.x."""
    line_y = line(point.x)
    return point.y - line_y
Exemple #11
0
class IOStat(TimeSeriesCollection):
    """Parses `iostat` output into per-metric time series.

    Collects the avg-cpu percentages (prefixed 'cpu_') and per-device
    block I/O counters (prefixed 'dev:<name>_') from each report.
    """

    # First line of an iostat run, e.g. "Linux 2.6... (host) date".
    file_hdr_line = line("Linux.*", [])
    # avg-cpu section header.
    avg_cpu_hdr_line = line(
        "^avg-cpu:  %user   %nice %system %iowait  %steal   %idle$", [])
    # avg-cpu data row: six floats.
    avg_cpu_line = line("^" + "\s+([\d\.]+)" * 6 + "$", [('user', 'f'),
                                                         ('nice', 'f'),
                                                         ('system', 'f'),
                                                         ('iowait', 'f'),
                                                         ('steal', 'f'),
                                                         ('idle', 'f')])
    # Device section header.
    dev_hdr_line = line(
        "^Device:            tps   Blk_read/s   Blk_wrtn/s   Blk_read   Blk_wrtn$",
        [])
    # NOTE(review): ' Blk_read' / ' Blk_wrtn' each appear twice with
    # different types; if line() stores fields in a dict the per-second
    # floats are presumably overwritten by the totals -- verify against
    # the line() helper's semantics.
    dev_line = line(
        "^(\w+)\s+([\d\.]+)\s+([\d\.]+)\s+([\d\.]+)\s+(\d+)\s+(\d+)$",
        [('device', 's'), ('tps', 'f'), (' Blk_read', 'f'), (' Blk_wrtn', 'f'),
         (' Blk_read', 'd'), (' Blk_wrtn', 'd')])

    def parse(self, data):
        """Consume *data* (list of raw lines) into a timeseries dict.

        The list is reversed in place -- presumably until()/take() pop
        lines off the end of the list (TODO confirm helper semantics).
        """
        res = default_empty_timeseries_dict()
        data.reverse()
        m = until(self.file_hdr_line, data)
        assert m != False
        while True:
            m = until(self.avg_cpu_hdr_line, data)
            if m == False:  # no more iostat reports in the input
                break

            m = take(self.avg_cpu_line, data)
            assert m
            # Python 2 API: (name, value) pairs from the match dict.
            for val in m.iteritems():
                res['cpu_' + val[0]] += [val[1]]

            m = until(self.dev_hdr_line, data)
            assert m != False

            m = take_while([self.dev_line], data)
            for device in m:
                dev_name = device.pop('device')
                for val in device.iteritems():
                    res['dev:' + dev_name + '_' + val[0]] += [val[1]]

        return res
Exemple #12
0
 def parse_client_meta(self, data):
     """Scan *data* for the stress-client banner line and summarize it.

     Returns a "D/U/I/R = d/u/i/r Duration = n" string built from the
     deletes/updates/inserts/reads load mix and the run duration.
     """
     client_line = line(
         '\[host: [\d\.]+, port: \d+, clients: \d+, load: (\d+)/(\d+)/(\d+)/(\d+), keys: \d+-\d+, values: \d+-\d+ , duration: (\d+), batch factor: \d+-\d+, latency file: latency.txt, QPS file: qps.txt\]',
         [('deletes', 'd'), ('updates', 'd'), ('inserts', 'd'),
          ('reads', 'd'), ('duration', 'd')])
     m = until(client_line, data)
     assert m != False
     return "D/U/I/R = %d/%d/%d/%d Duration = %d" % (
         m['deletes'], m['updates'], m['inserts'], m['reads'],
         m['duration'])
Exemple #13
0
class RunStats(TimeSeriesCollection):
    """Parses a tab-separated table: one header row of column names
    followed by integer data rows, one series per column.

    Relies on a module-level `ncolumns` to size the row patterns.
    """

    # Header row: ncolumns tab-separated names (digits/underscore/#).
    name_line = line("^" + "([\#\d_]+)[\t\n]+" * ncolumns,
                     [('col%d' % i, 's') for i in range(ncolumns)])
    # Data row: ncolumns tab-separated integers.
    data_line = line("^" + "(\d+)[\t\n]+" * ncolumns,
                     [('col%d' % i, 'd') for i in range(ncolumns)])

    def parse(self, data):
        """Consume *data* (list of raw lines) into a timeseries dict keyed
        by the header row's column names."""
        res = default_empty_timeseries_dict()

        # Presumably take()/take_while() pop lines off the end of the list
        # (TODO confirm helper semantics).
        data.reverse()

        m = take(self.name_line, data)
        assert m
        # Maps 'col%d' placeholder -> actual column name from the header.
        col_names = m

        m = take_while([self.data_line], data)
        # NOTE: loop var `line` shadows the module-level line() factory
        # within this loop body only.
        for line in m:
            for col in line.iteritems():
                res[col_names[col[0]]] += [col[1]]

        return res
Exemple #14
0
	def append_line (self, line_number, new_line) :
		"""Feed one raw Fortran source line into the line buffer.

		Strips trailing blanks and all tabs, skips comment/empty lines,
		glues F90 ('&'-terminated / '&'-prefixed) and F77 (column-6
		marker) continuation lines onto the previous logical line, and
		otherwise finalizes the previous line and starts a new one.
		"""
		continuation_line = False
		new_line = new_line.rstrip(' \n').replace('\t', '')

		## is it a comment or empty line?
		if new_line == "" or new_line[0] in ["C", "c", "*"] or new_line.lstrip(' ')[0] == "!" :
			return

		# is this a continuation line ?
		# F90-style
		if self.continuation_pending :
			if new_line.lstrip (" ")[0] == "&" :
				new_line = new_line.lstrip (" ")[1:]
			continuation_line = True
			self.continuation_pending = False
		# F77 style:
		# we interpret it as continuation only if there is neither a space nor a letter
		# in column 6 to prevent trouble with free form source starting in column 6
		# If anybody used a letter in column 6 as continuation marker, the code
		# is pretty f****d up anyway
		if new_line[:5].isspace () and not new_line[5].isalpha () and not new_line[5].isspace () :
			new_line = new_line[6:]
			continuation_line = True

		# continuation pending ?
		if len(new_line) > 0 and new_line[-1] == '&' :
			new_line = new_line[:-1]
			self.continuation_pending = True

		if not new_line == "" :
			if continuation_line :
				self.lines[-1].continue_line (line_number, new_line)
			else:
				if not self.lines == [] :
                                        try:
                                                self.lines[-1].line_is_complete ()
                                        except error.FortranException, ex:
                                                if self.ignore_errors :
                                                        print str(ex)
                                                        print "Error is ignored"
                                                        del self.lines[-1]
                                                else :
                                                        raise ex

				self.lines.append ( line (line_number, new_line) )
def createNewline(self, wherex, wherey, screenCoordinates=1):
    """Create a new 'line' node in the ASG and draw it on the UML canvas.

    wherex/wherey: position; interpreted as screen coordinates when
    screenCoordinates is truthy, otherwise as canvas coordinates.
    Returns the new semantic object, or None (implicit) when a
    pre/post-condition constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # try the global constraints...
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return

    new_semantic_obj = line(self)
    # Number of existing 'line' nodes; used to uniquify the keyword label.
    ne = len(self.ASGroot.listNodes["line"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(
            new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_line(self.UMLmodel.canvasx(wherex),
                             self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_line(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("line", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Re-check global constraints after insertion.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return

    # Node-local post-condition.
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return

    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
Exemple #16
0
class default_empty_timeseries_dict(dict):
    """Dict of TimeSeries that understands "name[units]" keys.

    Keys of the form "name[units]" are stored under the bare name;
    missing keys read as a fresh empty TimeSeries so callers can write
    `d[k] += [v]` without initializing first.
    """

    # Splits a "name[units]" key into its two parts.
    units_line = line("(\w+)\[(\w+)\]", [('key', 's'), ('units', 's')])

    def __setitem__(self, key, item):
        # Strip a "[units]" suffix before storing.
        # NOTE(review): `units` is extracted but discarded here.
        m = self.units_line.parse_line(key)
        if m:
            key = m['key']
            units = m['units']
        dict.__setitem__(self, key, item)

    def __getitem__(self, key):
        # Return the stored series, or a fresh empty TimeSeries(units).
        # The default is NOT inserted into the dict; the common
        # `d[k] += [v]` pattern still works because the augmented
        # assignment ends with a __setitem__ call.
        m = self.units_line.parse_line(key)
        if m:
            key = m['key']
            units = m['units']
        else:
            units = ''
        if key in self:
            return self.get(key)
        else:
            return TimeSeries(units)

    def copy(self):
        # Shallow copy preserving this subclass (dict.copy() would
        # return a plain dict without the default behavior).
        copy = default_empty_timeseries_dict()
        copy.update(self)
        return copy

    # Hack to enforce a specific ordering of RethinkDB and competitors
    def iteritems(self):
        # Python 2 API: keys() returns a list here. Sort descending so
        # competitor ordering is deterministic, then force RethinkDB first.
        sorted_keys = self.keys()
        sorted_keys.sort()
        sorted_keys.reverse()
        result = []
        # Put RethinkDB to the beginning
        if 'RethinkDB' in self:
            result.append(('RethinkDB', self['RethinkDB']))
        for x in sorted_keys:
            if x != 'RethinkDB':
                result.append((x, self[x]))

        return iter(result)
def createNewline(self, wherex, wherey, screenCoordinates = 1):
   """Create a new 'line' node in the ASG and draw it on the UML canvas.

   wherex/wherey: position; interpreted as screen coordinates when
   screenCoordinates is truthy, otherwise as canvas coordinates.
   Returns the new semantic object, or None (implicit) on a constraint
   violation.
   """
   self.fromClass = None
   self.toClass = None
   # try the global constraints...
   res = self.ASGroot.preCondition(ASG.CREATE)
   if res:
      self.constraintViolation(res)
      self.mode=self.IDLEMODE
      return

   new_semantic_obj = line(self)
   # Number of existing 'line' nodes; used to uniquify the keyword label.
   ne = len(self.ASGroot.listNodes["line"])
   if new_semantic_obj.keyword_:
      new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
   if screenCoordinates:
      new_obj = graph_line(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
   else: # already in canvas coordinates
      new_obj = graph_line(wherex, wherey, new_semantic_obj)
   new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
   self.UMLmodel.addtag_withtag("line", new_obj.tag)
   new_semantic_obj.graphObject_ = new_obj
   self.ASGroot.addNode(new_semantic_obj)
   # Re-check global constraints after insertion.
   res = self.ASGroot.postCondition(ASG.CREATE)
   if res:
      self.constraintViolation(res)
      self.mode=self.IDLEMODE
      return

   # Node-local post-condition.
   res = new_semantic_obj.postCondition(ASGNode.CREATE)
   if res:
      self.constraintViolation(res)
      self.mode=self.IDLEMODE
      return

   self.mode=self.IDLEMODE
   if self.editGGLabel :
      self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
   else:
      self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
   return new_semantic_obj
Exemple #18
0
class RDBStat():
    """Polls a server for `rdb stats` over a plain socket.

    A background thread (see start()) issues an "rdb stats" request every
    `interval` seconds, parses the STAT lines into a dict, hands the dict
    to `stats_callback` (if any), and checks each stat against the
    acceptable ranges in `limits`, raising StatError on a violation.
    """

    # One pattern per value type; take_while() tries them in this order.
    int_line = line("^STAT\s+([\w\[\]]+)\s+(\d+|-)$", [('name', 's'),
                                                       ('value', 'd')])
    flt_line = line("^STAT\s+([\w\[\]]+)\s+([\d\.]+|-)$", [('name', 's'),
                                                           ('value', 'f')])
    str_line = line("^STAT\s+([\w\[\]]+)\s+(.+)$", [('name', 's'),
                                                    ('value', 's')])
    end_line = line("END", [])

    def __init__(self, addrinfo, limits=None, interval=1, stats_callback=None):
        """addrinfo: (host, port) pair to connect to.
        limits: optional dict mapping stat name -> acceptable container/range.
        interval: polling period in seconds.
        stats_callback: called with the stats dict after every successful poll.
        """
        self.socket = socket.socket()
        self.socket.connect(addrinfo)
        self.socket.setblocking(True)
        # BUG FIX: the original assigned self.limits twice -- first to the
        # constructor argument, then unconditionally to None -- which
        # discarded the caller's limits and made check() crash on
        # None.iteritems().  Also replaces the shared mutable {} default.
        self.limits = limits if limits is not None else {}
        self.interval = interval
        self.stats_callback = stats_callback  # we pass a dictionary with stats whenever we get the new stats
        self.last_stats = {}
        self.thr = None

    def check(self, limits=None):
        """Request stats once, parse them, fire the callback, verify limits.

        Raises StatError when a stat falls outside its acceptable range.
        """
        if limits:
            self.limits = limits

        #send the stat request
        self.socket.send("rdb stats\r\n")

        #recv the response until the END marker shows up
        data = ''
        while True:
            try:
                if re.search("END", data):
                    break
                new_data = self.socket.recv(1000)
                if len(new_data) == 0:
                    # peer closed the connection; stop the poll loop
                    self.keep_going = False
                    return
                data += new_data
            except:
                # best-effort: any socket error just stops the poll loop
                self.keep_going = False
                return

        assert data  #make sure the server actually gave us back something

        data = data.splitlines()
        data.reverse()

        matches = take_while([self.int_line, self.flt_line, self.str_line],
                             data)
        if not matches:
            return

        stat_dict = {}
        for stat in matches:
            stat_dict[stat["name"]] = stat["value"]

        self.last_stats = stat_dict
        if self.stats_callback:
            self.stats_callback(stat_dict)

        # With no limits configured this loop simply does nothing.
        for name, acceptable_range in self.limits.iteritems():
            if not stat_dict[name] in acceptable_range:
                raise (StatError(name, stat_dict[name], acceptable_range))

    def run(self):
        """Poll loop body; runs in the thread created by start()."""
        self.keep_going = True
        time.sleep(5)  # let the server warm up before the first poll
        try:
            while self.keep_going:
                self.check()
                time.sleep(self.interval)
        finally:
            self.keep_going = False
            try:
                self.socket.close()
            except:
                pass

    def start(self):
        """Spawn the polling thread."""
        self.thr = threading.Thread(target=self.run)
        self.thr.start()

    def stop(self):
        """Ask the poll loop to exit and join its thread."""
        self.keep_going = False
        if self.thr and self.thr.is_alive():
            self.thr.join()
Exemple #19
0
 def parse_server_meta(self, data):
     """Scan *data* for the DB thread-count line and report it as a string."""
     threads_line = line('Number of DB threads: (\d+)', [('threads', 'd')])
     m = until(threads_line, data)
     assert m != False
     return "Threads: %d" % m['threads']
Exemple #20
0
 def parse_server_meta(self, data):
     """Scan *data* for the DB thread-count line and report it as a string."""
     threads_line = line('Number of DB threads: (\d+)',
                         [('threads', 'd')])
     m = until(threads_line, data)
     assert m != False
     return "Threads: %d" % m['threads']
Exemple #21
0
    else:
        return prefix + value_padding + str(num).zfill(6)


def sock_readline(sock_file):
    """Read from *sock_file* until a chunk ends in CRLF; return all text read.

    Accumulates every readline() result, so embedded bare newlines are kept.
    """
    pieces = []
    while True:
        piece = sock_file.readline()
        pieces.append(piece)
        if piece.endswith('\r\n'):
            break
    return ''.join(pieces)


# Matches a memcached-protocol "VALUE <key> <flags> <length>\r\n" header.
value_line = line("^VALUE\s+([^\s]+)\s+(\d+)\s+(\d+)\r\n$", [('key', 's'),
                                                             ('flags', 'd'),
                                                             ('length', 'd')])


def is_sorted_output(kvs):
    """Return True iff the 'key' values in *kvs* are strictly increasing.

    kvs: iterable of dicts, each with a 'key' entry.  Adjacent duplicate
    keys count as unsorted (strict ordering).  Empty input is sorted.

    BUG FIX: the original used `if not k:` to detect the first element,
    which mis-handled falsy keys ('' or 0) -- e.g. two consecutive empty
    keys were reported as sorted.  A sentinel object fixes that.
    """
    _missing = object()
    prev = _missing
    for kv in kvs:
        key = kv['key']
        if prev is not _missing and prev >= key:
            return False
        prev = key
    return True
Exemple #22
0
    if num % 5 == 4:
        return prefix + large_value_padding + str(num).zfill(6)
    else:
        return prefix + value_padding + str(num).zfill(6)


def sock_readline(sock_file):
    """Accumulate readline() results from *sock_file* until one ends in CRLF,
    then return everything read (including any embedded bare newlines)."""
    collected = ''
    while True:
        chunk = sock_file.readline()
        collected += chunk
        if chunk.endswith('\r\n'):
            return collected

# Matches a memcached-protocol "VALUE <key> <flags> <length>\r\n" header.
value_line = line("^VALUE\s+([^\s]+)\s+(\d+)\s+(\d+)\r\n$", [('key', 's'), ('flags', 'd'), ('length', 'd')])

def get_results(s):
    """Drain VALUE/payload result pairs from socket *s* until an END line.

    Raises ValueError on an unexpected header line or a payload whose
    length disagrees with the header.

    NOTE(review): in this visible span the parsed values are validated
    but never appended to `res`, and `res` is never returned -- this
    looks like a truncated snippet; confirm against the original source.
    """
    res = []

    f = s.makefile()
    while True:
        l = sock_readline(f)
        if l == 'END\r\n':
            break
        val_def = value_line.parse_line(l)
        if not val_def:
            raise ValueError("received unexpected line from rget: %s" % l)
        # Payload line follows the header; strip the trailing CRLF.
        val = sock_readline(f).rstrip()
        if len(val) != val_def['length']:
            raise ValueError("received value of unexpected length (expected %d, got %d: '%s')" % (val_def['length'], len(val), val))
Exemple #23
0
 def parse_client_meta(self, data):
     """Scan *data* for the stress-client banner line and summarize it.

     Returns a "D/U/I/R = d/u/i/r Duration = n" string built from the
     deletes/updates/inserts/reads load mix and the run duration.
     """
     client_line = line('\[host: [\d\.]+, port: \d+, clients: \d+, load: (\d+)/(\d+)/(\d+)/(\d+), keys: \d+-\d+, values: \d+-\d+ , duration: (\d+), batch factor: \d+-\d+, latency file: latency.txt, QPS file: qps.txt\]', [('deletes', 'd'), ('updates', 'd'), ('inserts', 'd'), ('reads', 'd'), ('duration', 'd')])
     m = until(client_line, data)
     assert m != False
     return "D/U/I/R = %d/%d/%d/%d Duration = %d" % (m['deletes'], m['updates'], m['inserts'], m['reads'], m['duration'])
Exemple #24
0
class RDBStats(TimeSeriesCollection):
    """Parses repeated RethinkDB "STAT name value" dumps into time series,
    then derives difference/ratio/slide metrics in process()."""

    # One pattern per value type (note the trailing \r from CRLF lines);
    # take_while() tries them in this order.
    int_line = line("^STAT\s+([\w\[\]]+)\s+(\d+|-)\r$", [('name', 's'),
                                                         ('value', 'd')])
    flt_line = line("^STAT\s+([\w\[\]]+)\s+([\d\.]+|-)\r$", [('name', 's'),
                                                             ('value', 'f')])
    str_line = line("^STAT\s+([\w\[\]]+)\s+(.+|-)\r$", [('name', 's'),
                                                        ('value', 's')])
    end_line = line("END", [])

    def parse(self, data):
        """Consume *data* (list of raw lines) into a timeseries dict.

        Each STAT block must be terminated by an END line; the list is
        reversed in place -- presumably take()/take_while() pop lines off
        the end of the list (TODO confirm helper semantics).
        """
        res = default_empty_timeseries_dict()
        data.reverse()

        while True:
            m = take_while([self.int_line, self.flt_line, self.str_line], data)
            if not m:
                break
            for match in m:
                res[match['name']] += [match['value']]

            m = take(self.end_line, data)
            if m == False:  # Incomplete stats, might happen if monitor was killed while retrieving stats response
                break

            # Sanity check: every series should have the same length after
            # each complete STAT block.
            if res:
                lens = map(lambda x: len(x[1]), res.iteritems())
                assert max(lens) == min(lens)
        return res

    def process(self):
        """Derive composite metrics (differences, ratios, slides) and drop
        the raw series they were computed from."""
        differences = [('io_reads_completed', 'io_reads_started'),
                       ('io_writes_started', 'io_writes_completed')]
        ratios = [('conns_reading_total', 'conns_total'),
                  ('conns_writing_total', 'conns_total'),
                  ('blocks_dirty', 'blocks_total'),
                  ('blocks_in_memory', 'blocks_total'),
                  ('serializer_old_garbage_blocks',
                   'serializer_old_total_blocks')]

        slides = [('io_writes_started', 'io_writes_completed')]
        #        differences = [('io_reads_completed', 'io_reads_started'),
        #                       ('io_writes_started', 'io_writes_completed'),
        #                       ('transactions_started', 'transactions_ready'),
        #                       ('transactions_ready', 'transactions_completed'),
        #                       ('bufs_acquired', 'bufs_ready'),
        #                       ('bufs_ready', 'bufs_released')]
        #        ratios = [('conns_in_btree_incomplete', 'conns_total'),
        #                  ('conns_in_outstanding_data', 'conns_total'),
        #                  ('conns_in_socket_connected', 'conns_total'),
        #                  ('conns_in_socket_recv_incomplete', 'conns_total'),
        #                  ('conns_in_socket_send_incomplete', 'conns_total'),
        #                  ('blocks_dirty', 'blocks_total'),
        #                  ('blocks_in_memory', 'blocks_total'),
        #                  ('serializer_old_garbage_blocks',  'serializer_old_total_blocks')]
        #
        #        slides = [('flushes_started', 'flushes_acquired_lock'),
        #                  ('flushes_acquired_lock', 'flushes_completed'),
        #                  ('io_writes_started', 'io_writes_completed')]
        keys_to_drop = set()
        for dif in differences:
            self.derive(dif[0] + ' - ' + dif[1], dif, difference)
            keys_to_drop.add(dif[0])
            keys_to_drop.add(dif[1])

        for rat in ratios:
            self.derive(rat[0] + ' / ' + rat[1], rat, ratio)
            keys_to_drop.add(rat[0])

        # Friendlier name for the garbage ratio series.
        self.remap(
            'serializer_old_garbage_blocks / serializer_old_total_blocks',
            'garbage_ratio')

        for s in slides:
            self.derive('slide(' + s[0] + ', ' + s[1] + ')', s, slide)
            keys_to_drop.add(s[0])
            keys_to_drop.add(s[1])

        self.drop(keys_to_drop)