Example 1
def isVMDead(hostname):
    #assuming that it is a storage VM
    dead=False
    path_id=get_host_path_id(hostname)
    try:
        #try two files
        rrd_file=config['rrd_path_vms_prefix']+"/"+path_id+"/"+config['vm_bytes_out_filename']
        dead=((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))>DEATH_TIME)
        if not dead:
            rrd_file=config['rrd_path_vms_prefix']+"/"+path_id+"/"+config['load_one_filename']
            dead=((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))>DEATH_TIME)
    except:
        dead=True
    if dead:
        return True
    else:
        #supposing now that it is a workload VM
        try:
            rrd_file=config['rrd_path_workload_hosts_prefix']+"/"+path_id+"/"+config['slo_throughput_filename']
            return ((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))>DEATH_TIME)
        except:
            #not a workload VM either: fall back to the monitor check
            dead=True
    if dead:
        #supposing now that it is a monitor VM
        try:
            rrd_file=config['rrd_path_monitor_prefix']+"/"+path_id+"/"+config['load_one_filename']
            return ((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))>DEATH_TIME)
        except:
            return True
Example 2
File: rrd.py Project: pombreda/MoaT
	def run(self,ctx,**k):
		event = self.params(ctx)
		if len(event) < 3:
			raise SyntaxError(u'Usage: var rrd ‹variable› ‹item› ‹name…›')
		s = RRDs[Name(*event[2:])]
		try:
			setattr(self.parent.ctx,event[0],rrdtool.info(s.upath)["ds"][s.dataset][event[1]])
		except KeyError:
			setattr(self.parent.ctx,event[0],rrdtool.info(s.upath)["ds[%s].%s" % (s.dataset,event[1])])
Example 3
def setupRRD(filename, data_sources, consolidation="AVERAGE"):
    step = 20
    try:
        RRD.info(filename)
    except RRD.error:
        RRD.create(filename, '--start', str(long(time.time())), '--step',
                   str(step), data_sources,
                   'RRA:%s:0.5:1:8640' % (consolidation),
                   'RRA:%s:0.5:3:10080' % (consolidation),
                   'RRA:%s:0.5:180:43830' % (consolidation))
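A minimal usage sketch for setupRRD above. The filename and the data-source definition string are illustrative placeholders, not taken from the original project; the DS format (DS:name:GAUGE:heartbeat:min:max) follows the convention used elsewhere on this page.

# Illustrative only: create temp.rrd (if missing) with one GAUGE data source,
# a 40-second heartbeat and unbounded min/max, using the default RRAs above.
setupRRD('temp.rrd', 'DS:temp:GAUGE:40:U:U')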
Example 5
    def exists(self):
        try:
            f = open(self.rrd_file)
        except FileNotFoundError:
            return False

        f.close()
        try:
            rrdtool.info(self.rrd_file)
        except rrdtool.OperationalError as err:
            raise FileNotFoundError(err)

        return True
Example 7
    def __init__(self, filename, check_type=True):
        "Initialize the class instance with a filename."

        if not os.access(filename, os.F_OK | os.R_OK):
            raise rrdtool.OperationalError('RRD {!s} cannot be opened.' \
              .format(filename))

        # Use rrdinfo to test whether the file is a valid RRD file
        if check_type is True:
            rrdtool.info(filename)

        self.readonly = not os.access(filename, os.W_OK)
        self.filename = filename
Example 8
 def list(self):
     yield super(RRD, self)
     yield ("file", self.path)
     yield ("dataset", self.dataset)
     try:
         for k, v in rrdtool.info(self.upath)["ds"][self.udataset].items():
             yield (k, v)
     except KeyError:
         s = "ds[%s]." % (self.udataset)
         d = rrdtool.info(self.upath)
         # mainly for testing
         for k in sorted(x for x in d.keys() if x.startswith(s)):
             yield (k[len(s):], d[k])
Example 9
File: rrd.py Project: pombreda/MoaT
	def list(self):
		yield ("name",self.name)
		yield ("file",self.path)
		yield ("dataset",self.dataset)
		try:
			for k,v in rrdtool.info(self.upath)["ds"][self.udataset].iteritems():
				yield (k,v)
		except KeyError:
			s="ds[%s]." % (self.udataset)
			d=rrdtool.info(self.upath)
			# mainly for testing
			for k in sorted(x for x in d.keys() if x.startswith(s)):
				yield (k[len(s):],d[k])
Example 10
def show_widget_with_current_and_past_value(widget):
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    #last_update = datetime.datetime.fromtimestamp(info["last_update"]).strftime("%m-%d %H:%M")
    last_update = str(info["last_update"])
    
    current = rrdtool.fetch(rrd_path, "-s", last_update + "-1", "-e", "s+0", "LAST")  
    yesterday = rrdtool.fetch(rrd_path, "-s", last_update + "-1d", "-e", "s+1", "LAST")
    lastweek = rrdtool.fetch(rrd_path, "-s", last_update + "-1w", "-e", "s+1", "LAST")
    
    current_value = current[2][0][0]
    field_def = None
    if widget.data_def:
        try:
            data_def = eval(widget.data_def.replace("\n", "").replace("\r", ""))
            ds = (current[1][0])
            if data_def.has_key(ds):
                field_def = data_def[ds]
                try:
                    data_rrd = rrdtool.fetch(rrd_path, "-s", str(int(last_update)-int(field_def[3]) * 60), "-e", last_update + "-1", "LAST")
                    data_rrd = map(lambda x:x[0],data_rrd[2])
                    current_value = format_value_def(field_def,data_rrd)
                except:
                    current_value = format_value(field_def, current_value)
            else:
                current_value = get_last_value(current)
        except:
            raise
            return widget.data_def
    else:
        current_value = get_last_value(current)
    
    return "<td>" + "</td><td>".join([current_value, get_last_value(yesterday, field_def) , get_last_value(lastweek, field_def)]) + "</td>"
Example 11
def show_widget_with_current_value(widget):
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    last_update = str(info["last_update"])
    
    current = rrdtool.fetch(rrd_path, "-s", last_update + "-1", "-e", "s+0", "LAST")
    
    if widget.data_def:
        try:
            data_def = eval(widget.data_def.replace("\n", "").replace("\r", ""))
            data = list(current[2][0])
            ds = current[1]
            for i in range(0, len(ds)):
                if data_def.has_key(ds[i]):
                    field_def = data_def[ds[i]]
                    try:
                        data_rrd = rrdtool.fetch(rrd_path, "-s", str(int(last_update)-int(field_def[3]) * 60), "-e", last_update + "-1", "LAST")
                        data_rrd = map(lambda x:x[i],data_rrd[2])
                        data[i] = format_value_def(field_def,data_rrd)
                    except:
                        data[i] = format_value(field_def, data[i])
                else:
                    data[i] = str(data[i])
        except:
            return widget.data_def
            #raise
    else:
        data = map(str, current[2][0])
    
    #return check_date(info) + get_last_value(current) + "</td>"
    
    return "<td>" + "</td><td>".join(data) + "</td>"
Example 12
    def getPeriodCF(self, start):
        """
        Retourne la fonction de calcul à appliquer
        pour la période commençant à la date donnée.

        @param start: Date de début de la période considérée.
        @type start: C{int}
        @return: Fonction de calcul à utiliser ("LAST" ou "AVERAGE").
        @rtype: C{str}
        """
        infos = rrdtool.info(self.filename)
        step = infos['step']
        now = time.mktime(datetime.datetime.utcnow().timetuple())
        i = 0
        rras = []
        while ('rra[%d].cf' % i) in infos:
            # Timestamp of the earliest value this RRA can hold.
            first = now - infos['rra[%d].rows' % i] * step
            # Skip this RRA if the requested date
            # falls before its earliest value.
            if start < first:
                i += 1
                continue

            rras.append({
                'cf': infos['rra[%d].cf' % i],
                'first': first,
            })
            i += 1
        # Sort the remaining RRAs by decreasing granularity.
        rras.sort(cmp=lambda a, b: int(b['first'] - a['first']))
        if not rras:
            return "AVERAGE"
        return rras[0]['cf']
Example 13
def rrd_info(rrd_file, raw=False):
    """ 
    Intended use is from shell. If you want the whole dict returned by
    rrdtool.info, set raw to true.
    """
    file_info = rrdtool.info(rrd_file)

    if raw:
        return file_info

    if not file_info.has_key('ds'):
        #=======================================================================
        # In version 1.3 the output from info is totally different. We just 
        # print a key/value output.
        #=======================================================================
        for key in sorted(file_info.keys()):
            print "%s: %s" % (key, file_info[key])
        return

    print "%s last updated %s" % (file_info['filename'],
                                 time.ctime(file_info['last_update']))
    print "Datasources (datasource: datasourcename):"
    for datasource in sorted(file_info['ds'].items()):
        print "  %s: %s" % (datasource[0], datasource[1]['ds_name'])

    print "RRA's (Step = %s):" % (file_info['step'])

    for rra in file_info['rra']:
        print "  %s: %s/%s" % (rra['cf'], rra['pdp_per_row'], rra['rows'])
Example 14
def rrdupdate(owdata):
    if config.rrdenable:
        stime = int(time.mktime(time.localtime()))
        path = config.rrdpath
        step = 300
        hb = 3600
        xff = 0.5
        HOUR = 3600
        YEAR = 31536000
        steps1 = 1
        rows1 = YEAR // step
        for sensor in owdata:
            (value, timestamp) = owdata[sensor]
            if value == config.owfail:
                continue
            rrdfile = '%s/%s.rrd' % (path, sensor.upper())
            if not os.path.isfile(rrdfile):
                try:
                    rrdtool.create(
                        rrdfile, '--step', '%d' % step,
                        'DS:data:GAUGE:%d:U:U' % hb,
                        'RRA:AVERAGE:%d:%d:%d' % (xff, steps1, rows1))
                except rrdtool.error, e:
                    logger.warning(e)
                logger.debug("RRD %s created" % sensor)
            info = rrdtool.info(rrdfile)
            if ((stime - info['last_update']) > step):
                try:
                    rrdtool.update(rrdfile, '%s:%s' % (timestamp, value))
                except rrdtool.error, e:
                    logger.warning(e)
                logger.debug("RRD %s updated" % sensor)
Example 15
    def _read_rrd(self, rrd_path, hostname, device_name):
        """Main metric fetching method.

        """
        import rrdtool
        metric_count = 0

        try:
            info = rrdtool.info(rrd_path)
        except Exception:
            # Unable to read RRD file, ignore it
            self.log.exception("Unable to read RRD file at %s" % rrd_path)
            return metric_count

        # Find the consolidation functions for the RRD metrics
        c_funcs = set([v for k, v in info.items() if k.endswith('.cf')])

        for c in list(c_funcs):
            last_ts_key = '%s.%s' % (rrd_path, c)
            if last_ts_key not in self.last_ts:
                self.last_ts[last_ts_key] = time.time()
                continue

            start = self.last_ts[last_ts_key]
            last_ts = start

            try:
                fetched = rrdtool.fetch(rrd_path, c, '--start', str(start))
            except rrdtool.error:
                # Start time was out of range, skip this RRD
                self.log.warn("Time %s out of range for %s" %
                              (rrd_path, start))
                return metric_count

            # Extract the data
            (start_ts, end_ts, interval) = fetched[0]
            metric_names = fetched[1]
            points = fetched[2]
            for k, m_name in enumerate(metric_names):
                m_name = self._format_metric_name(m_name, c)
                for i, p in enumerate(points):
                    ts = start_ts + (i * interval)

                    if p[k] is None:
                        continue

                    # Save this metric as a gauge
                    val = self._transform_metric(m_name, p[k])
                    self.gauge(m_name,
                               val,
                               hostname=hostname,
                               device_name=device_name,
                               timestamp=ts,
                               dimensions=self.dimensions)
                    metric_count += 1
                    last_ts = (ts + interval)

            # Update the last timestamp based on the last valid metric
            self.last_ts[last_ts_key] = last_ts
        return metric_count
Example 17
def check_aberration(rrdpath, fname):
    """ This will check for begin and end of aberration
        in file. Will return:
        0 if aberration not found.
        1 if aberration begins
        2 if aberration ends
    """
    ab_status = 0
    rrdfilename = rrdpath + fname

    info = rrdtool.info(rrdfilename)
    rrdstep = int(info['step'])
    lastupdate = info['last_update']
    previosupdate = str(lastupdate - rrdstep - 1)
    graphtmpfile = tempfile.NamedTemporaryFile()
    # Ready to get FAILURES  from rrdfile
    # will process failures array values for time of 2 last updates
    values = rrdtool.graph(
        graphtmpfile.name + 'F',
        'DEF:f0=' + rrdfilename + ':inoctets:FAILURES:start=' + previosupdate +
        ':end=' + str(lastupdate), 'PRINT:f0:MIN:%1.0lf',
        'PRINT:f0:MAX:%1.0lf', 'PRINT:f0:LAST:%1.0lf')
    print values
    fmin = int(values[2][0])
    fmax = int(values[2][1])
    flast = int(values[2][2])
    print("fmin=" + fmin + ", fmax=" + fmax + ",flast=" + flast)
    # check if failure value had changed.
    if (fmin != fmax):
        if (flast == 1):
            ab_status = 1
        else:
            ab_status = 2
    return ab_status
Example 18
def info_rrd(rrdfile_name):
    """
    is not currently in use
    info_rrd:
    get info about an existing rrd

    Args:
        rrdfile_name (string): it is the name of your file/rrd

    Returns:
        info_msg (dict): get info about the rrd
    """
    info_content = []
    info_status_msg = ""
    # read the RRD metadata
    try:
        db_info = rrdtool.info(rrdfile_name)
        info_content = db_info
        info_status_msg = f"success: {rrdfile_name}: was read successfully"
    except Exception as e:
        info_status_msg = f"error: rrd update error: {sys.exc_info()[1]} \n{e}"
    info_msg = dict()
    info_msg['data'] = info_content
    info_msg['status'] = info_status_msg
    return info_msg
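A hedged usage sketch for info_rrd above; 'example.rrd' is a placeholder path. The returned dict carries the rrdtool.info output under 'data' and a human-readable status under 'status'.

msg = info_rrd('example.rrd')
if msg['status'].startswith('success'):
    # e.g. the last update timestamp from the rrdtool.info dict
    print(msg['data']['last_update'])
else:
    print(msg['status'])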
Example 19
    def TuneHoltData(self, dp):
        # Alpha
        rrdtool.tune(self.context().path(dp),'--alpha',str(self.alpha))

        # Beta
        rrdtool.tune(self.context().path(dp),'--beta',str(self.beta))

        # Gamma
        rrdtool.tune(self.context().path(dp),'--gamma',str(self.gamma))
        rrdtool.tune(self.context().path(dp),'--gamma-deviation',str(self.gamma))

        # Delta
        rrdtool.tune(self.context().path(dp),'--deltapos',str(self.delta))
        rrdtool.tune(self.context().path(dp),'--deltaneg',str(self.delta))


        # Check that these actually changed otherwise we will reset
        # violation counts to 0
        data = rrdtool.info(self.context().path(dp))
        rrasearch = re.compile('^rra\[(\d+)\]\.(\S+)')
        sorted_keys=data.keys()
        sorted_keys.sort()
        for key in sorted_keys:
            value=data[key]
            rra_match=rrasearch.search(key)
            if rra_match:
                rranum,rraprop=rra_match.groups()

                # Failure Threshold
                if rraprop == 'failure_threshold' and value != self.threshold:
                    rrdtool.tune(self.context().path(dp),'--failure-threshold',str(self.threshold))

                # Window Length
                if rraprop == 'window_length' and value != self.window:
                    rrdtool.tune(self.context().path(dp),'--window-length',str(self.window))
Example 21
def isPeerAlive(path_to_rrd,rrd_filename,peer):
    path_id=get_host_path_id(peer)    
    try:
        rrd_file=path_to_rrd+"/"+path_id+"/"+rrd_filename
        return ((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))<=ALIVE_TIME)
    except:
        return False
Example 22
 def getDataSources(self):
     try:
         info = rrdtool.info(self.fs_path)
         return [RRDDataSource(self, source) for source in info['ds']]
     except:
         raise
         return []
Example 23
def listSources():
  hwid = request.args.get('hwid')
  device = ceresdb.devices.find_one({'hwid' : hwid})
  if device['username'] != session['username']:
    flash('Wrong username [' + session['username'] + '] for device [' + hwid + ']')
    return redirect(url_for('myceres'))

  info = rrdtool.info(str(device['file']))
  step = int(info['step'])

  # Pull out all of the source names (e.g. temperature, light, etc..)
  r = re.compile('ds\[(.*)\]\.type')
  sourcenames = [match.group(1) for match in [r.match(key) for key in info if r.match(key)]]

  RRAs = []
  # Extract the info about the RRAs
  r = re.compile('rra\[(.*)\]\.cf')
  for RRAid in sorted([int(match.group(1)) for match in [r.match(key) for key in info if r.match(key)]]):
    rra = 'rra['+str(RRAid)+']'

    if info[rra + '.cf'] != 'AVERAGE':
      continue

    RRAs.append({
        'resolution' : info[rra + '.pdp_per_row'] * step,
        'totaltime'  : info[rra + '.pdp_per_row'] * step * info[rra + '.rows']
    })

  RRAs = sorted(RRAs, key=lambda k: k['resolution'])
  return jsonify(data={'RRAs' : RRAs, 'sources' : sourcenames})
Example 24
def rrd_check_equality(configname, rrd_name, rrd_conf):
    dsequality = False
    rraequality = False

    try:
        info = rrdtool.info(f"rrd/{configname}/{rrd_name}.rrd")
        if info['step'] != config.STEP:
            return False
    except rrdtool.OperationalError:
        return False

    for ds_def in rrd_conf['ds']:
        try:
            groups = dsregexp.match(ds_def).groups()
            key = "ds[{0}]".format(groups[0])
            dsequality = info[f"{key}.type"] == groups[1] and str(
                info[f"{key}.minimal_heartbeat"]) == groups[2]
        except KeyError:
            return False

    for index, rra_def in enumerate(rrd_conf['rra']):
        try:
            groups = rraregexp.match(rra_def).groups()
            key = f"rra[{index}]"
            rraequality = info[f"{key}.cf"] == groups[0] and str(
                info[f"{key}.rows"]) == groups[3] and str(
                    info[f"{key}.pdp_per_row"]) == groups[2]
        except KeyError:
            return False

    return dsequality and rraequality
Example 25
def getLoadWeek(server_id):
    ''' get server current load and one week load'''
    import rrdtool
    try:
        rrdPath = s_service.objects.get(
            server__id=server_id,
            category__title__contains="Perfmon").rrd.path()
    except:
        return "</td><td>"

    info = rrdtool.info(rrdPath)
    last_update = str(info["last_update"])
    current = rrdtool.fetch(rrdPath, "-s", last_update + "-604801", "-e",
                            last_update, "LAST")
    load = ""
    ls = []
    loadAvg = ""
    for i in range(len(current[1])):
        if current[1][i] == "load":
            for l in current[2][-10:]:
                if l[i] != None and l[i] != "nan":
                    load = l[i]
            for l in current[2]:
                try:
                    ls.append(float(l[i]))
                except:
                    pass
    if load == "nan": load = ""
    try:
        load = int(load)
    except:
        pass
    if ls != []:
        loadAvg = str(sum(ls) / len(ls))[:5]
    return str(load) + "</td><td>" + loadAvg
Example 28
    def info(self, context, target=None):
        # Figure out the target.
        if target == None:
            target = socket.getfqdn()
        rrdpath = os.path.join(FLAGS.canary_rrdpath, target)

        # Grab available metrics.
        available = glob.glob(os.path.join(rrdpath, "*/*.rrd"))
        metrics = {}

        for filename in available:
            # NOTE: Not sure quite why, but it seems like
            # the rrdtool commands below barf unless
            # this happens -- maybe barfing on unicode?
            filename = str(filename)

            m = re.match("^%s/([^\/-]+)(-([^\/]+))?/([^\.]+)\.rrd$" % rrdpath, filename)
            if m:
                plugin = m.group(1)
                unit = m.group(3)
                key = m.group(4)

                # NOTE: At this point we construct a metric name that is
                # equivalent to how we deconstruct the name above. It's
                # important that these two operations are symmetric for
                # the sake of a user's sanity.
                if unit:
                    metric = "%s[%s].%s" % (plugin, unit, key)
                else:
                    metric = "%s.%s" % (plugin, key)
                if not (metric in metrics):
                    metrics[metric] = {}

                metrics[metric]["from_time"] = rrdtool.first(filename)
                metrics[metric]["to_time"] = rrdtool.last(filename)

                step = 1
                pdps = []
                cfs = []
                for (k, v) in rrdtool.info(filename).items():
                    if re.match("^step$", k):
                        step = int(v)
                        continue
                    elif re.match("^rra\[\d+\]\.pdp_per_row$", k):
                        pdps.append(int(v))
                        continue
                    elif re.match("^rra\[\d+\]\.cf", k):
                        cfs.append(v)
                        continue

                pdps = list(set(pdps))
                cfs = list(set(cfs))
                cfs.sort()
                resolutions = map(lambda x: step * x, pdps)
                resolutions.sort()

                metrics[metric]["cfs"] = cfs
                metrics[metric]["resolutions"] = resolutions

        return metrics
Example 29
def fetch_last(rrddef):
    info = rrdtool.info(rrddef.path)
    try:
        value = info['ds[%s].last_ds' % (rrddef.name, )]
    except KeyError, e:
        current_app.logger.exception(e)
        value = None
    return value
Example 30
def _convertFile(filename):
    import rrdtool
    try:
        if rrdtool.info(filename)['ds[ds0].type'] == 'COUNTER':
            rrdtool.tune(filename, '-d', 'ds0:DERIVE', '-i', 'ds0:0')
    except KeyError:
        pass
Example 31
def render(fname):
    print "Rendering new image."
    info = rrdtool.info(fname)
    for i in info:
	print "{} : {}".format(i,info[i])

    DAY = 86400
    DAYS_5 = DAY * 5
    YEAR = 365 * DAY
    rrdtool.graph('image-temp.png',
              '--imgformat', 'PNG',
              '--width', '540',
              '--height', '100',
              '--start', "-%i" % DAY,
              '--end', "-1",
              '--vertical-label', 'Temperature C',
              '--title', 'Temperature over last 24h',
              '--lower-limit', '0',
              'DEF:temp=%s:sensor1-temp:AVERAGE' % fname ,
              'VDEF:tempavg=temp,AVERAGE' ,
              'VDEF:tempmax=temp,MAXIMUM' ,
              'VDEF:tempmin=temp,MINIMUM' ,
              'AREA:temp#01DF01:Temperature' ,
              'LINE:tempmax#FF0000:Max' ,
              r'GPRINT:tempmin:Min\: %6.1lf C' ,
              r'GPRINT:tempmax:Max\: %6.1lf C' ,
              r'GPRINT:tempavg:Avg\: %6.1lf C' ,
              r'GPRINT:temp:LAST:Cur\: %5.2lf C' ,
              )
Example 32
def latest(hostname):
    rrdname = "rrds/" + hostname + "_users.rrd"
    try:
        info = rrdtool.info(rrdname)
        lastupdate = [info['last_update'], info['ds[users].last_ds']]
        return lastupdate
    except:
        return None
Example 33
def latest(hostname):
    rrdname = "rrds/" + hostname + "_ansible.rrd"
    try:
        info = rrdtool.info(rrdname)
        lastupdate = [info['last_update'], info['ds[ok].last_ds'], info['ds[change].last_ds'], info['ds[unreachable].last_ds'], info['ds[failed].last_ds']]
        return lastupdate
    except:
        return None
Example 34
def convert_to_whisper(rrdfile, mapping, extra_retention=None):
    """Convert a rrd-file to whisper"""

    rrd_file = str(join(rrdfile.path, rrdfile.filename))
    try:
        rrd_info = rrdtool.info(rrd_file)
    except rrdtool.error, error:
        _logger.error(error)
Example 35
def isPeerDead(path_to_rrd,rrd_filename,peer):
    #assuming that it is a storage VM
    path_id=get_host_path_id(peer)
    try:
        rrd_file=path_to_rrd+"/"+path_id+"/"+rrd_filename
        return ((long(time.time())-(rrdtool.info(rrd_file)["last_update"]))>DEATH_TIME)
    except:
        return True
Example 36
 def getDataSources(self):
   info = rrdtool.info(self.fs_path)
   if 'ds' in info:
     return [RRDDataSource(self, datasource_name) for datasource_name in info['ds']]
   else:
     ds_keys = [ key for key in info if key.startswith('ds[') ]
     datasources = set( key[3:].split(']')[0] for key in ds_keys )
     return [ RRDDataSource(self, ds) for ds in datasources ]
Example 37
def get_temp():
    db_rrd = rrdtool.info(db_rrd_path)
    temperature = [
        db_rrd["ds[Sensor0].last_ds"], db_rrd["ds[Sensor1].last_ds"],
        db_rrd["ds[Sensor2].last_ds"], db_rrd["ds[Sensor3].last_ds"],
        db_rrd["ds[Sensor4].last_ds"], db_rrd["ds[Sensor5].last_ds"]
    ]
    return (temperature)
Example 38
def show_widget_header(widget):
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    last_update = str(info["last_update"])

    current = rrdtool.fetch(rrd_path, "-s", last_update, "-e", "s+1", "LAST")
    titles = map(lambda x: x.replace("_", " "), current[1])
    return "<th>" + "</th><th>".join(titles) + "</th>"
Example 39
def crearGrafica(valoresy, tiempoInicial, steps, names, archivo):
    print(rrdtool.info(archivo))
    muestras = len(valoresy)
    ret = rrdtool.create('predicciones/trend.rrd', "--start",
                         str(tiempoInicial), "--step", str(steps),
                         "DS:" + names[0] + ":GAUGE:600:U:U",
                         "RRA:AVERAGE:0.5:1:" + str(muestras))
    return 'predicciones/' + archivo
Example 41
def getMonitorData(rrd_file):
    rrd_info = rrdtool.info(rrd_file)
    last_update = rrd_info['last_update'] - 60
    args = '-s ' + str(last_update)
    results = rrdtool.fetch(rrd_file, 'AVERAGE', args)
    lost_package_num = int(results[2][0][1])
    average_rrt = 0 if not results[2][0][2] else results[2][0][2] * 1000
    return lost_package_num, round(average_rrt, 4)
Example 42
def rrd_ds(file):
    info = rrdtool.info(file)
    ds_re = re.compile(r'ds\[(\S+)\]')
    ds = ds_re.search(str(info))

    if ds:
        result = ds.group(1)
        return result
Example 44
def main():
    #setup logging
    logging.basicConfig(level=logging.DEBUG, filename='/home/pi/Desktop/Glovebox_Monitoring/logfile.txt')
    #setup GPIO
    GPIO.setmode(GPIO.BCM)
    for pin in config.GPIO_pins:
        GPIO.setup(pin, GPIO.IN, GPIO.PUD_DOWN)
    #setup static folder if necessary
    directory = os.path.split(config.database_path_analog)[0]
    if not os.path.exists(directory):
        os.makedirs(directory)
    #setup database
    if not os.path.isfile(config.database_path_analog): #does database already exist?
        CreateDatabase()
        print('database created, Info:')
    else:
        print('database continued, Info:') 
    for key, value in rrdtool.info(config.database_path_analog).items():
        print('{} : {}'.format(key,value))
    print('rrdtool.lib_version = ' + rrdtool.lib_version())        
    #setup datafile
    if not os.path.isfile(config.datafile_path_digital): #does file already exist?
        file_digital = open(config.datafile_path_digital, 'a', os.O_NONBLOCK)
        file_digital.write(';'.join(['time']+config.channel_names_digital) + '\n') # writing header
        file_digital.write(';'.join([datetime.now().isoformat(' ')[:-5],*['-1']*6]))
    else:
        file_digital = open(config.datafile_path_digital, 'a', os.O_NONBLOCK)
    #setup spi
    SPI_PORT   = 0
    SPI_DEVICE = 0
    mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
    print('measuring..')
    digital = np.ones(6,dtype=np.int8)*-1
    sending_email_possible = False
    while 1:
        time_before = time.time()
        digital_new, analog_new, time_new = MeasureData(mcp)
        argon_pressure = analog_new[0]
        if argon_pressure > config.Ar_hysteresis_for_email[1]:
            sending_email_possible = True
        if argon_pressure < config.Ar_hysteresis_for_email[0] and sending_email_possible == True:
            SendEmail()
            print('Argon pressure < {} bar!!!, Email sent!'.format(config.Ar_hysteresis_for_email[0]))
            sending_email_possible = False
        if any(digital_new != digital):
            time_str = time_new.isoformat(' ')[:-5] # with accuracy of tenths of a second
            digital_str = ';'.join([str(d) for d in digital_new])
            print(digital_str)
            file_digital.write('{};{}\n'.format(time_str,digital_str))
            file_digital.flush()
            digital = digital_new
        time_rrd = '{:.3f}'.format(time.time())
        rrdtool.update(config.database_path_analog,'{}:{}:{}'.format(time_rrd,analog_new[0],analog_new[1])) # time_rrd is an explicit unix timestamp (N would mean "now")
        time_delta = 0.1 - (time.time()-time_before)
        if time_delta < 0:
            print('WARNING: Measurement loop not fast enough: time_delta = {}!'.format(time_delta))
        else:
            time.sleep(time_delta) # seconds
Example 45
def rtl_433_probe(sock):
    next_graph = {}

    while True:
        line, _addr = sock.recvfrom(1024)

        try:
            line = parse_syslog(line)
            data = json.loads(line)
            now = int(time.time())

            label = sanitize(data["model"])
            if "channel" in data:
                label += ".CH" + str(data["channel"])
            elif "id" in data:
                label += ".ID" + str(data["id"])
            rrd_file = RRD_PATH + label + ".rrd"

            if "type" in data and data["type"] == "TPMS":
                continue

            if "temperature_C" not in data:
                continue
            temperature = data["temperature_C"]

            humidity = "U"
            if "humidity" in data:
                humidity = data["humidity"]

            try:
                rrdtool.info(rrd_file)
            except rrdtool.OperationalError:
                create_rrd(rrd_file)

            update_rrd(rrd_file, temperature, humidity)

            if label not in next_graph or next_graph[label] < now:
                graph_rrd(rrd_file, label, GRAPH_PATH)
                next_graph[label] = now + GRAPH_INTERVAL

        except KeyError:
            pass

        except ValueError:
            pass
Example 46
    def _form_entries_from_file(self, root, name):
        info = rrdtool.info(str(os.path.join(root, name)))
        dses = set()
        for entry in info.keys():
            match = ds_re.match(entry)
            if match:
                dses.add(match.group(1))

        return [{'type': 'file', 'name': "%s_%s" % (name[:-4], stat)} for stat in sorted(list(dses))]
Example 47
    def get_datasources(fs_path):
        info = rrdtool.info(fs_path)

        if "ds" in info:
            return [datasource_name for datasource_name in info["ds"]]
        else:
            ds_keys = [key for key in info if key.startswith("ds[")]
            datasources = set(key[3:].split("]")[0] for key in ds_keys)
            return list(datasources)
Example 49
    def process(self, fullpath):
        "convert a single file"
        log.debug("processing %s" % fullpath)
        newpath = os.path.join(os.path.dirname(fullpath),
                               '.' + os.path.basename(fullpath))
        # get type, old step
        info = rrdtool.info(fullpath)
        dataType = info['ds[ds0].type']

        try:
            os.unlink(newpath)
        except OSError:
            pass

        rraList = []
        for rraIndex in count():
            rra = {}
            rra['pdp_per_row'] = info.get('rra[%s].pdp_per_row' % rraIndex)
            rra['rows'] = info.get('rra[%s].rows' % rraIndex)
            if rra['pdp_per_row'] is None or rra['rows'] is None:
                break
            rraList.append(rra)

        # Collect some information about the current file:
        # how far back can the data go?
        earliest = info['last_update']

        # how wide is the biggest data point?
        biggest = 0
            
        for rra in rraList:
            size = rra['pdp_per_row'] * info['step']
            earliest = min(earliest, info['last_update'] - rra['rows'] * size)
            biggest = max(biggest, size)
            
        # create a file with the correct step to accept the data
        rrdtool.create(
            newpath,
            'DS:ds0:%s:%d:U:U' % (dataType, biggest * 2),
            '--start', str(earliest),
            '--step', str(self.options.step),
            *self.defaultRRDCommand.split())
            
        # extract the time and values from each archive
        updates = {}
        
        for rra in rraList:            
            self.processRRA(fullpath, rra, info, updates)
            
        # get the times in order
        updates = updates.items()
        updates.sort()
        rrdtool.update(newpath, *[('%d@%s' % (t, v)) for t, v in updates])
        # use a reasonable heartbeat
        rrdtool.tune(newpath, '-h','ds0:%d' % (self.options.step*3))
        if self.options.commit:
            os.rename(newpath, fullpath)
Example 50
  def get_datasources(fs_path):
    info = rrdtool.info(RRDReader._convert_fs_path(fs_path))

    if 'ds' in info:
      return [datasource_name for datasource_name in info['ds']]
    else:
      ds_keys = [ key for key in info if key.startswith('ds[') ]
      datasources = set( key[3:].split(']')[0] for key in ds_keys )
      return list(datasources)
Example 51
def list_variables(filename):
    info = rrdtool.info(str(filename))
    datasources = [
        ds[3:-7]
        for ds in info.keys()
        if ds.startswith("ds[") and ds.endswith("].index")
    ]
    datasources.sort(key = lambda(ds): info["ds[%s].index" % (ds)])
    return datasources
Example 54
    def build_from_file(self, fname):
        """
        Build RRD info from the given filename
        """
        info = rrdtool.info(fname)

        for (key, val) in info.iteritems():
            #ignore RRAs, and only examine ds[***] entries
            self.push_item(key, val)
Example 56
def getValues():
  now = int(time.time())
  kellerinfo = rrdtool.info(DATADIR+'/keller.rrd')
  ausseninfo = rrdtool.info(DATADIR+'/aussen.rrd')
  faninfo = rrdtool.info(DATADIR+'/fan.rrd')
  if ( now - kellerinfo["last_update"] > 600 ):
    Tkeller = "U"
    RHkeller = "U"
    DPkeller = "U"
    AHkeller = "U"
  else:
    Tkeller = kellerinfo["ds[T].last_ds"]
    RHkeller = kellerinfo["ds[RH].last_ds"]
    DPkeller = kellerinfo["ds[DP].last_ds"]
    AHkeller = kellerinfo["ds[AH].last_ds"]
  if ( now - ausseninfo["last_update"] > 600 ):
    Taussen = "U"
    RHaussen = "U"
    DPaussen = "U"
    AHaussen = "U"
  else:
    Taussen = ausseninfo["ds[T].last_ds"]
    RHaussen = ausseninfo["ds[RH].last_ds"]
    DPaussen = ausseninfo["ds[DP].last_ds"]
    AHaussen = ausseninfo["ds[AH].last_ds"]
  if ( now - faninfo["last_update"] > 240 ):
    Fan = "U"
  else:
    Fan = faninfo["ds[on].last_ds"]
  return {
    'last_keller' : kellerinfo["last_update"],
    'Tkeller' : Tkeller,
    'RHkeller' : RHkeller,
    'DPkeller' : DPkeller,
    'AHkeller' : AHkeller,
    'last_aussen' : ausseninfo["last_update"],
    'Taussen' : Taussen,
    'RHaussen' : RHaussen,
    'DPaussen' : DPaussen,
    'AHaussen' : AHaussen,
    'last_fan' : faninfo["last_update"],
    'Fan': Fan
  }
Example 57
 def callHomeData(self):
     volume = 0.0
     zenhome_ = zenhome.value
     for dirpath, dirnames, filenames in os.walk("{zenhome_}/perf/Devices".format(**locals())):
         for filename in filenames:
             if filename.endswith(".rrd"):
                 rrdinfo = rrdtool.info("{dirpath}/{filename}".format(**locals()))
                 if time.time() - rrdinfo["last_update"] < rrdinfo["step"] * 12: # 12 is fudge factor to take rrdcached into consideration
                     volume += 1.0 / rrdinfo["step"]
     yield "Collection Volume", "{volume:.1f} datapoints per second".format(**locals())
Example 58
    def get_vm_total(self, vm, names=['cpu_time', 'rd_req', 'rd_bytes', 'wr_req', 'wr_bytes', 'rx_bytes', 'tx_bytes']):
        if not check_stat_exists(vm):
            raise CMException('stat_not_exists')
        filename = get_path(vm)
        ds_info = rrdtool.info(filename)

        ds_all = {}
        for i in names:
            ds_all[i] = ds_info['ds[%s].last_ds' % i]
        return ds_all
Example 59
def get_info(rrd_name):
    """Retrieve the information about a specific RRD.

    :param rrd_name: The name of the desired RRD.
    :type rrd_name: str
    :return: The information regarding the named RRD, as a dictionary.
    :rtype: dict
    """
    # Force rrd_name to str since rrdtool doesn't like Unicode or other kinds of strings
    return rrdtool.info(str(rrd_name))
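Taken together, the examples above use rrdtool.info() mainly for two things: reading metadata such as step and last_update (often for staleness checks), and enumerating data sources via the ds[...] keys. A minimal combined sketch follows; the path argument and the 300-second threshold are chosen purely for illustration, and the data-source extraction assumes the flattened key layout shown in Examples 36 and 47.

import time
import rrdtool

def summarize_rrd(rrd_path, max_age=300):
    # rrdtool.info expects a plain str path; with rrdtool >= 1.3 bindings it
    # returns a flat dict with keys like 'step', 'last_update', 'ds[name].type'.
    info = rrdtool.info(str(rrd_path))
    # Staleness check, as in Examples 1, 14, 21 and 56.
    stale = (time.time() - info['last_update']) > max_age
    # Data-source names recovered from the flattened ds[...] keys.
    ds_names = sorted(set(key[3:].split(']')[0]
                          for key in info if key.startswith('ds[')))
    return {'step': info['step'], 'stale': stale, 'datasources': ds_names}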