Example #1
0
def show_widget_with_current_and_past_value(widget):
    """Render one HTML table row fragment: the widget's current value plus
    the values from one day and one week ago, as "<td>...</td>" cells.

    ``widget.data_def`` (optional) is eval()'d into a dict mapping
    datasource name -> formatting definition; entry [3] is presumably a
    window length in minutes -- TODO confirm against the data_def writers.
    NOTE(review): eval() of stored text is unsafe if data_def is
    user-editable. Python 2 only (dict.has_key, list-returning map).
    """
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    #last_update = datetime.datetime.fromtimestamp(info["last_update"]).strftime("%m-%d %H:%M")
    last_update = str(info["last_update"])

    # Three fetches anchored at the RRD's last update: now, -1 day, -1 week.
    current = rrdtool.fetch(rrd_path, "-s", last_update + "-1", "-e", "s+0", "LAST")
    yesterday = rrdtool.fetch(rrd_path, "-s", last_update + "-1d", "-e", "s+1", "LAST")
    lastweek = rrdtool.fetch(rrd_path, "-s", last_update + "-1w", "-e", "s+1", "LAST")

    current_value = current[2][0][0]  # first row, first datasource
    field_def = None
    if widget.data_def:
        try:
            # data_def is a dict literal stored as text.
            data_def = eval(widget.data_def.replace("\n", "").replace("\r", ""))
            ds = (current[1][0])  # name of the first datasource
            if data_def.has_key(ds):
                field_def = data_def[ds]
                try:
                    # Fetch the last field_def[3] minutes and format the series.
                    data_rrd = rrdtool.fetch(rrd_path, "-s", str(int(last_update)-int(field_def[3]) * 60), "-e", last_update + "-1", "LAST")
                    data_rrd = map(lambda x:x[0],data_rrd[2])
                    current_value = format_value_def(field_def,data_rrd)
                except:
                    # Fall back to single-value formatting on any failure.
                    current_value = format_value(field_def, current_value)
            else:
                current_value = get_last_value(current)
        except:
            raise
            # NOTE(review): unreachable -- the raise above always exits first.
            return widget.data_def
    else:
        current_value = get_last_value(current)

    return "<td>" + "</td><td>".join([current_value, get_last_value(yesterday, field_def) , get_last_value(lastweek, field_def)]) + "</td>"
Example #2
0
def show_widget_with_current_value(widget):
    """Render the widget's current values as "<td>...</td>" HTML cells,
    one cell per datasource in the RRD.

    ``widget.data_def`` (optional) is eval()'d into a dict mapping
    datasource name -> formatting definition. NOTE(review): eval() of
    stored text is unsafe if data_def is user-editable. Python 2 only
    (dict.has_key, list-returning map).
    """
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    last_update = str(info["last_update"])

    # Last stored row for every datasource, anchored at the last update.
    current = rrdtool.fetch(rrd_path, "-s", last_update + "-1", "-e", "s+0", "LAST")

    if widget.data_def:
        try:
            data_def = eval(widget.data_def.replace("\n", "").replace("\r", ""))
            data = list(current[2][0])  # values of the first (only) row
            ds = current[1]             # datasource names
            for i in range(0, len(ds)):
                if data_def.has_key(ds[i]):
                    field_def = data_def[ds[i]]
                    try:
                        # field_def[3] is presumably a window in minutes -- TODO confirm.
                        data_rrd = rrdtool.fetch(rrd_path, "-s", str(int(last_update)-int(field_def[3]) * 60), "-e", last_update + "-1", "LAST")
                        data_rrd = map(lambda x:x[i],data_rrd[2])
                        data[i] = format_value_def(field_def,data_rrd)
                    except:
                        # Fall back to single-value formatting on any failure.
                        data[i] = format_value(field_def, data[i])
                else:
                    data[i] = str(data[i])
        except:
            # On any parsing error show the raw definition text instead.
            return widget.data_def
            #raise
    else:
        data = map(str, current[2][0])

    #return check_date(info) + get_last_value(current) + "</td>"

    return "<td>" + "</td><td>".join(data) + "</td>"
Example #3
0
 def getStartTime(self):
     """Return the timestamp of the first row of the RRD that contains at
     least one non-null value.

     Retries the fetch with a one-hour margin on both ends when rrdtool
     rejects the exact range (daylight-saving shifts). Raises RRDError
     when every row up to the last update is empty.
     """
     first = rrdtool.first(self.filename)
     end = rrdtool.last(self.filename)
     cf = self.getPeriodCF(first)
     try:
         info, _ds_rrd, data = rrdtool.fetch(
             self.filename, cf, "--start", str(first), "--end", str(end))
     except rrdtool.error:
         # Adjust for daylight saving times
         first -= 3600
         end += 3600
         info, _ds_rrd, data = rrdtool.fetch(
             self.filename, cf, "--start", str(first), "--end", str(end))
     step = info[2]  # info is (start, end, step)
     for row in data:
         if any(value is not None for value in row):
             break
         first += step
     if first >= end:
         raise RRDError("The RRD file looks empty !")
     return first
Example #4
0
def run(args, processId):
    """Run one simulated collection cycle against a per-process RRD tree.

    Writes one GAUGE datapoint per device datapoint and one DERIVE
    datapoint per interface datapoint (immediately reading each DERIVE
    point back via rrdtool.fetch to exercise read load).

    Returns:
        (cycle_duration_seconds, datapoint_count), or (0, 0) when the
        run is interrupted with Ctrl-C.
    """
    try:
        perfPath = "/".join([args.perf_path, str(processId)])

        create_cmd = 'RRA:AVERAGE:0.5:1:600\nRRA:AVERAGE:0.5:6:600\nRRA:AVERAGE:0.5:24:600\nRRA:AVERAGE:0.5:288:600\nRRA:MAX:0.5:6:600\nRRA:MAX:0.5:24:600\nRRA:MAX:0.5:288:600'
        rrd = RRDUtil(create_cmd, args.cycle_time)

        cycle_begin = time()
        dp_count = 0
        # (Removed the unused per-device `begin = time()` local -- it was
        # assigned every iteration and never read.)
        for device in map(str, range(args.device_count)):
            for device_dp in map(str, range(args.device_datapoints)):
                dp_count += 1
                rrd.save(os.path.join(perfPath, device, device_dp), 42, 'GAUGE')

            for interface in map(str, range(args.component_count)):
                for interface_dp in map(str, range(args.component_datapoints)):
                    dp_count += 1
                    path = os.path.join(perfPath, device, 'os', 'interfaces', interface,
                                        interface_dp)
                    rrd.save(path, 42, 'DERIVE')
                    # Read back the last two cycles' worth of data.
                    rrdtool.fetch(rrd.performancePath(path) + '.rrd',
                                  'AVERAGE', '-s', 'now-%d' % (args.cycle_time*2), '-e', 'now')

        cycle_duration = time() - cycle_begin
        return (cycle_duration, dp_count)
    except KeyboardInterrupt:
        return (0, 0)
Example #5
0
 def publishPower():
     """Publish yesterday's power consumption and production to MQTT.

     Reads the MAX counter values for today and for yesterday from the
     power-meter RRD; the per-day delta of column 0 is consumption and of
     column 1 is production (kWh, presumably -- TODO confirm meter units).
     Each figure is published with a qualitative assessment code. On any
     error a neutral placeholder payload is published instead.
     """
     try:
         today = datetime.now()
         database = "/srv/dev-disk-by-label-DISK1/localdata/powermeter.rrd"
         # Renamed from `max`/`min`: those shadowed the builtins.
         latest = rrdtool.fetch(database, "MAX",
                                "--start=" + today.strftime("%Y%m%d"),
                                "--end=" + today.strftime('%Y%m%d'))
         yesterday = today - timedelta(days=1)
         earliest = rrdtool.fetch(database, "MAX",
                                  "--start=" + yesterday.strftime("%Y%m%d"),
                                  "--end=" + yesterday.strftime('%Y%m%d'))
         day = yesterday.strftime("%d.%m.")
         # Consumption = counter delta over one day.
         consumption = latest[2][0][0] - earliest[2][0][0]
         assessmentConsumption = 0
         if (consumption < 0.001):
             assessmentConsumption = "2"
         elif (consumption < 1):
             assessmentConsumption = "1"
         elif (consumption < 5.0):
             assessmentConsumption = "0"
         elif (consumption < 7.0):
             assessmentConsumption = "1"
         else:
             # BUG FIX: this branch previously assigned assessmentProduction,
             # leaving assessmentConsumption at its integer default.
             assessmentConsumption = "2"
         production = latest[2][0][1] - earliest[2][0][1]
         assessmentProduction = 0
         if (production < 0.001):
             if production < 0.0:
                 production = 0.0  # clamp meter noise below zero
             assessmentProduction = "-2"
         elif (production < 0.1):
             assessmentProduction = "-1"
         elif (production < 0.5):
             assessmentProduction = "0"
         elif (production < 0.9):
             assessmentProduction = "1"
         else:
             assessmentProduction = "2"
         # Use fewer decimals once the value needs more integer digits.
         if (consumption < 10.0):
             aClient.publish(
                 "/inkplate/in/power",
                 f"{day}@{consumption:.3f}@{assessmentConsumption}@{production:.3f}@{assessmentProduction}",
                 qos=1,
                 retain=True)
         else:
             aClient.publish(
                 "/inkplate/in/power",
                 f"{day}@{consumption:.2f}@{assessmentConsumption}@{production:.3f}@{assessmentProduction}",
                 qos=1,
                 retain=True)
     except:
         # Best effort: publish a neutral payload rather than fail silently.
         aClient.publish("/inkplate/in/power",
                         "[email protected]@[email protected]@0",
                         qos=1,
                         retain=True)
Example #6
0
def get_bandwidth():
    """Sum the second-to-last 5-minute AVERAGE sample of both directors'
    eth0 RRDs into the module-level stats dict ``j``."""
    samples_per_host = []
    for host in ("iem-director0", "iem-director1"):
        fn = "/var/lib/pnp4nagios/%s/eth0.rrd" % host
        ts = rrdtool.last(fn)
        fetched = rrdtool.fetch(fn, "AVERAGE", "-s", str(ts - 300), "-e", str(ts))
        samples_per_host.append(fetched[2])
    # [-2]: the very last row is often still incomplete; [2]: third DS column.
    j["stats"]["bandwidth"] = samples_per_host[0][-2][2] + samples_per_host[1][-2][2]
Example #7
0
 def fetch(self, cf='LAST', start='now', end='now', res=''):
     '''Returns the result of an rrdfetch command'''
     # Build the argument vector once; '-r' is only valid with a resolution.
     argv = [self.file_name, cf, '-s', str(start), '-e', str(end)]
     if res != '':
         argv.extend(['-r', str(res)])
     data = rrdtool.fetch(*argv)
     self.logger.debug(
         'RRDtool fetch value cf={cf_} start={start_t} end={end_t} res={res_}:'
         .format(cf_=cf, start_t=start, end_t=end, res_=res))
     self.logger.debug(data)
     return data
Example #8
0
 def fetch(self, cf='LAST', start='now', end='now', res=''):
     '''Returns the result of an rrdfetch command'''
     base_args = (self.file_name, cf, '-s', str(start), '-e', str(end))
     # Append the resolution pair only when one was requested.
     res_args = () if res == '' else ('-r', str(res))
     data = rrdtool.fetch(*(base_args + res_args))
     self.logger.debug(
         'RRDtool fetch value cf={cf_} start={start_t} end={end_t} res={res_}:'.format(
             cf_=cf, start_t=start, end_t=end, res_=res))
     self.logger.debug(data)
     return data
Example #9
0
def getLoadWeek(server_id):
    ''' get server current load and one week load'''
    import rrdtool
    try:
        rrdPath = s_service.objects.get(
            server__id=server_id,
            category__title__contains="Perfmon").rrd.path()
    except:
        # No Perfmon service for this server: emit an empty pair of cells.
        return "</td><td>"

    info = rrdtool.info(rrdPath)
    last_update = str(info["last_update"])
    # One week plus one second (604801 s) ending at the RRD's last update.
    current = rrdtool.fetch(rrdPath, "-s", last_update + "-604801", "-e",
                            last_update, "LAST")
    load = ""
    ls = []
    loadAvg = ""
    for i in range(len(current[1])):
        if current[1][i] == "load":
            # Latest usable value among the last 10 rows of the "load" DS.
            for l in current[2][-10:]:
                if l[i] != None and l[i] != "nan":
                    load = l[i]
            # Collect every numeric sample of the week for the average.
            for l in current[2]:
                try:
                    ls.append(float(l[i]))
                except:
                    pass  # None / non-numeric rows are skipped
    if load == "nan": load = ""
    try:
        load = int(load)
    except:
        pass  # keep the raw value (or "") when not an integer
    if ls != []:
        loadAvg = str(sum(ls) / len(ls))[:5]  # truncated, not rounded
    return str(load) + "</td><td>" + loadAvg
Example #10
0
def rrd_export(options):
    """Exports all RRD data.

    For every RRD file under options.rrddir_prefix, fetches AVERAGE data in
    [options.ts_start, options.ts_end] and writes one JSON record per
    (host, experiment, metric) to options.output (gzip-compressed when
    options.compress is set).

    Raises:
      OSError, if output directory cannot be created.
      IOError, if output file cannot be created or written.
    """
    open_func = open
    if options.compress:
        open_func = gzip.open
        options.output += '.gz'
    make_output_dirs(options.output)
    # When exporting raw counts, values are scaled back up by the RRD step.
    scale = options.step if options.counts else 1
    with contextlib.closing(open_func(options.output, 'w')) as fd_output:
        for filename in get_rrd_files(options.rrddir_prefix):
            time_range, value_names, data = rrdtool.fetch(
                filename, 'AVERAGE', '--start', str(options.ts_start), '--end',
                str(options.ts_end))
            # time_range is a 3-tuple (start, end, step): i.e. arguments to
            # range. data is a list of row tuples; zip(*data) transposes it
            # to one series per value name. Both are materialized as lists
            # because Python 3's zip() and range() return lazy iterators and
            # values[i] below requires indexing (the original code crashed
            # on Python 3 with "'zip' object is not subscriptable").
            timestamps = list(range(*time_range))
            values = list(zip(*data))

            for i, value_name in enumerate(value_names):
                hostname, experiment, metric = get_canonical_names(
                    filename, value_name, options)
                if metric is None or experiment in options.ignored_experiments:
                    continue
                record = get_json_record(hostname, experiment, metric,
                                         timestamps, values[i], scale)
                write_json_record(fd_output, record, options.pretty_json)
Example #11
0
    def fetchLastData(self, rrdfile, rrdfield):
        """Return (timestamp, value) of the most recent usable sample of
        *rrdfield* within the last 10 minutes; missing values become 0.
        """
        now = datetime.now()
        past = now - timedelta(minutes=10)
        startTime = (past.strftime("%Y%m%d %H:%M"))
        endTime = (now.strftime("%Y%m%d %H:%M"))

        # NOTE(review): the whole argv is passed as a single list, and each
        # flag shares one string with its value ('-s <time>'). This relies
        # on the installed rrdtool binding splitting such strings -- confirm
        # against the binding version in use.
        cmdString = [rrdfile, 'AVERAGE', '-s ' + startTime, '-e ' + endTime]

        (timeInfo, columns, rows) = rrdtool.fetch(cmdString)

        colIndex = list(columns).index(rrdfield)
        rows.pop(
        )  #chop off the latest value because RRD returns crazy last values sometimes

        #print "rrdfile=", rrdfile," , startTime=",startTime," , endTime=",endTime, " , time=", timeInfo, " , rows=", len(rows)

        lastTime = timeInfo[1]  # timeInfo is (start, end, step)
        lastRow = rows.pop()    # now the second-to-last fetched row
        if lastRow is None:
            lastValue = 0
        else:
            value = lastRow[colIndex]
            if value is None:
                lastValue = 0  # no sample recorded in this slot
            else:
                lastValue = value

        return lastTime, lastValue
Example #12
0
    def render (self, startTime, endTime, params):
        """Fetch AVERAGE data for [startTime, endTime] and assemble a
        tab-separated Timestamp/Watts/Temp dump (Python 2 only syntax).

        NOTE(review): ``out`` is assembled but never written to the
        response, and rows are appended without a newline separator --
        confirm whether the write call went missing.
        """
        (metadata, titles, data) = rrdtool.fetch(
            self.rrdFile, 
            "--start", str(startTime),
            "--end", str(endTime), 
            "AVERAGE"
        )

        try:
            self.responseCode(200)
            self.contentType('text/plain; charset=utf-8')

            out = "Timestamp\tWatts\tTemp"

            # metadata is (start, end, step); advance timestamp by the step.
            timestamp = metadata[0]
            for item in data:

                first = item[0]
                if first == None:
                    first = 0;  # missing sample -> 0

                second = item[1]
                if second == None:
                    second = 0;

                out += "%d\t%f\t%f" % (timestamp, first, second)

                timestamp = timestamp + metadata[2]

        except socket.error, e:
            pass  # client went away mid-response; nothing to do
Example #13
0
    def _rrdfetch(self, CFS, start, end, res):
        '''Fetch data for every CF in *CFS* and accumulate it in self.data.

        The first CF's complete fetch result seeds self.data; each later
        CF contributes only its row data, appended as one extra tuple
        element.

            CFS:    list of RRD CF names ([str]),
                    e.g. ['AVERAGE', 'MIN', 'MAX'], ['LAST']
            start:  starting time reference (str), e.g. '-30m', '-1d'
            end:    ending time reference (str)
            res:    RRA resolution (str), e.g. '-5m', '1', '300'
        '''
        if not self.rrd:
            return

        rrd_path = os.path.join(CHDIR, self.rrd)

        # see http://oss.oetiker.ch/rrdtool/doc/rrdfetch.en.html
        for index, cf in enumerate(CFS):
            fetched = rrdtool.fetch(rrd_path, cf, '-a', '-s', start, '-e',
                                    end, '-r', res)
            if index == 0:
                self.data = fetched           # initial fetch result
            else:
                self.data += (fetched[2], )   # append this CF's trace rows
Example #14
0
def resource():
    """Flask endpoint: fetch AVERAGE samples for the requested server and
    time window, and return memory/cpu/load series as JSON.

    None samples become 0; the last two (often incomplete) samples of
    every series are dropped.
    """
    db_name = 'rrdtool/' + request.form.get('server') + ".rrd"
    start_time = request.form.get('start_time')
    end_time = request.form.get('end_time')
    resolution = request.form.get('resolution')
    print(db_name, start_time, end_time)
    data = rrdtool.fetch(db_name, 'AVERAGE',  '-s {0}'.format(start_time), '-e {0}'.format(end_time))

    columns = list(zip(*data[2]))  # transpose rows -> one series per DS

    def clean(series, convert):
        # Replace missing samples with 0 and drop the trailing two entries.
        return [convert(v) if v is not None else 0 for v in series][:-2]

    memory = clean(columns[0], int)
    cpu = clean(columns[1], int)
    load = clean(columns[2], lambda v: round(v, 2))

    return jsonify({
        'data': {'memory': memory,
                 'cpu': cpu,
                 'load': load}
    })
Example #15
0
def indirect_export(self, tls, path, spec, start,interval):
    """Continuously export RRD metrics to the repository.

    Connects to the repository taken from the "repository.url" spec
    parameter, then loops forever: every *interval* seconds it fetches all
    .rrd files under *path* (NOTE(review): assumes *path* ends with a
    separator -- the filename is appended with plain '+') and forwards all
    new (metric, timestamp, value) samples. Runs until the process is
    stopped externally (see change_conf_indirect_export).
    """
    repository_ip = str(spec.get_parameter_value("repository.url").split(":")[-2])
    repository_port = int(spec.get_parameter_value("repository.url").split(":")[-1])

    connect_to_repository(self, tls, repository_ip, repository_port)
    last_fetched_time = 0

    # change the time expressed in UTC to local timezone    
    start_local = change_to_local_tzone(start)

    print ("local start time :" + str(start_local))
    print ("UTC start time :" + str(start))

    while True:

        # fetch RRD files till the process killed by the function -> change_conf_indirect_export
        result_list = []
        if(last_fetched_time == 0):
            # First pass: convert the datetime to an epoch start time,
            # backed off by one interval.
            start_time_local = int(mktime(start_local.timetuple()))
            startTime = str (start_time_local - interval )

        else:
            # Later passes resume where the previous fetch left off.
            startTime = str(last_fetched_time)


        endTime = str (int(time()))
        rrd_files = [ f for f in listdir(path) if isfile(join(path,f)) and (".rrd" in f) ]

        for f in rrd_files :
            rrdMetric = rrdtool.fetch( (path  + f),  "AVERAGE" ,'--resolution', str(interval), '-s', startTime, '-e', endTime)

            rrd_time = rrdMetric[0][0]  # start of the returned window

            # NOTE(review): `tuple` shadows the builtin; only the first
            # datasource column of each row is exported.
            for tuple in rrdMetric[2]:
                if tuple[0] is not None:

                    rrd_time = rrd_time + interval
                    timestamp = float(rrd_time)
                    value = float(tuple[0])
                    metric = f
                    
                    # Remember the newest sample time seen across all files.
                    if (rrd_time > last_fetched_time):
                        last_fetched_time = int(rrd_time)

                    result_list.append((metric,timestamp,value))
            
        print ("result list size :    " + str(len (result_list)))

        if len(result_list) > 0:
            return_results_to_repository(self, result_list)

        sleep(interval)
Example #16
0
def fetchDataFromRRDDatabase( databaseName, startTime, endTime, interval, graphicType):
    """
        @summary : Returns the stored data from a database based on the desired interval.

        @return : Output obtained from the rrd fetch command, or None when
                  the fetch failed. (Previously a failed fetch left
                  ``output`` unbound and the final return raised NameError.)
    """

    resolution = int(interval*60)  # interval is in minutes; rrdtool wants seconds

    # Databases are never any newer than the top of the current hour.
    if endTime > ( time.time() )/3600*3600:
        # Floor division: the original `/` only floored under Python 2's
        # integer division; `//` keeps that behaviour on Python 3 too.
        endTime = int(time.time())//3600*3600

    # Round end time down to the closest multiple of the desired resolution
    # EX : closest 10 minutes, hour, day, etc...
    endTime = int(endTime)//int(resolution)*int(resolution)

    output = None  # BUG FIX: ensure a defined value when the fetch fails.
    try:
        output = rrdtool.fetch( databaseName, 'AVERAGE', '-r', str(resolution), '-s', "%s" %(startTime), '-e', '%s' %(endTime) )
    except:
        # Best effort: callers receive None when the database is unreadable.
        pass

    return output
Example #17
0
def _read_data_from_rrd(data_source_path, start, end, step,
                        pipeline_: Pipeline):
    """Fetch AVERAGE data for one Cacti data source.

    Resolves the '<path_rra>' placeholder against the pipeline's source
    directory, optionally staging the file through a temp copy, and
    returns the raw rrdtool.fetch result. Returns None whenever the path
    is unusable, the fetch is empty, or the RRD cannot serve the requested
    step (unless the pipeline allows a dynamic step).
    """
    if not data_source_path:
        return
    if '<path_rra>/' not in data_source_path:
        logger_.debug(
            f'Path {data_source_path} does not contain "<path_rra>/", skipping'
        )
        return

    rrd_file_path = data_source_path.replace('<path_rra>',
                                             _get_source_dir(pipeline_))
    if not os.path.isfile(rrd_file_path):
        logger_.debug(f'File {rrd_file_path} does not exist')
        return

    uses_tmp_copy = source.CactiSource.RRD_DIR_PATH in pipeline_.source.config
    if uses_tmp_copy:
        rrd_file_path = _copy_files_to_tmp_dir(pipeline_, rrd_file_path,
                                               data_source_path)

    result = rrdtool.fetch(rrd_file_path, 'AVERAGE',
                           ['-s', start, '-e', end, '-r', step])

    if uses_tmp_copy:
        os.remove(rrd_file_path)

    if not result or not result[0]:
        return

    # result[0][2] is the closest step the RRD could actually serve; when it
    # differs from the requested one and dynamic steps are not allowed, the
    # source is skipped.
    if result[0][2] != int(step) and not pipeline_.dynamic_step:
        return
    return result
Example #18
0
def get_bandwidth():
    """Store the latest eth0 bandwidth sample in the module-level ``j`` dict."""
    fn = "/var/lib/pnp4nagios/mesonet/eth0.rrd"

    last_ts = rrdtool.last(fn)
    # Fetch the trailing five minutes of AVERAGE data.
    _, _, rows = rrdtool.fetch(fn, "AVERAGE", "-s", str(last_ts - 300),
                               "-e", str(last_ts))
    # [-2]: the very last row is often still incomplete; [2]: third DS column.
    j['stats']['bandwidth'] = rows[-2][2]
Example #19
0
 def dump_histogram_data(self, period, step_in):
     """Aggregate CPU/memory/consolidated usage per date group over the
     last *period* seconds, sampled at *step_in* seconds.

     Returns (periodic_cpu_usage, periodic_mem_usage,
     periodic_consolidated_usage, valid_rows): defaultdicts keyed by the
     date group of each sample (see self.get_date_group).
     """
     # NOTE(review): multiplying then dividing by step_in is arithmetically
     # a no-op; presumably meant to align the end timestamp to the step --
     # confirm intent.
     end_ts_in = int(
         int(calendar.timegm(time.gmtime()) * step_in) / step_in)
     start_ts_in = int(end_ts_in - int(period))
     result = rrdtool.fetch(self.rrd_location, 'AVERAGE', '-r',
                            str(step_in), '-s', str(start_ts_in), '-e',
                            str(end_ts_in))
     periodic_cpu_usage = collections.defaultdict(lambda: 0.0)
     periodic_mem_usage = collections.defaultdict(lambda: 0.0)
     periodic_consolidated_usage = collections.defaultdict(lambda: 0.0)
     valid_rows = collections.defaultdict(lambda: 0.0)
     start_ts_out, _, step = result[0]
     current_ts = start_ts_out
     for _, cdp in enumerate(result[2]):
         current_ts += step
         if len(cdp) == 4:  # expected columns: cpu, mem, consolidated, cost
             try:
                 datetime_utc = time.gmtime(current_ts)
                 date_group = self.get_date_group(datetime_utc, period)
                 # Round each usage value to two decimal places.
                 current_cpu_usage = round(100 * float(cdp[0])) / 100
                 current_mem_usage = round(100 * float(cdp[1])) / 100
                 current_consolidated_usage = round(
                     100 * float(cdp[2])) / 100
                 periodic_cpu_usage[date_group] += current_cpu_usage
                 periodic_mem_usage[date_group] += current_mem_usage
                 periodic_consolidated_usage[
                     date_group] += current_consolidated_usage
                 valid_rows[date_group] += 1
             except:
                 # Rows containing None fail float() -- skip them.
                 pass
     return periodic_cpu_usage, periodic_mem_usage, periodic_consolidated_usage, valid_rows
Example #20
0
    def dump_trends_data(self, period, step_in):
        """Build per-sample JSON usage series (CPU, memory, consolidated,
        cumulated cost) over the last *period* seconds at *step_in* second
        resolution, and export the latest values to Prometheus.

        Returns a 4-tuple of comma-joined JSON fragments, or four empty
        strings when no usable CPU/memory data was found.
        """
        # NOTE(review): multiplying then dividing by step_in is a no-op;
        # presumably meant to align the end timestamp to the step.
        end_ts_in = int(
            int(calendar.timegm(time.gmtime()) * step_in) / step_in)
        start_ts_in = int(end_ts_in - int(period))
        result = rrdtool.fetch(self.rrd_location, 'AVERAGE', '-r',
                               str(step_in), '-s', str(start_ts_in), '-e',
                               str(end_ts_in))
        res_usage = collections.defaultdict(list)
        sum_res_usage = collections.defaultdict(lambda: 0.0)
        cumulated_cost = 0.0
        start_ts_out, _, step = result[0]
        current_ts = start_ts_out
        for _, cdp in enumerate(result[2]):
            current_ts += step
            if len(cdp) == 4:  # expected columns: cpu, mem, consolidated, cost
                try:
                    datetime_utc = time.gmtime(current_ts)
                    # Round each usage value to two decimal places.
                    current_cpu_usage = round(100 * float(cdp[0])) / 100
                    current_mem_usage = round(100 * float(cdp[1])) / 100
                    current_consolidated_usage = round(
                        100 * float(cdp[2])) / 100
                    cumulated_cost += round(100 * float(cdp[3])) / 100
                    datetime_utc_json = time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                                      datetime_utc)
                    res_usage[ResUsageType.CPU].append(
                        '{"name":"%s","dateUTC":"%s","usage":%f}' %
                        (self.dbname, datetime_utc_json, current_cpu_usage))
                    res_usage[ResUsageType.MEMORY].append(
                        '{"name":"%s","dateUTC":"%s","usage":%f}' %
                        (self.dbname, datetime_utc_json, current_mem_usage))
                    res_usage[ResUsageType.CONSOLIDATED].append(
                        '{"name":"%s","dateUTC":"%s","usage":%s}' %
                        (self.dbname, datetime_utc_json,
                         current_consolidated_usage))
                    res_usage[ResUsageType.CUMULATED_COST].append(
                        '{"name":"%s", "dateUTC":"%s","usage":%s}' %
                        (self.dbname, datetime_utc_json, cumulated_cost))
                    sum_res_usage[ResUsageType.CPU] += current_cpu_usage
                    sum_res_usage[ResUsageType.MEMORY] += current_mem_usage
                    sum_res_usage[ResUsageType.
                                  CONSOLIDATED] += current_consolidated_usage
                    sum_res_usage[
                        ResUsageType.CUMULATED_COST] += cumulated_cost
                except:
                    # Rows containing None fail float() -- skip them.
                    pass

        # current_*_usage below hold the values of the last successfully
        # parsed row; the sums are only positive when at least one row
        # parsed, so they are bound whenever this branch is taken.
        if sum_res_usage[ResUsageType.CPU] > 0.0 and sum_res_usage[
                ResUsageType.MEMORY] > 0.0:
            PROMETHEUS_HOURLY_USAGE_EXPORTER.labels(
                self.dbname, ResUsageType.CPU.name).set(current_cpu_usage)
            PROMETHEUS_HOURLY_USAGE_EXPORTER.labels(
                self.dbname, ResUsageType.MEMORY.name).set(current_mem_usage)
            PROMETHEUS_HOURLY_USAGE_EXPORTER.labels(
                self.dbname,
                ResUsageType.CONSOLIDATED.name).set(current_consolidated_usage)
            return (','.join(res_usage[ResUsageType.CPU]),
                    ','.join(res_usage[ResUsageType.MEMORY]),
                    ','.join(res_usage[ResUsageType.CONSOLIDATED]),
                    ','.join(res_usage[ResUsageType.CUMULATED_COST]))
        return '', '', '', ''
Example #21
0
def fetch_datapoints(rrd_file, periods, datasource):
    """Fetches the datapoints from the rrd-file.

    For each period, fetches AVERAGE data, extracts the column matching
    ``datasource.name``, converts rate-type sources back to absolute
    values, and returns a flat list of (timestamp, value) pairs with
    None values filtered out.
    """
    datapoints = []
    for period in periods:
        (time_info, columns, rows) = rrdtool.fetch(rrd_file, 'AVERAGE', '-s',
                                                   str(period.start_time),
                                                   '-e', str(period.end_time))

        # rows.pop()  # The last value may be NaN based on when last update was
        column_index = list(columns).index(datasource.name)
        values = [row[column_index] for row in rows]
        if datasource.type in ['COUNTER', 'DERIVE']:
            # time_info[-1] is the step of the returned data; rates are
            # converted back to absolute per-step deltas.
            values = calculate_absolute_from_rate(values, time_info[-1])

        timestamps = list(range(*time_info))

        if period.step and period.step < time_info[-1]:
            # The RRA is coarser than requested: stretch the series onto the
            # finer period.step grid by inserting filler entries.
            num_missing_values = time_info[-1] / period.step
            values = insert_missing_values(values, num_missing_values - 1)
            timestamps = list(range(time_info[0], time_info[1], period.step))

        datapoints.extend(p for p in zip(timestamps, values)
                          if p[1] is not None)

    return datapoints
Example #22
0
def get_rrd_line_value(rrd,start,end,line):
    """Return the list of values of datasource *line* between start and end.

    Fetches with the LAST CF; the result has one entry per returned row
    and may contain None for missing samples.
    """
    import rrdtool
    current = rrdtool.fetch(rrd, "-s", str(start) + "-1", "-e", str(end), "LAST")
    names = current[1]
    # Default to the last column: the historical loop fell through with its
    # index at the final position when *line* was not found, and callers may
    # rely on that.
    idx = len(names) - 1
    for i in range(len(names)):
        if names[i] == line:
            idx = i
            break
    # A list comprehension instead of map(...): on Python 3 map returns a
    # lazy iterator, which broke callers that index or re-iterate the result
    # (Python 2's map already returned a list, so behaviour is unchanged).
    return [row[idx] for row in current[2]]
Example #23
0
def getStoreData():
    """Replay samples from testData.txt into the RRD and stream the RRD
    contents to the server, one sample per second.

    Relies on module-level names: name (RRD file path), dataName1 (label
    for console output), startTime (fetch window start), sendData()
    (network sender). Python 2 only (print statements).
    """
    fi = open('testData.txt','r')
    for line in fi:
        #generate data
        VAR1 = float(line)
        #see what we generated to determine if rrd data is correct
        print dataName1 + ': %f\n' % VAR1
        #update rrd 
        ret = rrdtool.update(name, "%d:%f" %(time.time(),VAR1))
        
        #Error message. Error if time stamp is wrong
        if ret:
            print 'ERROR: ' + rrdtool.error()
        
        #set endtime for fetching data from rrd
        lastTimeStamp = rrdtool.last(name)
        
        #send data from RRDTool to server
        #CURRENTLY SENDS ALL CONTENTS OF RRD - to change replace startTime with 1 second ago...
        sendData(rrdtool.fetch(name, 'LAST', '--start', "%d" % (startTime) , '--end', '%d' % (lastTimeStamp)))
        
        
        #let me know if everything worked
        print 'client sleeping...\n'
        
        #sleep for 1 second
        time.sleep(1)
    
    fi.close()
Example #24
0
def get_widget_alert_times(widget,start,end):
    """Count alert episodes for *widget* between *start* and *end*.

    Only the "error" counter is ever incremented: each contiguous run of
    samples matching any field's condition counts as one error episode.
    NOTE(review): eval() of the stored data_def text and of the per-sample
    condition string is unsafe for user-editable input. Python 2 only
    (dict.has_key).
    """
    result = {"error":0,"warning":0,"ok":0,"all":0}
    field_def = {}
    if widget.data_def:
        try:
            current = rrdtool.fetch(widget.rrd.path(), "-s", str(start) + "-1", "-e", str(end), "LAST")
            data_def = eval(widget.data_def.replace("\n", "").replace("\r", ""))
            ds = current[1]
            # Map column index -> formatting/condition definition.
            for i in range(0, len(ds)):
                if data_def.has_key(ds[i]):
                    field_def[i] = data_def[ds[i]]
            # flag: currently inside a matching run; flat: run not yet counted.
            flag = False; flat = True
            for l in current[2]:
                tmp = False
                for x,y in field_def.items():
                    # y[2] is a comparison suffix (e.g. ">90"); the sample
                    # value is prepended and the expression eval()'d.
                    if eval(str(l[x])+y[2]):
                        tmp = True
                if tmp:
                    flag = True
                else:
                    flag = False
                    flat = True
                if flag  and flat:
                    result["error"] += 1  # first sample of a new episode
                    flat = False
        except:
            pass  # best effort: any failure leaves the zeroed counters
    return result
Example #25
0
def parse(host, service, start='-1h', interval='60'):
    """Read a pnp4nagios XML/RRD pair for (host, service) and return
    {label: [{'x': timestamp, 'y': value}, ...]} -- one series per
    datasource label, at the RRD's returned resolution.
    """
    datapath = path.join(PERF_PATH, host)
    xml_path = path.join(datapath, '%s.xml' % service)
    rrd_path = path.join(datapath, '%s.rrd' % service)

    # The XML sidecar carries the human-readable datasource labels.
    root = ET.parse(xml_path).getroot()
    labels = [ds.find('LABEL').text for ds in root.findall('DATASOURCE')]

    rrd_info = rrdtool.fetch(rrd_path, 'AVERAGE', '--start', start, '-r',
                             interval)
    start, end, resolution = rrd_info[0]
    rrd_rows = rrd_info[2]
    data = {}
    # One separate series per label.
    for label_index, label in enumerate(labels):
        series = []
        ts = start
        for row in rrd_rows:
            series.append({'x': ts, 'y': row[label_index]})
            ts += resolution
        # Kept for the disabled down-averaging below (ex. with 120 elements,
        # average every 2).
        elements_to_avg = len(series) / 120
        #new_result = [{'y': sum([el['y'] for el in grouped]) / elements_to_avg, 'x': grouped[0]['x']} \
        #for grouped in zip(*[iter(result)] * elements_to_avg)]
        data[label] = series
    return data
Example #26
0
def monthMaxAvg(f):
    """Average the non-None MAX samples of the last 30 days of *f*.

    Returns (in_average, out_average) for the RRD's first and second
    datasource columns respectively.
    """
    fetched = rrdtool.fetch(f, 'MAX', '-s', '-30d')
    rows = fetched[2]
    in_values = [row[0] for row in rows if row[0] is not None]
    inp = sum(in_values) / len(in_values)
    out_values = [row[1] for row in rows if row[1] is not None]
    out = sum(out_values) / len(out_values)
    return (inp, out)
Example #27
0
def getLoadWeek(server_id):
    ''' get server current load and one week load'''
    import rrdtool
    try:
        rrdPath = s_service.objects.get(server__id = server_id,category__title__contains = "Perfmon").rrd.path()
    except:
        # No Perfmon service for this server: emit an empty pair of cells.
        return "</td><td>"
    
    info = rrdtool.info(rrdPath)
    last_update = str(info["last_update"])
    # One week plus one second (604801 s) ending at the RRD's last update.
    current = rrdtool.fetch(rrdPath, "-s", last_update + "-604801", "-e", last_update, "LAST")
    load = "";ls = [];loadAvg = ""
    for i in range(len(current[1])):
        if current[1][i] == "load":
            # Latest usable value among the last 10 rows of the "load" DS.
            for l in current[2][-10:]:
                if l[i] != None and l[i] != "nan":
                    load = l[i]
            # Collect every numeric sample of the week for the average.
            for l in current[2]:
                try:
                    ls.append(float(l[i]))
                except:
                    pass  # None / non-numeric rows are skipped
    if load == "nan":load = ""
    try:
        load = int(load)
    except:
        pass  # keep the raw value (or "") when not an integer
    if ls != []:
        loadAvg = str(sum(ls)/len(ls))[:5]  # truncated, not rounded
    return str(load)+"</td><td>"+loadAvg
Example #28
0
def rrd_get_stddev(rrdFile, ds, cf, start_time, end_time):
    """ from RRDhelper.pm

    Population standard deviation of data source *ds* (consolidation *cf*)
    over [start_time, end_time]; None when the range holds no samples.
    """
    (realvals, names, array) = rrdtool.fetch(
        str(rrdFile),
        str(cf),
        "--start=%d" % start_time,
        "--end=%d" % end_time)
    col = list(names).index(ds)
    count = 0
    total = 0
    total_sq = 0
    for row in array:
        sample = row[col]
        if sample is not None:
            count += 1
            total += sample
            total_sq += sample**2
    if count == 0:
        return None
    # population variance; clamp tiny negative rounding error to 0
    variance = 1.0 / count * (total_sq - total**2 / count)
    if variance < 0.0:
        return 0.0
    return sqrt(variance)
Example #29
0
    def get(self, host, metric):
        """Serve JSON timeline data for one collectd metric of one host.

        Writes {'units': ..., 'instances': {key: {start, end, resolution,
        timeline}}} built from the host's rrd files for the requested
        ?range= window; raises HTTPError(400) on an unknown range.
        """
        if host not in self.application.plugin_info:
            self.write('404')
            self.finish()
            # BUG FIX: must return after finish(); the handler previously
            # kept executing and hit a KeyError below for unknown hosts.
            return
        plugins = self.application.plugin_info[host].get('plugins', {})
        if metric not in plugins:
            self.write('404')
            self.finish()
            return
        instances = plugins[metric]
        cleaned_data = {
            'units': UNIT_MAP.get(metric),
        }
        instance_data = {}
        offset = self.get_argument('range', default='24hr')
        if offset not in DATE_RANGE_MAP:
            raise HTTPError(400, 'Invalid date range')
        date_range = DATE_RANGE_MAP[offset]

        # NOTE(review): this mutates the shared plugin_info list in place;
        # a second request for the same metric will ValueError — confirm.
        if metric.startswith('cpu-'):
            instances.remove('cpu-idle')
        if metric == 'df-local':
            instances.remove('df_complex-reserved')

        for instance in instances:
            load_file = str(
                os.path.join(self.application.settings['rrd_directory'], host,
                             metric, '%s.rrd' % instance))
            start = str(date_range['start'])
            res = str(date_range['resolution'])
            period, metrics, data = rrdtool.fetch(load_file, 'AVERAGE',
                                                  '--start', start,
                                                  '--resolution', res)
            start, end, resolution = period
            base = {'start': start, 'end': end, 'resolution': resolution}

            if metric == 'load':
                # load rrds carry three ds; only the 1-minute one is served
                metrics = (metrics[0], )
                data = ((i[0], ) for i in data)

            # BUG FIX: each key now gets its own dict and timeline list.
            # Previously one shared `default` dict was assigned to every
            # key, so all data sources appended into a single timeline.
            if len(metrics) == 1:
                instance_data[instance] = dict(base, timeline=[])
            else:
                for name in metrics:
                    key = '%s-%s' % (instance, name)
                    instance_data[key] = dict(base, timeline=[])
            for item in data:
                for i, name in enumerate(metrics):
                    if len(metrics) == 1:
                        key = instance
                    else:
                        key = '%s-%s' % (instance, name)
                    instance_data[key]['timeline'].append(item[i])
        cleaned_data['instances'] = instance_data
        self.write(cleaned_data)
Example #30
0
 def underMinQuota(self, diffString="24h"):
     """ physical values over max in float 0..1

     Fraction (0..1) of the MIN-consolidated samples within the last
     *diffString* that fall below the configured minimum quantity.
     Returns None when the rrd file is missing (it is created) or no
     minimum is configured.
     """
     if not os.path.exists(self.getRrdFilename()):
         self.rrd_create()
         return None
     inpPQ = self.getPQinpQuantity()
     (minQuantity, minString) = self.getMinQuantity()
     if minQuantity is None:
         return None
     # convert the configured minimum into the rrd's 1/s unit
     convMinPQ = minQuantity / inpPQ
     convMinPQ.ounit("1/s")
     convMin = float(convMinPQ)
     currtime = time.time()
     rrdTimeTup, rrdVarTup, rrdValList = rrdtool.fetch(
         self.getRrdFilename(),
         "MIN",
         "--start=%d-%s" % (currtime, diffString),
         "--end=%d" % currtime
     )
     total = 0
     below = 0
     for row in rrdValList:
         for sample in row:
             if sample is not None:
                 total += 1
                 if sample < convMin:
                     below += 1
     if total > 0:
         return float(below) / float(total)
     return 0.0
Example #31
0
def get_metric():
    node = request.args.get('node', 0, type=str)

    # list of nodes avabilable from ganglia
    nodelist = listdir('/var/lib/ganglia/rrds/unspecified')

    for n in nodelist:
        if n != '__SummaryInfo__':
            nodes[n.split('.')[0]] = dict()
            nodes[n.split('.')[0]]['domain'] = n.split('.')[1]
            nodes[node.split('.')[0]]['f'] = dict()
            nodes[node.split('.')[0]]['s'] = dict()

    # use rrdtool to get load of server
    res = 600 # 5 minutes
    t = int(time.mktime(time.localtime(time.time())))

    # need to move things out of 'unspecified" at some point...
    metrics = listdir('/var/lib/ganglia/rrds/unspecified/' + node + '.' + nodes[node]['domain'])
    for metric in metrics:
        rawdata = fetch('/var/lib/ganglia/rrds/unspecified/'  
                        + node + '.' + nodes[node]['domain'] + '/' 
                        + metric, 'AVERAGE', '-r ' + str(res), 
                        '-s e-30m', '-e ' + str(t/res*res))[2]
            
        # find maximum
        m = 0.0
        for datapoint in rawdata:
            if isinstance(datapoint[0], float):
                if datapoint[0] > m:
                    m = datapoint[0]

        if m == 0:
            ratio = 1
        else:    
            ratio = graph_height/m

        data = list()                
        for i, datapoint in enumerate(rawdata):
            if isinstance(datapoint[0], float) and i < 6: # Maybe remove size limit...
                value = datapoint[0] * ratio
                point = value
                if '.' in str(value):
                    point = str(value).split('.')[0]# + "." + str(value).split('.')[1][:2] # round to 2 decimal places
                data.append([str(point), datapoint[0]]) # append the normalised value for display plus the actual value for diagnosis
            if isinstance(datapoint[0], str):
                data.append(datapoint[0])

        # TODO Handle string metrics here
        if isinstance(rawdata[0][0], float):
            nodes[node]['f'][metric.split('.')[0]] = data
        if isinstance(rawdata[0][0], str):
            nodes[node]['s'][metric.split('.')[0]] = data


    instances = [ instance for instance in session.query(sqlinstances) if instance.deleted == False]
    for instance in instances:
        print instance.host

    return jsonify(metrics=nodes[node])            
Example #32
0
    def get_cpu_stats(self, start=600, step=60):
        """Return a flot-style CPU usage series for the last *start* seconds.

        :param start: window length in seconds (aligned down to *step*).
        :param step: resolution in seconds; >= 1440 switches to AVERAGE.
        :return: {'label', 'data', 'color'} dict, or {} when the rrd
                 cannot be fetched.
        """
        now = int(time.time())
        path = self._make_path('cpu')

        if step >= 1440:
            cf = 'AVERAGE'
            step = 1440
        else:
            cf = 'LAST'

        start_ts = now - (now % step)
        negated = -(start - (start % step))

        try:
            rdata = rrdtool.fetch(str(path), cf, '-s', str(negated), '-e', str(start_ts), '-r', str(step))
        except Exception:
            # best effort: missing/unreadable rrd yields an empty result
            return {}

        data = rdata[2]
        step = rdata[0][2]
        begin_ts = rdata[0][0]
        points = []  # renamed from `set`, which shadowed the builtin
        for row in data:
            value = row[0]
            if value is not None:
                # timestamps in ms for flot; raw value is tenths of percent
                points.append([begin_ts * 1000, value / 10])
            # BUG FIX: advance the timestamp for gap rows too; previously
            # `continue` skipped the increment, shifting every sample after
            # a None row to a wrong (earlier) time.
            begin_ts += step

        return {'label': 'CPU usage (%)', 'data': points, 'color': "#336633"}
Example #33
0
    def _update(self, rrd_path, v):
        """Write value *v* into *rrd_path* at the archive's current end time.

        If that slot is already filled, retry one step (60 s) later.
        Returns 1 on success, 0 on failure.
        """
        try:
            res = rrdtool.fetch(rrd_path, "AVERAGE", "--start", "now-60", "--end", "now")
        except Exception:
            traceback.print_exc()
            return 0

        if res:
            end_time = res[0][1]
            try:
                rrdtool.update(rrd_path, str(end_time) + ":" + str(v))
            except Exception:
                # the end slot may already have data; retry one step later
                try:
                    rrdtool.update(rrd_path, str(end_time + 60) + ":" + str(v))
                except Exception:
                    traceback.print_exc()
                    return 0

        return 1
def getwireless(apports, path, check):
    """Sum per-minute AVERAGE traffic across all AP port rrds in *path*.

    :param apports: iterable of rrd file names (one per access point port).
    :param path: directory holding the rrds; its basename encodes the end
                 time as 'Y-m-d-H'.
    :param check: "in" (column 0) or "out" (column 1).
    :return: list of summed samples covering the 10 hours before the end time.
    :raises ValueError: on an unknown *check* value.
    """
    if check == "in":
        num = 0
    elif check == "out":
        num = 1
    else:
        # BUG FIX: previously fell through with `num` undefined, producing a
        # confusing NameError at the first row access below.
        raise ValueError("check must be 'in' or 'out', got %r" % (check,))
    end_str = os.path.basename(path).split('-')
    end_str = '/'.join(end_str[0:3]) + " " + end_str[3] + ":00"
    first = True
    wirelessin = []
    for p in sorted(apports):
        rrddata = rrdtool.fetch(os.path.join(path,
                                             p), "AVERAGE", "--resolution",
                                "60s", "--start", "end-10h", "--end", end_str)
        values = rrddata[2]
        for index, row in enumerate(values):
            sample = row[num] if row[num] is not None else 0
            if first:
                wirelessin.append(sample)
            else:
                wirelessin[index] += sample
        first = False
    return wirelessin
Example #35
0
    def _fetch_data(self, rrdObject, startTime, endTime):
        """ Fetch data from RRD archive for given period of time.

        :param rrdObject: RRD
        :type rrdObject: String
        :type startTime: int
        :type endTime: int
        :return: Dictionary with data from RRD archive.
        :rtype: Dict
        :raises Exception: when the rrd file is missing or the fetch fails.
        """

        if not path.exists(rrdObject):
            raise Exception("File not exists: %s" % rrdObject)

        try:
            rrd_data = rrdtool.fetch(str(rrdObject), "AVERAGE", "--start", str(startTime), "--end", str(endTime))
        except Exception as err:
            LOG.error("fetch exc %s | %s", err, rrdObject)
            # BUG FIX: previously fell through with rrd_data=None and crashed
            # below with "'NoneType' object is not subscriptable"; propagate
            # the real error instead.
            raise

        return RrdData(info=rrd_data[0], additional=rrd_data[1], series=rrd_data[2])
Example #36
0
 def _fetch_rrd_data(self):
     '''
     Fetch input, output data from the rrd file

     Returns the raw rrdtool.fetch() tuple for the last 300 seconds
     at 300-second resolution.
     '''
     logging.debug('RRD_PATH: %s', self.rrdfile)
     return rrdtool.fetch(self.rrdfile, 'AVERAGE', '-r', '300', '-s', '-300')
Example #37
0
  def fetchLastData(self,rrdfile,rrdfield):
    """Return (lastTime, lastValue) for *rrdfield* over roughly the last
    ten minutes of *rrdfile*, using AVERAGE consolidation.  A missing
    sample is reported as value 0.
    """
    now = datetime.now()
    past = now - timedelta(minutes=10)
    startTime = (past.strftime("%Y%m%d %H:%M"))
    endTime = (now.strftime("%Y%m%d %H:%M"))

    # NOTE(review): the whole argv is passed to rrdtool.fetch as one list,
    # and '-s <time>' keeps its embedded space — this relies on the
    # installed py-rrdtool binding accepting both; confirm before changing.
    cmdString = [rrdfile , 'AVERAGE' , '-s ' + startTime , '-e ' + endTime]
    
    (timeInfo, columns, rows) = rrdtool.fetch(cmdString)

    colIndex = list(columns).index(rrdfield)
    rows.pop() #chop off the latest value because RRD returns crazy last values sometimes
    
    #print "rrdfile=", rrdfile," , startTime=",startTime," , endTime=",endTime, " , time=", timeInfo, " , rows=", len(rows)
    
    lastTime = timeInfo[1]
    lastRow = rows.pop()
    # NOTE(review): rows hold per-step tuples, so lastRow itself is unlikely
    # to be None; the meaningful None check is on the column value below.
    if lastRow is None:
        lastValue = 0
    else:
        value = lastRow[colIndex]
        if value is None:
            lastValue = 0
        else:
            lastValue = value
        
    return lastTime, lastValue
Example #38
0
  def fetch(self,rrdfile,rrdfield,startTime, endTime):
    """Fetch one field's AVERAGE series from *rrdfile* between the epoch
    times startTime..endTime.

    :return: (timeInfo, values) where timeInfo is rrdtool's (start, end,
             step) tuple and values has None samples replaced by 0.
    """
    # NOTE(review): argv passed as one list with '-s <time>' spacing —
    # relies on the installed py-rrdtool binding, kept as-is.
    cmdString = [rrdfile , 'AVERAGE' , '-s ' + str(startTime) , '-e ' + str(endTime)]

    (timeInfo, columns, rows) = rrdtool.fetch(cmdString)
    colIndex = list(columns).index(rrdfield)
    rows.pop() #chop off the latest value because RRD returns crazy last values sometimes

    # None (unknown) samples are reported as 0
    values = [0 if row[colIndex] is None else row[colIndex] for row in rows]

    return (timeInfo, values)
Example #39
0
def rrd_export(options):
    """Exports all RRD data.

    Reads every rrd under options.rrddir_prefix, converts each data source
    into JSON records and writes them to options.output (gzipped when
    options.compress is set).

    Raises:
      OSError, if output directory cannot be created.
      IOError, if output file cannot be created or written.
    """
    open_func = open
    if options.compress:
        open_func = gzip.open
        options.output += '.gz'
    make_output_dirs(options.output)
    scale = options.step if options.counts else 1
    with contextlib.closing(open_func(options.output, 'w')) as fd_output:
        for filename in get_rrd_files(options.rrddir_prefix):
            time_range, value_names, data = rrdtool.fetch(
                filename, 'AVERAGE', '--start', str(options.ts_start), '--end',
                str(options.ts_end))
            # time_range is a 3-tuple (start, end, step): i.e. arguments to range.
            timestamps = range(*time_range)
            # data is a list of per-step tuples; transposing gives one tuple
            # per value name.  BUG FIX: materialize the transpose so the
            # values[i] indexing below also works on Python 3, where zip()
            # returns an iterator.
            values = list(zip(*data))

            for i, value_name in enumerate(value_names):
                hostname, experiment, metric = get_canonical_names(
                    filename, value_name, options)
                if metric is None or experiment in options.ignored_experiments:
                    continue
                record = get_json_record(hostname, experiment, metric,
                                         timestamps, values[i], scale)
                write_json_record(fd_output, record, options.pretty_json)
Example #40
0
def fetchDataFromRRDDatabase(databaseName, startTime, endTime, interval,
                             graphicType):
    """
        @summary : Returns the stored data from a database based on the desired interval.

        @return : Output obtained from the rrd fetch command, or None when
                  the fetch fails.
    """

    resolution = int(interval * 60)

    if endTime > (time.time()) / 3600 * 3600:
        endTime = int(time.time(
        )) / 3600 * 3600  #top of the hour...databases are never any newer

    # round end time to time closest to desired resolution EX : closest 10 minutes,hour,day,etc...
    endTime = int(endTime) / int(resolution) * int(resolution)

    # BUG FIX: output must be bound even when the fetch fails; previously the
    # except branch left it undefined and `return output` raised NameError.
    output = None
    try:
        output = rrdtool.fetch(databaseName, 'AVERAGE', '-r', str(resolution),
                               '-s', "%s" % (startTime), '-e',
                               '%s' % (endTime))
    except:
        # best effort: the caller is expected to handle a None result
        pass

    return output
Example #41
0
def writeJson(consolidation,resolution):
   """Dump the latest *consolidation* (e.g. AVERAGE/MIN/MAX) readings from
   meteo.rrd into CHARTPATH/<consolidation>values.json, with each value
   truncated to two decimals.  The JSON file is removed whenever anything
   fails.  NOTE(review): the body mixes tabs and spaces and uses Python 2
   print statements — handle with care.
   """
   now=time.time()
   json_dict={}
   res=resolution
   end=int((now)/res)*res
   start=end-res
   #print now,start,end
   filename=CHARTPATH+consolidation+"values.json"
   try:
	ret = rrdtool.fetch('meteo.rrd',consolidation,"--start",str(start),"--end",str(end),"--resolution",str(res));
	if ret:
	 	print rrdtool.error() 
	#print ret
	mags=ret[1]
	values=ret[2][0]
	i=0
	for mag in mags:
		#print mag,values[i]
		json_dict[mag]=int(values[i]*100)/100.
		i=i++1	
	#print consolidation,json_dict
	x = simplejson.dumps(json_dict)
        fi=open(filename,"w")
	fi.write(x)
	fi.close()
   except:
	os.remove(filename)
Example #42
0
File: views.py Project: mv/nagcat
def are_graphable(host, service_list):
    """Flags services of host that are graphable (has state or trend).

    Sets service['is_graphable'] in place for every service dict in
    *service_list*, mirroring the logic of is_graphable().
    """
    rra_path = settings.RRA_PATH
    for service in service_list:
        coilfile = '%s%s/%s.coil' % \
            (rra_path, host, service['service_description'])
        rrd = '%s%s/%s.rrd' % (rra_path, host, service['service_description'])
        if not (os.path.exists(coilfile) and os.path.exists(rrd)):
            service['is_graphable'] = False
            continue

        coilstring = open(coilfile).read()
        coilstruct = coil.parse(coilstring)
        query = coilstruct.get('query')

        # rrdtool hates unicode strings, and Django gives us one,
        # so convert to ascii
        rrdslice = rrdtool.fetch(str(rrd),
                    '--start', '0',
                    '--end', '10',
                    '--resolution', '1',
                    'AVERAGE')

        try:
            rrdslice[1].index('_state')
            service['is_graphable'] = True
        except ValueError:
            # BUG FIX: the original called `type()` with no arguments (a
            # TypeError), and its post-loop assignment reset the flag to
            # False even after a matching trend had set it to True.
            service['is_graphable'] = False
            for key in query.keys():
                val = query.get(key)
                if type(val) == type(query) and 'trend' in val:
                    service['is_graphable'] = True
                    break
Example #43
0
def get_rrdseries(path, start_time=None, end_time=None, CF="AVERAGE"):
    _rrdargs = []
    if start_time:
        int(start_time)
        _rrdargs.extend(["-s", "{}".format(start_time)])
    if end_time:
        int(end_time)
        _rrdargs.extend(["-e", "{}".format(end_time)])

    _series = []
    _path = os.path.normpath(path)
    if not os.path.isabs(_path):
        _path = os.path.join(RRDPATH, _path)
    _inum = os.stat(_path).st_ino
    try:
        _meta, _name, _data = rrdtool.fetch(str(_path), str(CF), *_rrdargs)
        istart, iend, istep = _meta
        for i, n in enumerate(_name):
            _series.append({
                'index': '{}.{}'.format(_inum, i),
                'name': n,
                'sequence': []
            })
        for i, vals in enumerate(_data):
            _t = istart + i * istep
            for j in range(len(_name)):
                if vals[j] != None:
                    assert (_series[j].get('name') == _name[j])
                    _pair = {"t": _t, "v": vals[j]}
                    _series[j]['sequence'].append(_pair)
    except Exception as excn:
        print str(excn)
        pass
    return _series
Example #44
0
    def dump_histogram_data(self, period, step_in=None):
        """Aggregate CPU and memory usage from this object's rrd into
        per-date-group sums covering the last *period* seconds.

        :param period: window length in seconds (an RrdPeriod value).
        :param step_in: fetch resolution in seconds; defaults to one hour.
        :return: (periodic_cpu_usage, periodic_mem_usage) — defaultdicts
                 keyed by the date-group string from get_date_group().
        """
        if step_in is not None:
            step = int(step_in)
        else:
            step = int(RrdPeriod.PERIOD_1_HOUR_SEC)

        # NOTE(review): int(int(t * step) / step) is essentially t, not an
        # alignment to a step boundary (that would be int(t / step) * step)
        # — confirm the intent before changing.
        rrd_end_ts = int(int(calendar.timegm(time.gmtime()) * step) / step)
        rrd_start_ts = int(rrd_end_ts - int(period))
        rrd_result = rrdtool.fetch(self.rrd_location, 'AVERAGE', '-r',
                                   str(step), '-s', str(rrd_start_ts), '-e',
                                   str(rrd_end_ts))
        rrd_start_ts_out, _, step = rrd_result[0]
        rrd_current_ts = rrd_start_ts_out
        periodic_cpu_usage = collections.defaultdict(lambda: 0.0)
        periodic_mem_usage = collections.defaultdict(lambda: 0.0)
        # rows are (cpu, mem) consolidated data points
        for _, cdp in enumerate(rrd_result[2]):
            rrd_current_ts += step
            if len(cdp) == 2:
                try:
                    rrd_cdp_gmtime = time.gmtime(rrd_current_ts)
                    date_group = self.get_date_group(rrd_cdp_gmtime, period)
                    # round to KOA_CONFIG.db_round_decimals decimals
                    current_cpu_usage = round(100 * float(
                        cdp[0]), KOA_CONFIG.db_round_decimals) / 100
                    current_mem_usage = round(100 * float(
                        cdp[1]), KOA_CONFIG.db_round_decimals) / 100
                    periodic_cpu_usage[date_group] += current_cpu_usage
                    periodic_mem_usage[date_group] += current_mem_usage
                except:
                    # None samples fail float(); gaps are skipped silently
                    pass
        return periodic_cpu_usage, periodic_mem_usage
Example #45
0
File: views.py Project: mv/nagcat
def is_graphable(host, service):
    """Checks if service of host is graphable (has state or trend)."""
    rra_path = settings.RRA_PATH
    coilfile = '%s%s/%s.coil' % (rra_path, host, service)
    rrd = '%s%s/%s.rrd' % (rra_path, host, service)

    if os.path.exists(coilfile) and os.path.exists(rrd):
        coilstring = open(coilfile).read()
        coilstruct = coil.parse(coilstring)
        query = coilstruct.get('query')

        # rrdtool hates unicode strings, and Django gives us one,
        # so convert to ascii
        rrdslice = rrdtool.fetch(str(rrd),
                    '--start', '0',
                    '--end', '10',
                    '--resolution', '1',
                    'AVERAGE')

        try:
            rrdslice[1].index('_state')
            return True
        except ValueError:
            for key in query.keys():
                val = query.get(key)
                # BUG FIX: was `type()` with no arguments, which raises
                # TypeError; the intent is "val is a sub-struct that
                # contains a trend section".
                if type(val) == type(query) and 'trend' in val:
                    return True
            return False
    return False
Example #46
0
def GetAnalogDataFromDatabase(start,end,resolution): # resolution in seconds
    """Fetch AVERAGE analog samples between datetimes *start* and *end*.

    Returns (data_trim, time_) where data_trim is the sample array rounded
    to 3 decimals and time_ the matching datetimes, both trimmed to the
    first..last row whose channel 0 is not NaN.  Returns an empty (0, 2)
    array and [] when the range holds no usable data.
    """
    # align start and end with fetchable time
    start_time = int(time.mktime(start.timetuple())/resolution)*resolution
    end_time = int(time.mktime(end.timetuple())/resolution)*resolution
#    t1 = time.time()
#    print('fetching...')
    result = rrdtool.fetch(config.database_path_analog, "AVERAGE","-r",
                           str(resolution),"--start",str(start_time),"--end",str(end_time)) 
#    t2 = time.time()
#    print('time to fetch: {}'.format(t2-t1))
    start_r, end_r, step_r = result[0]
    # round every sample to 3 decimal places
    data = np.round(np.array(result[2],dtype=np.float64)*1000)/1000
#    t3 = time.time()
#    print('time to round: {}'.format(t3-t2))
    time_s = np.linspace(start_r,end_r-1,len(result[2]))
    
#    t4 = time.time()    
    # row indices where channel 0 holds real data (not NaN)
    index_all = np.where(np.invert(np.isnan(data[:,0])))
    if len(index_all[0]) == 0: #no useful data in timerange
        return np.zeros([0,2]),[]
    else:
        first = index_all[0][0]
        last = index_all[0][-1]
        data_trim = data[first:last+1,:]
        time_s_trim = time_s[first:last+1]
#        t5 = time.time()
#        print('time to cut: {}'.format(t5-t4))    
        time_ = [datetime.fromtimestamp(t) for t in time_s_trim] 
#        print('time to make timestamp: {}'.format(time.time()-t5))
        return data_trim, time_
Example #47
0
    def _read_rrd(self, rrd_path, hostname, device_name):
        """Main metric fetching method.

        Reads every consolidation function of the RRD at *rrd_path*, emits
        each new sample as a gauge, and remembers per (path, CF) the last
        timestamp already reported so subsequent runs only emit new data.

        :return: number of metrics emitted.
        """
        import rrdtool
        metric_count = 0

        try:
            info = rrdtool.info(rrd_path)
        except Exception:
            # Unable to read RRD file, ignore it
            self.log.exception("Unable to read RRD file at %s" % rrd_path)
            return metric_count

        # Find the consolidation functions for the RRD metrics
        c_funcs = set([v for k, v in info.items() if k.endswith('.cf')])

        for c in list(c_funcs):
            last_ts_key = '%s.%s' % (rrd_path, c)
            if last_ts_key not in self.last_ts:
                # first sighting: start reporting from "now" on the next run
                self.last_ts[last_ts_key] = time.time()
                continue

            start = self.last_ts[last_ts_key]
            last_ts = start

            try:
                fetched = rrdtool.fetch(rrd_path, c, '--start', str(start))
            except rrdtool.error:
                # Start time was out of range, skip this RRD
                # BUG FIX: the format arguments were swapped, printing the
                # path as the time and vice versa.
                self.log.warn("Time %s out of range for %s" %
                              (start, rrd_path))
                return metric_count

            # Extract the data
            (start_ts, end_ts, interval) = fetched[0]
            metric_names = fetched[1]
            points = fetched[2]
            for k, m_name in enumerate(metric_names):
                m_name = self._format_metric_name(m_name, c)
                for i, p in enumerate(points):
                    ts = start_ts + (i * interval)

                    if p[k] is None:
                        continue

                    # Save this metric as a gauge
                    val = self._transform_metric(m_name, p[k])
                    self.gauge(m_name,
                               val,
                               hostname=hostname,
                               device_name=device_name,
                               timestamp=ts,
                               dimensions=self.dimensions)
                    metric_count += 1
                    last_ts = (ts + interval)

            # Update the last timestamp based on the last valid metric
            self.last_ts[last_ts_key] = last_ts
        return metric_count
Example #48
0
def get_rrd_stats(node_id, table_id, clean_flow_id):
    """Return the last two hours of flow statistics for one clean flow.

    Each entry is {'date': 'YYYY-mm-dd HH:MM:SS', 'bytes': n, 'packets': n}
    with None samples mapped to 0.  Returns [] when the rrd file does not
    exist.
    """
    filename = str("%s/%s/%s/%s.rrd" % (rrd_dir,
                                        node_id,
                                        table_id,
                                        clean_flow_id))

    diff = 60 * 120  # window length: 120 minutes (old comment wrongly said 30)
    end = int(time.time())
    begin = end - diff

    data = []
    if os.path.isfile(filename):
        result = rrdtool.fetch(filename, 'AVERAGE', '--start', str(begin), '--end', str(end), '-r', str(10))
        values = result[2]
        ts = result[0][0]      # actual window start returned by rrdtool
        step = result[0][2]
        for value in values:
            date = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            data.append({'date': date,
                         'bytes': 0 if value[0] is None else value[0],
                         'packets': 0 if value[1] is None else value[1]})
            ts += step
    return data
Example #49
0
def compare_rrd_data_with_threshold(rrd_file_name,thresholdcontext):
    """Decide whether a VM looks idle over the last VM_UTIL_20_DAYS days.

    Scans the MIN-consolidated cpu/tx/rx samples of the VM's rrd against
    thresholdcontext['CPUThreshold'/'ReadThreshold'/'WriteThreshold'].

    Returns True when a warning email should be sent (real data present and
    no threshold ever crossed); False when the VM is in use, the rrd holds
    only None values, or the rrd file is missing.
    """
    start_time = 'now - ' + str(VM_UTIL_20_DAYS*24*60*60)
    end_time = 'now'
    rrd_file = get_rrd_file(rrd_file_name)

    rrd_logger.info("inside compare_rrd_data_with_threshold function rrd file is :"+str(rrd_file_name))
    if not os.path.exists(rrd_file):
        # BUG FIX: the original fell off the end here, returning an implicit
        # None; make the "no rrd file" outcome an explicit False (no email).
        return False

    rrd_ret = rrdtool.fetch(rrd_file, 'MIN', '--start', start_time, '--end', end_time)

    fld_info = rrd_ret[1]
    data_info = rrd_ret[2]
    # index lookups double as schema validation (ValueError if absent);
    # only cpu / tx / rx actually take part in the idle check
    cpu_idx = fld_info.index('cpu')
    mem_idx = fld_info.index('ram')
    dskr_idx = fld_info.index('dr')
    dskw_idx = fld_info.index('dw')
    nwr_idx = fld_info.index('tx')
    nww_idx = fld_info.index('rx')

    cpu_threshold='N'
    read_threshold='N'
    write_threshold='N'
    # 'Y' while every sample seen so far in that column was None
    CPUNoneIdentifier='Y'
    NWRNoneIdentifier='Y'
    NWWNoneIdentifier='Y'

    for row in data_info:
        if (row[cpu_idx] != None) :
            CPUNoneIdentifier='N'
            if int(row[cpu_idx]) > int(thresholdcontext['CPUThreshold']) : cpu_threshold='Y'

        if (row[nwr_idx] != None) :
            NWRNoneIdentifier='N'
            if int(row[nwr_idx]) > int(thresholdcontext['ReadThreshold']) : read_threshold='Y'

        if (row[nww_idx] != None) :
            NWWNoneIdentifier='N'
            if int(row[nww_idx]) > int(thresholdcontext['WriteThreshold']) : write_threshold='Y'

        if (cpu_threshold=='Y' or read_threshold=='Y' or write_threshold=='Y') :
            rrd_logger.info("Info about the VM:%s,row[cpu_idx]:%s,row[nwr_idx]:%s,row[nww_idx]:%s" %(rrd_file_name,row[cpu_idx],row[nwr_idx],row[nww_idx]))
            rrd_logger.info("Threshold is reached once.. VM:"+str(rrd_file_name)+" is in use")
            return False

    ## If only none values are read from the rrd .. do not send a warning email
    if(CPUNoneIdentifier=='N' and  NWRNoneIdentifier=='N' and NWWNoneIdentifier =='N'):
        rrd_logger.info("Returning true to send warning email as threshold is never reached for VM:%s" %(rrd_file_name) )
        return True
    rrd_logger.info("RRD capturing is not correct... returning all null values only for VM:%s" %(rrd_file_name) )
    return False
Example #50
0
def header(rrd_file):
    """
    Returns a tuple of headers in the rrd file
    rrd_file - path to rrd file.
    """
    # fetch returns ((start, end, step), names, rows); names are the headers
    fetched = rrdtool.fetch(str(rrd_file), 'MAX', '-s end-1min')
    return fetched[1]
def draw_cpu_bar(hostname):
    """Fetch the last 20 AVERAGE samples of total virtual CPU usage for
    *hostname*.  NOTE(review): Python 2 only — old-style except syntax."""
    fname = os.path.join(RRD_PREFIX, os.path.basename(hostname), "cpu", "virt_cpu_total.rrd")

    try:
        values = rrdtool.fetch(fname, "AVERAGE")[2][-20:]
    except rrdtool.error, e:
        # fall back to a single zero sample when the rrd cannot be read
        #raise InternalError(str(e))
        values = [(0.0, )]
Example #52
0
def get_bandwidth():
    """Store the most recent eth0 bandwidth sample into the global stats
    dict *j* (second-to-last row, third data source)."""
    fn = "/var/lib/pnp4nagios/mesonet/eth0.rrd"

    # last 5 minutes of AVERAGE samples up to the rrd's last update time
    ts = rrdtool.last(fn)
    _period, _names, samples = rrdtool.fetch(
        fn, "AVERAGE", "-s", str(ts - 300), "-e", str(ts))
    j['stats']['bandwidth'] = samples[-2][2]
Example #53
0
def show_widget_header(widget):
    """Render the widget's rrd data-source names as an HTML <th> row,
    with underscores shown as spaces."""
    rrd_path = widget.rrd.path()
    info = rrdtool.info(rrd_path)
    last_update = str(info["last_update"])

    current = rrdtool.fetch(rrd_path, "-s", last_update, "-e", "s+1", "LAST")
    titles = [name.replace("_", " ") for name in current[1]]
    return "<th>" + "</th><th>".join(titles) + "</th>"