示例#1
0
    def info(self, context, target=None):
        """Describe the metrics available for a target host.

        Scans <FLAGS.canary_rrdpath>/<target>/*/*.rrd and returns a dict
        mapping metric name -> {from_time, to_time, cfs, resolutions}.

        target: host name; defaults to this machine's FQDN when None.
        """
        # Figure out the target host.
        if target is None:
            target = socket.getfqdn()
        rrdpath = os.path.join(FLAGS.canary_rrdpath, target)

        # Grab available metrics (one .rrd per plugin[/unit]/key).
        available = glob.glob(os.path.join(rrdpath, "*/*.rrd"))
        metrics = {}

        # Compile once, outside the loop. re.escape() keeps the match
        # correct even if rrdpath contains regex metacharacters ('.', '+').
        name_re = re.compile(
            r"^%s/([^\/-]+)(-([^\/]+))?/([^\.]+)\.rrd$" % re.escape(rrdpath))
        pdp_re = re.compile(r"^rra\[\d+\]\.pdp_per_row$")
        cf_re = re.compile(r"^rra\[\d+\]\.cf")

        for filename in available:
            # NOTE: the rrdtool bindings seem to barf on unicode paths,
            # so force a plain string here.
            filename = str(filename)

            m = name_re.match(filename)
            if not m:
                continue
            plugin = m.group(1)
            unit = m.group(3)
            key = m.group(4)

            # NOTE: At this point we construct a metric name that is
            # equivalent to how we deconstruct the name above. It's
            # important that these two operations are symmetric for
            # the sake of a user's sanity.
            if unit:
                metric = "%s[%s].%s" % (plugin, unit, key)
            else:
                metric = "%s.%s" % (plugin, key)
            entry = metrics.setdefault(metric, {})

            entry["from_time"] = rrdtool.first(filename)
            entry["to_time"] = rrdtool.last(filename)

            step = 1
            pdps = []
            cfs = []
            for (k, v) in rrdtool.info(filename).items():
                if k == "step":
                    step = int(v)
                elif pdp_re.match(k):
                    pdps.append(int(v))
                elif cf_re.match(k):
                    cfs.append(v)

            # Deduplicate and sort. sorted() (instead of list.sort() on
            # the result of map()) keeps this working on Python 3, where
            # map() returns an iterator.
            entry["cfs"] = sorted(set(cfs))
            entry["resolutions"] = sorted(step * x for x in set(pdps))

        return metrics
示例#2
0
File: rrd.py  Project: vigilo/vigirrd
 def getStartTime(self):
     """Return the timestamp of the first non-null entry in the RRD."""
     start = rrdtool.first(self.filename)
     stop = rrdtool.last(self.filename)
     cf = self.getPeriodCF(start)
     try:
         info, _ds_rrd, rows = rrdtool.fetch(self.filename,
                 cf, "--start", str(start), "--end", str(stop))
     except rrdtool.error:
         # Widen the window by an hour on each side to absorb
         # daylight-saving-time shifts, then retry once.
         start = start - 3600
         stop = stop + 3600
         info, _ds_rrd, rows = rrdtool.fetch(self.filename,
                 cf, "--start", str(start), "--end", str(stop))
     step = info[2]
     # Advance one step per all-null row until we hit real data.
     for row in rows:
         if any(value is not None for value in row):
             break
         start = start + step
     if start >= stop:
         raise RRDError("The RRD file looks empty !")
     return start
示例#3
0
 def makeGraph(self,nameDB,nameGraph,title):
     """Render *nameDB* as a red line graph covering the RRD's full span."""
     png_name = str(nameGraph + ".png")
     start_ts = str(rrdtool.first(nameDB))
     end_ts = str(rrdtool.last(nameDB))
     definition = str("DEF:" + nameGraph + "=" + nameDB + ":" + 'barrame' + ':AVERAGE')
     line_spec = str("LINE1:" + nameGraph + "#ff0000")
     rrdtool.graph(png_name, "--start", start_ts, "--end", end_ts,
                   definition, line_spec)
示例#4
0
File: rrdtools.py  Project: pcuzner/ccsp
    def rrd_boundary(self, boundary):
        """Return midnight of the day holding the RRD's first or last sample.

        boundary: 'first' selects the oldest sample; anything else the newest.
        """
        fetcher = rrdtool.first if boundary == 'first' else rrdtool.last
        stamp = fetcher(self.filename)
        moment = datetime.datetime.fromtimestamp(stamp)
        # Truncate to the start of that day.
        return moment.replace(hour=0, minute=0, second=0, microsecond=0)
 def getCurrentTimeStamp( self, dataPointName ):
     """Return the RRD timestamp corresponding to the current moment."""
     rrd_file = zenPath('perf', 'Daemons', self.monitor,
         '%s_%s' % (self.daemonName, dataPointName + '.rrd'))
     origin = rrdtool.first(rrd_file)
     # Snap "now" down to the nearest cycle boundary measured from the
     # file's first timestamp.
     whole_cycles = int((time.time() - origin) / self.cycleTime)
     return whole_cycles * self.cycleTime + origin
 def getCurrentTimeStamp(self, dataPointName):
     """Return the RRD timestamp for "now", aligned to the cycle grid."""
     fileName = zenPath('perf', 'Daemons', self.monitor,
                        '%s_%s' % (self.daemonName, dataPointName + '.rrd'))
     base = rrdtool.first(fileName)
     elapsed = time.time() - base
     # Floor the elapsed time to a whole number of cycles, then rebase.
     aligned = int(elapsed / self.cycleTime) * self.cycleTime
     return aligned + base
    def __init__(self, path, name, date_start=None, date_end=None):
        """Load one RRD file plus its owner's metadata.

        path: directory holding the .rrd file; its basename is the user
            hash used to locate the user's metadata directory.
        name: the .rrd file name.
        date_start, date_end: optional POSIX timestamps bounding the fetch
            window; a missing bound falls back to the RRD's own first/last
            timestamp.
        Raises Exception when the resolved window is inverted or when
        rrdtool.fetch fails.
        """

        self.rrd_name = name

        # Use the RRD's first timestamp when no start was given, or when
        # the requested start lies beyond the newest stored sample.
        if date_start is None or (date_start > rrdtool.last(str(os.path.join(path, name)))):
            self.date_start = str(rrdtool.first(str(os.path.join(path, name))))
        else:
            self.date_start = str(date_start)

        if date_end is None:
            self.date_end = str(rrdtool.last(str(os.path.join(path, name))))
        else:
            self.date_end = str(date_end)

        # Bounds are stored as strings; compare numerically.
        if float(self.date_start) > float(self.date_end):
            raise Exception(
                "Invalid date_start={0} and date_end={1}".format(str(datetime.fromtimestamp(float(self.date_start))),
                                                                 str(datetime.fromtimestamp(float(self.date_end)))))

        # The directory's basename is the user hash.
        self.user_hash = os.path.split(path)[1]

        # users/user/<first 2 hash chars>/<hash> under the tree root.
        self.user_path = os.path.join(
            self.get_first_part_path(path, 3),
            "users",
            "user",
            self.user_hash[:2],
            self.user_hash
        )

        self.uuid = self.get_uuid_from_file(self.user_path)
        self.age = 0
        self.sn = self.get_machine_sn_from_file(self.user_path)
        # NOTE(review): "unkown" looks like a typo for "unknown"; confirm
        # nothing downstream matches on this exact literal before fixing.
        self.school = "unkown"

        log.debug('*******************************************')
        log.debug('     creating a RRD instance               ')
        log.debug('start: %s', str(datetime.fromtimestamp(float(self.date_start))))
        log.debug('end: %s', str(datetime.fromtimestamp(float(self.date_end))))
        log.debug('PATH: %s', path)
        log.debug('RRD NAME: %s', name)
        log.debug('\n')
        try:
            # 60-second resolution, AVERAGE consolidation over the window.
            self.rrd = rrdtool.fetch(str(os.path.join(path, name)), 'AVERAGE', '-r 60', '-s ' + self.date_start,
                                     '-e ' + self.date_end)
        except Exception as e:
            raise Exception("rrdtool.fetch: {0}".format(e))
        log.debug('                   DS                       ')
        # Map each known DS name to its column index in the fetched data.
        for item in self.DS.keys():
            idx = self.get_ds_index(item)
            if idx != -1:
                self.DS[item] = idx
            else:
                log.warning('DS %s not found in header of %s rrd file', item, name)
        log.debug('***********************************************')
示例#8
0
def mincuad(archivo, inicio, final, umbral):
    """Least-squares linear fit over an RRD series; predict when *umbral*
    (the threshold) will be reached.

    archivo: path to the .rrd file.
    inicio / final: timestamps formatted '%Y-%m-%dT%H:%M:%S'; an empty
        *final* means "now".
    umbral: threshold value to solve the fitted line for.
    Returns the predicted crossing time as an ISO-formatted string, and
    produces prediction graphs as a side effect.
    """
    struct_fecha_inicio = time.strptime(inicio, '%Y-%m-%dT%H:%M:%S')
    iniciosegundos = int(time.mktime(struct_fecha_inicio))
    if final == "":
        finalsegundos = int(time.time())
    else:
        struct_fecha_final = time.strptime(final, '%Y-%m-%dT%H:%M:%S')
        finalsegundos = int(time.mktime(struct_fecha_final))

    # Fetch the averaged samples. BUG FIX: the end flag must be '-e';
    # it was previously passed as the bare string 'e'.
    [startStop, names, values] = rrdtool.fetch(archivo, 'AVERAGE', '-s',
                                               str(iniciosegundos), '-e',
                                               str(finalsegundos))

    inicioMuestras = rrdtool.first(archivo)
    steps = startStop[2]  # seconds per sample
    div = obtenerDiv(iniciosegundos, inicioMuestras, steps)
    # Keep only rows that actually hold a value.
    values = [v[0] for v in values if v[0] is not None]
    muestras = len(values)
    # Build the (x, y) series for the fit.
    [x, y] = asignacion(values, div, inicioMuestras, steps)
    sumax = sum(x)
    sumay = sum(y)

    # NOTE(review): muestras == 0 (no data in range) raises
    # ZeroDivisionError here; callers should guard or catch.
    mediax = sumax / muestras
    mediay = sumay / muestras

    xmediax = obtenerValorMedia(x, mediax)
    ymediay = obtenerValorMedia(y, mediay)

    xmediaxymediay = producto(xmediax, ymediay)
    xmediaxcuadrado = producto(xmediax, xmediax)

    # Least-squares slope (m) and intercept (b).
    m = sum(xmediaxymediay) / sum(xmediaxcuadrado)
    b = mediay - (m * mediax)

    # Solve umbral = m*t + b for t.
    resultado = int((umbral - b) / m)

    fechaumbral = time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(resultado))

    tiempoInicialPredicciones = x[0]
    [predecidosx,
     predecidosy] = obtenerPredecidos(m, b, tiempoInicialPredicciones, umbral,
                                      steps)

    # Write the predicted series into its own RRD and graph both.
    archivorrdpredecido = crearGrafica(predecidosy, tiempoInicialPredicciones,
                                       steps, names, archivo)
    actualizarGrafica(archivorrdpredecido, predecidosx, predecidosy)
    graficar(archivo, tiempoInicialPredicciones,
             predecidosx[len(predecidosx) - 1], umbral)

    return fechaumbral
def rrdfetch_wrapper(rrd_db, cf, resolution, start=-3600, end=-1):
    """Fetch (timestamp, value) rows from *rrd_db*.

    cf: consolidation function name; resolution: step in seconds.
    start/end: epoch seconds, or negative offsets relative to the newest
    sample in the database (defaults select roughly the last hour).
    Returns ((start, end, step), [[timestamp, value], ...]).
    """
    # Validate argument types up front. isinstance() is the idiomatic
    # check; "type(x) is T" needlessly rejects subclasses.
    assert isinstance(rrd_db, str)
    assert isinstance(cf, str)
    assert isinstance(resolution, int)
    assert isinstance(start, int)
    assert isinstance(end, int)

    assert start < end

    last = rrdtool.last(rrd_db)
    first = rrdtool.first(rrd_db)

    # Negative bounds are offsets from the newest sample.
    if start < 0:
        start = last + start
    if end < 0:
        end = last + end

    # Requested window lies entirely outside the stored data.
    if start >= last or end <= first:
        return (start, end, resolution), []

    # Clamp the window to the stored range.
    if start < first:
        start = first
    if end > last:
        end = last

    data = rrdtool.fetch(rrd_db, cf, "-r", str(resolution), "-e", str(end),
                         "-s", str(start))

    # rrdtool reports the RRA window edges; shift by one step and drop the
    # final row to match the native python binding's alignment.
    rstep = data[0][2]
    rstart = data[0][0] + rstep
    rend = data[0][1] - rstep
    rdata = []
    timestamp = rstart
    for row in data[2][0:-1]:
        rdata.append([timestamp, row[0]])
        timestamp += rstep

    return (rstart, rend, rstep), rdata
示例#10
0
    def get_list(self):
        """
        list vm stats with start and end times

        Returns a dict mapping rrd basename -> [first_ts, last_ts].
        Files rrdtool cannot read are logged and skipped.
        """
        f = os.listdir(settings.PATH_TO_RRD)

        rrds = {}
        for rrd in f:
            try:
                t = []
                t.append(rrdtool.first(settings.PATH_TO_RRD + rrd))
                t.append(rrdtool.last(settings.PATH_TO_RRD + rrd))
                rrds.update({os.path.splitext(rrd)[0]: t})
            # FIX: "except Exception, e" is Python-2-only syntax; the
            # "as" form works on both Python 2.6+ and 3.
            except Exception as e:
                log.error(0, 'stat_error %s %s' % (rrd, e))
        # FIX: the collected dict was built but never returned.
        return rrds
示例#11
0
def get_duration_points(rrd_file, graph_period):
    """Return (actual_start, end) timestamps for *rrd_file*.

    rrdtool's 'first' can point at an empty region of the archive, so the
    data is fetched and scanned for the first row holding a real value.
    """
    start = rrdtool.first(rrd_file, '--rraindex', graph_period)
    end = rrdtool.last(rrd_file)

    fetch_cmd = ['rrdtool', 'fetch', rrd_file, 'AVERAGE',
                 '--start', str(start), '--end', str(end)]
    output = subprocess.check_output(fetch_cmd)

    return find_actual_start(output), end
示例#12
0
File: RRDFile.py  Project: cefmmaku/Redes
def graficarRRD(nombre, i):
    """Plot files/<nombre>.rrd to images/<nombre>.png as a green area graph."""
    rrd_path = "files/" + nombre + ".rrd"
    tiempo_final = int(rrdtool.last(rrd_path))
    tiempo_inicial = int(rrdtool.first(rrd_path))
    # CPU graphs are labelled in percent, everything else in bytes.
    if nombre == C.CPU_NAME:
        lateral = "--vertical-label=%"
    else:
        lateral = "--vertical-label=Bytes"
    rrdtool.graphv("images/" + nombre + ".png",
                   "--start", str(tiempo_inicial + 6800),
                   "--end", str(tiempo_final),
                   lateral,
                   "DEF:" + nombre + "=files/" + nombre + ".rrd:" + "cosa" + ":AVERAGE",
                   "AREA:" + nombre + "#00FF00:" + nombre)
示例#13
0
File: monia.py  Project: cc1-cloud/cc1
    def get_list(self):
        """
        list vm stats with start and end times

        Returns a dict mapping rrd basename -> [first_ts, last_ts];
        unreadable files are logged and skipped.
        """
        f = os.listdir(settings.PATH_TO_RRD)

        rrds = {}
        for rrd in f:
            try:
                t = []
                t.append(rrdtool.first(settings.PATH_TO_RRD + rrd))
                t.append(rrdtool.last(settings.PATH_TO_RRD + rrd))
                rrds.update({os.path.splitext(rrd)[0]: t})
            # FIX: py2-only "except Exception, e" replaced with the "as"
            # form, valid on Python 2.6+ and 3.
            except Exception as e:
                log.error(0, 'stat_error %s %s' % (rrd, e))
        # FIX: the result dict was built but never returned.
        return rrds
示例#14
0
File: stats.py  Project: diraol/of_stats
    def _calc_start_end(start, end, rrd):
        """Calculate start and end values for fetch command."""
        # Use integers to calculate resolution
        if end == 'now':
            end = int(time.time())
        if start == 'first':
            with settings.rrd_lock:
                start = rrdtool.first(rrd)

        # For RRDtool to include start and end timestamps.
        if isinstance(start, int):
            start -= 1
        if isinstance(end, int):
            end -= 1

        return start, end
示例#15
0
 def __init__(self, name_host, description, path_to_database, file_name,
              start_point, end_point, type_command, height, width):
     """Capture graph parameters and probe the RRD file's time range."""
     self.name_host = name_host
     self.description = description
     self.path_to_database = path_to_database
     self.file_name = file_name
     self.file = "%s/%s" % (path_to_database, file_name)
     self.start_point = start_point
     self.end_point = end_point
     # Probe the database for its first/last/latest-update timestamps.
     self.first = rrdtool.first(self.file)
     self.last = rrdtool.last(self.file)
     self.lastupdate = rrdtool.lastupdate(self.file)
     self.type_command = type_command
     self.height = height
     self.width = width
     # NOTE(review): stores the attribute itself (no call) — presumably
     # parse_ds is a property; confirm.
     self.list_ds = self.parse_ds
     self.list_menu = []
示例#16
0
def get_duration_points(rrd_file, graph_period):
    """Return (actual_start, end) for *rrd_file*.

    The 'first' timestamp reported by rrdtool may cover a region with no
    data, so the series is fetched and the first populated timestamp is
    located from the command output.
    """
    start = rrdtool.first(rrd_file, '--rraindex', graph_period)
    end = rrdtool.last(rrd_file)

    output = subprocess.check_output([
        'rrdtool', 'fetch', rrd_file, 'AVERAGE',
        '--start', str(start),
        '--end', str(end),
    ])

    actual_start = find_actual_start(output)
    return actual_start, end
示例#17
0
    def _calc_start_end(start, end, n_points, rrd):
        """Calculate start and end values for fetch command."""
        # Use integers to calculate resolution
        if end is None:
            end = int(time.time())
        if start is None:  # Latest n_points
            start = end - n_points * settings.STATS_INTERVAL
        elif start == 'first':  # Usually empty because 'first' is too old
            with settings.RRD_LOCK:
                start = rrdtool.first(rrd)

        # For RRDtool to include start and end timestamps.
        if isinstance(start, int):
            start -= 1
        if isinstance(end, int):
            end -= 1

        return start, end
示例#18
0
def get_first_timestamp_rrd(rrdfile_name):
    """
    get_first_timestamp_rrd:
    get the first timestamp which have a value of numbers

    Args:
        rrdfile_name (string): it is the name of your file/rrd

    Returns:
        a dict with keys 'first_timestamp' (the rrd's first timestamp, or
        None on failure) and 'status' (a success/error message).
    """
    # BUG FIX: db_first_timestamp must exist even when rrdtool.first()
    # raises, otherwise building the result dict below hits a NameError.
    # (The old unused `first_timestamp = []` local was removed.)
    db_first_timestamp = None
    try:
        db_first_timestamp = rrdtool.first(rrdfile_name)
        get_first_status_msg = f"success: first timestamp of {rrdfile_name} was found"
    except Exception as e:
        get_first_status_msg = f"error: get_first_timestamp_rrd({rrdfile_name}) was not possible: {sys.exc_info()[1]} \n{e}"
    get_first_msg = dict()
    get_first_msg['first_timestamp'] = db_first_timestamp
    get_first_msg['status'] = get_first_status_msg
    return get_first_msg
示例#19
0
            log.exception(0, e)
            return 0

        stats = []
        res = []
        step = ds_info['step']

        for key in ds_info.keys():
            if 'index' in key:
                if key[0:2] == "ds":
                    ds_name = key[3:]
                    ds_name = ds_name[0:ds_name.find(']')]
                    stats.append(ds_name)
            if 'pdp_per_row' in key:
                res.append(ds_info[key] * step)
        first = rrdtool.first(filepath)
        last = rrdtool.last(filepath)

        return {
            'stats': stats,
            'resolutions': res,
            'first': first,
            'last': last
        }

    def get_vm_stats(self,
                     vm,
                     names,
                     start="-5min",
                     end="now",
                     resolution="10"):
示例#20
0
 def getFirstDate(datafile_name):
     """Return the first timestamp stored in *datafile_name*."""
     return rrdtool.first(datafile_name)
示例#21
0
def rrd_fetch(file, cf):
    """Fetch the whole series of *file* using consolidation function *cf*."""
    # Span the fetch over the RRD's entire recorded range.
    bounds = (str(rrdtool.first(file)), str(rrdtool.last(file)))
    return rrdtool.fetch(file, cf, "-s", bounds[0], "-e", bounds[1])
示例#22
0
def ApplyHoltData(filename=None):
    """Rebuild *filename* in place with a Holt-Winters HWPREDICT RRA added,
    replaying the existing AVERAGE data into the new file.

    Silently returns (no-op) when filename is falsy, when rrdtool.info
    fails, or when the file already contains Holt-Winters RRAs.
    Python 2 era code (dict.keys().sort()); see NOTE(review) markers.
    """
    if not filename:
        return

    # Assign names for files
    old_rrd = filename
    old_rrd_renamed = old_rrd + ".old"
    new_rrd = old_rrd

    # Regexs to be used later
    dssearch = re.compile('^ds\[(\S+)\]\.type')
    rrasearch = re.compile('^rra\[(\d+)\]\.(\S+)')
    datasearch = re.compile('(AVERAGE|MIN|MAX|LAST)')
    holtsearch = re.compile(
        '(HWPREDICT|SEASONAL|DEVSEASONAL|DEVPREDICT|FAILURES)')
    ds_type_search = re.compile('(GAUGE|COUNTER|DERIVE|ABSOLUTE)')

    datasources = []
    data_rras = []
    holt_rra = {}
    rra = []

    try:
        data = rrdtool.info(old_rrd)
    except:
        # Unreadable/absent RRD: bail out quietly (best-effort tool).
        return

    start = rrdtool.first(old_rrd)
    step = data['step']

    # Bail if any Holt-Winters RRA already appears in the info dump.
    HOLTWINTERS = holtsearch.search(str(data))
    if HOLTWINTERS:
        return

    # Move the old file out of the way
    shutil.move(old_rrd, old_rrd_renamed)
    # NOTE(review): keys() + .sort() only works on Python 2, where keys()
    # returns a list.
    sorted_keys = data.keys()
    sorted_keys.sort()
    for key in sorted_keys:
        value = data[key]
        name_match = dssearch.search(key)
        if name_match:
            # ds[<name>].type entry: remember the datasource name.
            name = name_match.groups()[0]
            datasources.append(name)
        else:
            rra_match = rrasearch.search(key)
            if rra_match:
                rranum, rraprop = rra_match.groups()
                if rraprop == 'cf':
                    # Partition RRAs: plain data CFs vs Holt-Winters ones.
                    data_match = datasearch.search(value)
                    if data_match:
                        data_rras.append(rranum)
                    else:
                        holt_match = holtsearch.search(value)
                        if holt_match:
                            holt_rra[value] = rranum

    # Rebuild a DS:<name>:<type>:<args> spec for each datasource.
    for key in datasources:
        type_str = "ds[%s].type" % key
        min_str = "ds[%s].min" % key
        max_str = "ds[%s].max" % key
        minimal_heartbeat_str = "ds[%s].minimal_heartbeat" % key
        type = data[type_str]
        type_match = ds_type_search.search(type)
        if type_match:
            min_str = "ds[%s].min" % key
            min = data[min_str]
            if not min:
                min = 'U'

            max_str = "ds[%s].max" % key
            max = data[max_str]
            if not max:
                max = 'U'

            minimal_heartbeat_str = "ds[%s].minimal_heartbeat" % key
            minimal_heartbeat = data[minimal_heartbeat_str]
            args = "%s:%s:%s" % (minimal_heartbeat, min, max)
        elif type == 'COMPUTE':
            cdef_str = "ds[%s].cdef" % key
            args = cdef_str
        else:
            continue

        # NOTE(review): ds_string is overwritten on every iteration, so
        # only the LAST datasource reaches rrdtool.create() below —
        # looks like a bug for multi-DS files; confirm.
        ds_string = 'DS:%s:%s:%s' % (key, type, args)

    # Reconstruct each plain-data RRA spec from the info dump.
    for key in data_rras:
        cf_string = "rra[%s].cf" % key
        xff_string = "rra[%s].xff" % key
        pdp_per_row_string = "rra[%s].pdp_per_row" % key
        rows_string = "rra[%s].rows" % key
        rra.append('RRA:%s:%s:%s:%s' % (data[cf_string], data[xff_string], \
            data[pdp_per_row_string], data[rows_string]))

    # New Basic holt winters setup .. based on assumption we will tune
    # parameters later
    rra.append('RRA:HWPREDICT:1440:0.1:0.0035:288')
    # NOTE(review): this args list is built but never used (dead code).
    args=['foo.rrd','--start',str(start),'--step',str(step),' '.join(datasources),\
        ' '.join(rra)]

    # NOTE(review): joining with ", " and then whitespace-splitting leaves
    # a trailing comma on every RRA token passed to create() — verify.
    rra_string = ', '.join(rra)
    rrdtool.create(new_rrd, "--start", str(start), "--step", str(step),
                   ds_string, *rra_string.split())

    # Copy the data into the new file
    startStop, names, values = rrdtool.fetch(old_rrd_renamed, 'AVERAGE',
                                             '--start', str(start))
    #print startStop[0]
    # Note: this local deliberately shadows the `time` module name below.
    time = start
    #print step, time
    # Replay each non-empty sample into the rebuilt file.
    for value in values:
        time += step
        if value[0]:
            data_fmt = "%s:%s" % (time, value[0])
            log.warn(data_fmt)
            rrdtool.update(new_rrd, data_fmt)
示例#23
0
 def getFirstDate(datafile_name):
   """Return the first timestamp recorded in *datafile_name*."""
   return rrdtool.first(datafile_name)
示例#24
0
def realizar_prediccion(fecha_inicio, hora_inicio, fecha_termino, hora_termino,
                        agente, baseRRD, variable, limInf, limSup):
    """Graph *variable* from an agent's RRD with a least-squares trend line
    and print when the trend is predicted to cross limSup / limInf.

    Python 2 code (print statements). Writes grafica<variable>.png and a
    debug XML dump; limInf/limSup == -1 disables the respective limit line.
    """
    dirSinPuntos = agente.replace(
        ".", "_")  # Replace the dots of the IP with underscores
    directorio = os.getcwd(
    ) + "/" + dirSinPuntos  # Directory where the .png graphs are stored
    baseRRD = directorio + "/" + baseRRD

    rrdtool.dump(baseRRD, baseRRD +
                 "Test.xml")  # Do NOT remove: used to debug this function

    propiedades = []
    posix_inicio = calculo_fecha_posix(fecha_inicio, hora_inicio)
    posix_termino = calculo_fecha_posix(fecha_termino, hora_termino)

    print 'Hr inicial: -> ' + str(posix_inicio)
    print 'Hr termino: -> ' + str(posix_termino)

    definicion = "DEF:" + variable + "=" + baseRRD + ":" + variable + ":AVERAGE"  # from Cesar's 'procesadores' task
    color = "AREA:" + variable + "#CC99FF" + ":" + variable

    # Clamp the requested start to the data actually stored in the RRD
    if (
            posix_inicio < rrdtool.first(baseRRD)
    ):  # Validate that the given times lie within the RRD's range
        posix_inicio = rrdtool.first(baseRRD)

    #if(posix_termino>rrdtool.last(baseRRD)):
    # print "the newest sample of "+baseRRD+" is from "+str(rrdtool.last(baseRRD)), "using that one"
    #posix_termino=rrdtool.last(baseRRD)

    # Limit lines to draw on the graph, when enabled
    linea_lim_inf = ""
    linea_lim_sup = ""

    if (limInf != -1):  # Draw the lower limit line
        linea_lim_inf = "LINE1:" + str(limInf) + "#6600CC"

    if (limSup != -1):  # Draw the upper limit line
        linea_lim_sup = "LINE1:" + str(limSup) + "#006666"

    # NOTE(review): limSup/limInf are concatenated as strings in the CDEFs
    # below yet compared with the int -1 above — confirm the expected
    # argument types.
    ret = rrdtool.graph(
        "grafica" + variable + ".png",
        "--start",
        str(int(posix_inicio)),
        "--end",
        str(int(posix_termino) + TIEMPO_EXTRA),
        "--vertical-label=" + variable,
        # NOTE(review): --vertical-label is passed twice; confirm intent.
        "--vertical-label=" + "Uso de " + variable,
        "--lower-limit=0",
        "--upper-limit=100",
        "--rigid",
        definicion,
        color,
        linea_lim_inf,
        linea_lim_sup,
        # Least-squares slope (a) and intercept (b) of the observed series.
        "VDEF:a=" + variable + ",LSLSLOPE",
        "VDEF:b=" + variable + ",LSLINT",
        "CDEF:avg2=" + variable + ",POP,a,COUNT,*,b,+",
        "CDEF:pred=avg2,0," + limSup + ",LIMIT",
        "VDEF:maxpred=pred,MAXIMUM",
        "GPRINT:maxpred:%c:strftime",
        "GPRINT:maxpred:%5.4lf",
        "PRINT:maxpred:%c:strftime",  # PRINT exposes the value in ret for use from Python
        "PRINT:maxpred:%5.4lf",
        "CDEF:predB=avg2,0," + limInf + ",LIMIT",
        "VDEF:minpred=predB,MINIMUM",
        "GPRINT:minpred:%c:strftime",
        "GPRINT:minpred:%5.4lf",
        "PRINT:minpred:%c:strftime",
        "PRINT:minpred:%5.4lf",
        "LINE2:avg2#99FF33")
    print ret

    # The four PRINT directives above land in ret[2] in declaration order.
    momentoSUP = ret[2][0]
    valorSUP = ret[2][1]
    momentoINF = ret[2][2]
    valorINF = ret[2][3]

    # enviaAlerta("Prediccion lim.superior: "+momentoSUP+" Valor:"+valorSUP+
    #"Prediccion lim.inferior: "+momentoINF+" Valor:"+valorINF, "grafica"+ variable + ".png")

    print momentoSUP + valorSUP + momentoINF + valorINF
示例#25
0
def sum_data(options):
    """ summarize collected data

    Returns a dict with ts_start/ts_end plus average/max of the I, U and P
    series (presumably current/voltage/power — confirm) and total energy
    P_tot. A cached <dbfile>.summary JSON is returned when it is newer
    than the database; otherwise it is (re)created. Exits the process
    with status 3 when no usable data exists.
    """

    # check if summary-file exists and is newer than database
    sumfile = os.path.splitext(options.dbfile)[0] + ".summary"
    if os.path.exists(sumfile) and (os.path.getmtime(options.dbfile) <=
                                    os.path.getmtime(sumfile)):
        # summary is current
        f = open(sumfile, "r")
        result = json.load(f)
        f.close()
        return result
    else:
        options.logger.msg("INFO", "creating summary-file: %s" % sumfile)

    # create summary
    try:
        if options.ts_start > 0:
            first = options.ts_start
        else:
            # either no data was collected or the summary file was deleted
            options.logger.msg("WARN", "trying to recreate start timepoint")
            first = rrdtool.first(options.dbfile)
            options.logger.msg("INFO", "estimated start is %r" % first)
        last = rrdtool.last(options.dbfile)
    except Exception as e:
        options.logger.msg("TRACE", traceback.format_exc())
        options.logger.msg("ERROR", "no data in database: %s" % options.dbfile)
        sys.exit(3)

    # extract avg and max values via VDEFs evaluated by rrdtool graphv
    I_def = "DEF:I=%s:I:AVERAGE" % options.dbfile
    I_avg = "VDEF:I_avg=I,AVERAGE"
    I_max = "VDEF:I_max=I,MAXIMUM"

    U_def = "DEF:U=%s:U:AVERAGE" % options.dbfile
    U_avg = "VDEF:U_avg=U,AVERAGE"
    U_max = "VDEF:U_max=U,MAXIMUM"

    P_def = "DEF:P=%s:P:AVERAGE" % options.dbfile
    P_avg = "VDEF:P_avg=P,AVERAGE"
    P_max = "VDEF:P_max=P,MAXIMUM"

    args = [
        "rrdtool", "graphv", options.dbfile, "--start",
        str(first), "--end",
        str(last), I_def, I_avg, I_max, U_def, U_avg, U_max, P_def, P_avg,
        P_max, "PRINT:I_avg:%8.4lf", "PRINT:I_max:%8.4lf",
        "PRINT:U_avg:%8.4lf", "PRINT:U_max:%8.4lf", "PRINT:P_avg:%6.2lf",
        "PRINT:P_max:%6.2lf"
    ]
    # NOTE(review): graphv's first argument is normally the graph target,
    # and args[0:3] are subprocess-style tokens sliced off; confirm this
    # call matches the rrdtool binding in use.
    info = rrdtool.graphv(options.dbfile, args[3:])
    summary = {
        "ts_start": first,
        "ts_end": last,
        "U_avg": float(info['print[2]']),
        "U_max": float(info['print[3]']),
        "P_avg": float(info['print[4]']),
        "P_max": float(info['print[5]']),
        # Total energy: duration (s) * average power / 3600 -> watt-hours.
        "P_tot": round((last - first + 1) * float(info['print[4]']) / 3600, 2)
    }
    try:
        if options.voltage:
            summary["I_avg"] = float(info['print[0]'])
            summary["I_max"] = float(info['print[1]'])
        else:
            summary["I_avg"] = int(float(info['print[0]']))
            summary["I_max"] = int(float(info['print[1]']))
    except:
        # NOTE(review): bare except silently drops missing I readings.
        pass

    # write results to file
    f = open(sumfile, "w")
    json.dump(summary, f, indent=2, sort_keys=True)
    f.close()

    return summary
示例#26
0
File: monia.py  Project: cc1-cloud/cc1
            log.exception(0, e)
            return 0

        stats = []
        res = []
        step = ds_info['step']

        for key in ds_info.keys():
            if 'index' in key:
                if key[0:2] == "ds":
                    ds_name = key[3:]
                    ds_name = ds_name[0: ds_name.find(']')]
                    stats.append(ds_name)
            if 'pdp_per_row' in key:
                res.append(ds_info[key] * step)
        first = rrdtool.first(filepath)
        last = rrdtool.last(filepath)

        return {'stats': stats, 'resolutions': res, 'first': first, 'last': last}

    def get_vm_stats(self, vm, names, start="-5min", end="now", resolution="10"):
        if not check_stat_exists(vm):
            raise CMException('stat_not_exists')

        res = []
        filename = get_path(vm)
        info, ds_rrd, data = rrdtool.fetch(filename, "AVERAGE", "--start", str(start), "--end", str(end), "--resolution", str(resolution))
        start_rrd = info[0]
        end_rrd = info[1]
        step = info[2]
        ts = start_rrd
示例#27
0
def ApplyHoltData(filename=None):
    """Rebuild *filename* in place with a Holt-Winters HWPREDICT RRA added,
    replaying the existing AVERAGE data into the new file.

    Silently returns (no-op) when filename is falsy, when rrdtool.info
    fails, or when the file already contains Holt-Winters RRAs.
    Python 2 era code (dict.keys().sort()); see NOTE(review) markers.
    """
    if not filename:
        return

    # Assign names for files
    old_rrd = filename
    old_rrd_renamed = old_rrd + ".old"
    new_rrd = old_rrd

    # Regexs to be used later
    dssearch = re.compile("^ds\[(\S+)\]\.type")
    rrasearch = re.compile("^rra\[(\d+)\]\.(\S+)")
    datasearch = re.compile("(AVERAGE|MIN|MAX|LAST)")
    holtsearch = re.compile("(HWPREDICT|SEASONAL|DEVSEASONAL|DEVPREDICT|FAILURES)")
    ds_type_search = re.compile("(GAUGE|COUNTER|DERIVE|ABSOLUTE)")

    datasources = []
    data_rras = []
    holt_rra = {}
    rra = []

    try:
        data = rrdtool.info(old_rrd)
    except:
        # Unreadable/absent RRD: bail out quietly (best-effort tool).
        return

    start = rrdtool.first(old_rrd)
    step = data["step"]

    # Bail if any Holt-Winters RRA already appears in the info dump.
    HOLTWINTERS = holtsearch.search(str(data))
    if HOLTWINTERS:
        return

    # Move the old file out of the way
    shutil.move(old_rrd, old_rrd_renamed)
    # NOTE(review): keys() + .sort() only works on Python 2.
    sorted_keys = data.keys()
    sorted_keys.sort()
    for key in sorted_keys:
        value = data[key]
        name_match = dssearch.search(key)
        if name_match:
            # ds[<name>].type entry: remember the datasource name.
            name = name_match.groups()[0]
            datasources.append(name)
        else:
            rra_match = rrasearch.search(key)
            if rra_match:
                rranum, rraprop = rra_match.groups()
                if rraprop == "cf":
                    # Partition RRAs: plain data CFs vs Holt-Winters ones.
                    data_match = datasearch.search(value)
                    if data_match:
                        data_rras.append(rranum)
                    else:
                        holt_match = holtsearch.search(value)
                        if holt_match:
                            holt_rra[value] = rranum

    # Rebuild a DS:<name>:<type>:<args> spec for each datasource.
    for key in datasources:
        type_str = "ds[%s].type" % key
        min_str = "ds[%s].min" % key
        max_str = "ds[%s].max" % key
        minimal_heartbeat_str = "ds[%s].minimal_heartbeat" % key
        type = data[type_str]
        type_match = ds_type_search.search(type)
        if type_match:
            min_str = "ds[%s].min" % key
            min = data[min_str]
            if not min:
                min = "U"

            max_str = "ds[%s].max" % key
            max = data[max_str]
            if not max:
                max = "U"

            minimal_heartbeat_str = "ds[%s].minimal_heartbeat" % key
            minimal_heartbeat = data[minimal_heartbeat_str]
            args = "%s:%s:%s" % (minimal_heartbeat, min, max)
        elif type == "COMPUTE":
            cdef_str = "ds[%s].cdef" % key
            args = cdef_str
        else:
            continue

        # NOTE(review): ds_string is overwritten on every iteration, so
        # only the LAST datasource reaches rrdtool.create() below —
        # looks like a bug for multi-DS files; confirm.
        ds_string = "DS:%s:%s:%s" % (key, type, args)

    # Reconstruct each plain-data RRA spec from the info dump.
    for key in data_rras:
        cf_string = "rra[%s].cf" % key
        xff_string = "rra[%s].xff" % key
        pdp_per_row_string = "rra[%s].pdp_per_row" % key
        rows_string = "rra[%s].rows" % key
        rra.append("RRA:%s:%s:%s:%s" % (data[cf_string], data[xff_string], data[pdp_per_row_string], data[rows_string]))

    # New Basic holt winters setup .. based on assumption we will tune
    # parameters later
    rra.append("RRA:HWPREDICT:1440:0.1:0.0035:288")
    # NOTE(review): this args list is built but never used (dead code).
    args = ["foo.rrd", "--start", str(start), "--step", str(step), " ".join(datasources), " ".join(rra)]

    # NOTE(review): joining with ", " and then whitespace-splitting leaves
    # a trailing comma on every RRA token passed to create() — verify.
    rra_string = ", ".join(rra)
    rrdtool.create(new_rrd, "--start", str(start), "--step", str(step), ds_string, *rra_string.split())

    # Copy the data into the new file
    startStop, names, values = rrdtool.fetch(old_rrd_renamed, "AVERAGE", "--start", str(start))
    # print startStop[0]
    # Note: this local deliberately shadows the `time` module name below.
    time = start
    # print step, time
    # Replay each non-empty sample into the rebuilt file.
    for value in values:
        time += step
        if value[0]:
            data_fmt = "%s:%s" % (time, value[0])
            log.warn(data_fmt)
            rrdtool.update(new_rrd, data_fmt)
def EjecutarP(hilo, comunidad, ip, port, name):
    """Poll an SNMP inbound-octets counter, feed it into a Holt-Winters
    RRD, render an observed-vs-predicted graph, and track failure
    (aberration) windows on the shared thread-state object.

    Args:
        hilo: thread-state object; uses .ban (0/1 in-failure flag),
              .inicio / .fin (failure window timestamps) and .unaVez
              (email throttle: 0 = nothing sent, 1 = start-of-failure
              sent, 2 = end-of-failure sent).
              NOTE(review): exact attribute contract inferred from the
              reads/writes below — confirm against the thread class.
        comunidad: SNMP community string.
        ip, port: address of the SNMP agent to poll.
        name: agent name; used to build file paths under lbPathmc.
    """
    agentPath = lbPathmc + name + "/"
    fname = "netPred.rrd"
    # All rrdtool calls below operate on this one database file.
    rrd_path = str(agentPath + name + fname)
    graph_path = str(agentPath + name) + "prediccion.png"

    # Sample IF-MIB ifInOctets.1 and push it into the RRD at "now" (N:).
    consulta = int(consultaSNMP(comunidad, ip, port, '1.3.6.1.2.1.2.2.1.10.1'))
    rrdtool.update(rrd_path, "N:" + str(consulta))
    rrdtool.dump(rrd_path, str(agentPath) + name + 'netP.xml')

    title = "Deteccion de comportamiento anomalo"

    ultimo = rrdtool.last(rrd_path)
    inicio = ultimo - 300
    # "Yesterday" window: the 300 s immediately before the graphed window,
    # SHIFTed forward below so it overlays the current data for comparison.
    ayerInicio = inicio - 300
    ayerFinal = ultimo - 300

    # Adjust the Holt-Winters smoothing factor (alpha) before graphing.
    rrdtool.tune(rrd_path, '--alpha', '0.8')
    #rrdtool.tune(rrd_path, '--beta', '0.1')
    #rrdtool.tune(rrd_path, '--gamma', '0.1')

    ret = rrdtool.graphv(
        graph_path,
        '--start',
        str(inicio),
        '--end',
        str(ultimo + 5),
        '--title=' + title,
        "--vertical-label=Bytes/s",
        '--slope-mode',
        "DEF:obs=" + rrd_path + ":inoctets:AVERAGE",
        #"DEF:outoctets=" + str(agentPath + fname) + ":outoctets:AVERAGE",
        "DEF:pred=" + rrd_path + ":inoctets:HWPREDICT",
        "DEF:dev=" + rrd_path + ":inoctets:DEVPREDICT",
        "DEF:fail=" + rrd_path + ":inoctets:FAILURES",
        "DEF:yvalue=" + rrd_path +
        ":inoctets:AVERAGE:start=" + str(ayerInicio) + ":end=" +
        str(ayerFinal),
        'SHIFT:yvalue:300',
        #"RRA:DEVSEASONAL:1d:0.1:2",
        #"RRA:DEVPREDICT:5d:5",
        #"RRA:FAILURES:1d:7:9:5""
        # Scale octets to bits (x8) for display.
        "CDEF:scaledh=yvalue,8,*",
        "CDEF:scaledobs=obs,8,*",
        # Confidence band: prediction +/- 2 standard deviations.
        "CDEF:upper=pred,dev,2,*,+",
        "CDEF:lower=pred,dev,2,*,-",
        "CDEF:scaledupper=upper,8,*",
        "CDEF:scaledlower=lower,8,*",
        "CDEF:scaledpred=pred,8,*",
        "AREA:scaledh#C9C9C9:Yesterday",
        "TICK:fail#FDD017:1.0:FFallas",
        "LINE3:scaledobs#00FF00:In traffic",
        "LINE1:scaledpred#FF00FF:Prediccion\\n",
        #"LINE1:outoctets#0000FF:Out traffic",
        "LINE1:scaledupper#ff0000:Upper Bound Average bits in\\n",
        "LINE1:scaledlower#0000FF:Lower Bound Average bits in",
        "VDEF:lastfail=fail,LAST",
        #"VDEF:max=fail,MAXIMUM",
        #"VDEF:min=fail,MINIMUM",
        # graphv returns these as ret['print[0]'] .. ret['print[3]'].
        "PRINT:lastfail: %c :strftime",
        "PRINT:lastfail:%6.2lf %S ",
        'PRINT:fail:MIN:%1.0lf',
        'PRINT:fail:MAX:%1.0lf',
    )

    time_falla = ret['print[0]']    # timestamp of the last FAILURES sample
    ultima_falla = ret['print[1]']  # last FAILURES value (1.0 = failing)

    if float(ultima_falla) == 1:
        if hilo.ban == 0:  # failure starts: flag goes 0 -> 1
            hilo.inicio = str(time_falla)
            hilo.ban = 1
            print("INICIO DE FALLA")
            if hilo.unaVez == 0:
                # FIX: the graph is written to graph_path (agentPath + name
                # + "prediccion.png"); the old code attached
                # agentPath + "prediccion.png", a file that is never created.
                sendAlertEmail(
                    "Agente : " + name + " Inicio aberración : " +
                    str(hilo.inicio),
                    graph_path,
                    rrd_path)
                hilo.unaVez = 1
        elif hilo.ban == 1:  # failure continues: remember latest timestamp
            hilo.fin = str(time_falla)
            print("CONTINUA" + hilo.fin)
    elif float(ultima_falla
               ) == 0 and hilo.ban == 1:  # failure ends: flag goes 1 -> 0
        hilo.ban = 0

        with open(str(lbPathmc) + "log.txt", "a") as f:
            f.write(str("\tFalla " + name + "\n"))
            f.write("Inicio : " + str(hilo.inicio) + "\n")
            # If the failure was a single sample, .fin was never set;
            # fall back to the last failure timestamp.
            if hilo.fin == "":
                hilo.fin = str(time_falla)
            f.write("Fin: " + str(hilo.fin) + "\n")
        print("FIN DE FALLA" + hilo.fin)
        if hilo.unaVez == 1:
            sendAlertEmail(
                "Agente : " + name + " Fin aberración : " + str(hilo.fin),
                graph_path,
                rrd_path)
            hilo.unaVez = 2
        hilo.fin = ""
        hilo.inicio = ""
示例#29
0
    def info(self, context, target=None):
        """Enumerate the RRD metrics available for a host.

        Scans ``CONF.canary_rrdpath/<target>/*/*.rrd`` and, for each file,
        derives a metric name of the form ``plugin.key`` or
        ``plugin[unit].key`` from the directory/file layout, then reads the
        file's time range, consolidation functions and resolutions.

        :param context: request context (unused here, kept for API parity).
        :param target: host FQDN; defaults to the local host's FQDN.
        :returns: dict mapping metric name -> {"from_time", "to_time",
                  "cfs" (sorted CF names), "resolutions" (sorted seconds)}.
        """
        if target is None:
            target = socket.getfqdn()
        rrdpath = os.path.join(CONF.canary_rrdpath, target)

        # Compile once; the filename pattern embeds rrdpath verbatim,
        # exactly as the original string-interpolated match did.
        name_re = re.compile(r"^%s/([^\/-]+)(-([^\/]+))?/([^\.]+)\.rrd$"
                             % rrdpath)
        pdp_re = re.compile(r"^rra\[\d+\]\.pdp_per_row$")
        cf_re = re.compile(r"^rra\[\d+\]\.cf")

        metrics = {}
        for filename in glob.glob(os.path.join(rrdpath, '*/*.rrd')):
            # NOTE: the rrdtool bindings appear to barf on unicode
            # paths, so force a plain str first.
            filename = str(filename)

            m = name_re.match(filename)
            if not m:
                continue
            plugin = m.group(1)
            unit = m.group(3)
            key = m.group(4)

            # NOTE: At this point we construct a metric name that is
            # equivalent to how we deconstruct the name above. It's
            # important that these two operations are symmetric for
            # the sake of a user's sanity.
            if unit:
                metric = "%s[%s].%s" % (plugin, unit, key)
            else:
                metric = "%s.%s" % (plugin, key)
            entry = metrics.setdefault(metric, {})

            entry["from_time"] = rrdtool.first(filename)
            entry["to_time"] = rrdtool.last(filename)

            step = 1
            pdps = set()
            cfs = set()
            for k, v in rrdtool.info(filename).items():
                if k == "step":
                    step = int(v)
                elif pdp_re.match(k):
                    pdps.add(int(v))
                elif cf_re.match(k):
                    cfs.add(v)

            entry["cfs"] = sorted(cfs)
            # FIX: the original did ``map(...).sort()`` which raises
            # AttributeError on Python 3 (map returns an iterator);
            # sorted() over a generator works on both 2 and 3.
            entry["resolutions"] = sorted(step * pdp for pdp in pdps)

        return metrics