Example #1
def identity(ctx, name):
    """
    Identity function:
    Returns datapoints where the value equals the timestamp of the datapoint.
    Useful when you have another series where the value is a timestamp, and
    you want to compare it to the time of the datapoint, to render an age.

    Example::

        &target=identity("The.time.series")

    This would create a series named "The.time.series" that contains points
    where x(t) == t.
    """
    step = 60
    start = int(epoch(ctx["startTime"]))
    end = int(epoch(ctx["endTime"]))
    return [
        TimeSeries(
            "identity(%s)" % name,
            epoch(ctx["startTime"]),
            epoch(ctx["endTime"]),
            [(t, t) for t in range(start, end, step)]
        )
    ]
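The "age" use case hinted at in the docstring boils down to subtracting a timestamp-valued series from the identity series; a standalone sketch of the arithmetic on hypothetical sample points:

last_run = [(100, 130), (100, 190), (220, 250)]   # each value is itself a timestamp
identity_pts = [(t, t) for _, t in last_run]      # what identity() would yield here
# age at each point = datapoint time minus the stored timestamp value
age = [(i - v, t) for (v, t), (i, _) in zip(last_run, identity_pts)]
print(age)  # [(30, 130), (90, 190), (30, 250)]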
Example #2
def nPercentile(ctx, series_list, n):
    """Returns n-percent of each series in the series_list."""
    assert n > 0, "The requested percent is required to be greater than 0"

    results = []
    for s in series_list:
        pv = get_percentile(s, n)[0]
        if pv is not None:
            name = "nPercentile(%s, %g)" % (s.name, n)
            ps = TimeSeries(
                name, s.start, s.end,
                [(pv, t) for _, t in s]
            )
            ps.pathExpression = name
            results += [ps]
    return results
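nPercentile (and percentileOfSeries further down) delegate the actual computation to get_percentile, which is not shown in this listing. Purely as a reference, a minimal standalone sketch of a rank-based percentile (rank = n/100 * (N+1), clamped, with optional linear interpolation) could look like the following; the helper name and exact clamping are illustrative assumptions, not the project's implementation:

import math

def percentile_of_points(points, n, interpolate=False):
    # Illustrative stand-in for get_percentile: rank-based percentile over
    # the non-None values, optionally interpolating between neighbours.
    values = sorted(v for v in points if v is not None)
    if not values:
        return None
    fractional_rank = (n / 100.0) * (len(values) + 1)
    rank = int(fractional_rank)
    fraction = fractional_rank - rank
    if not interpolate:
        rank += int(math.ceil(fraction))
    if rank <= 0:
        return values[0]
    if rank >= len(values):
        return values[-1]
    result = values[rank - 1]
    if interpolate:
        result += fraction * (values[rank] - result)
    return result

print(percentile_of_points([3, 1, None, 4, 1, 5], 50))  # 3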
Example #3
def randomWalkFunction(ctx, name):
    """
    Short Alias: randomWalk()

    Returns a random walk starting at 0. This is great for testing when there
    is no real data in whisper.

    Example::

        &target=randomWalk("The.time.series")

    This would create a series named "The.time.series" that contains points
    where x(t) == x(t-1)+random()-0.5, and x(0) == 0.
    """
    step = 60
    delta = datetime.timedelta(seconds=step)
    when = ctx["startTime"]
    values = []
    current = 0
    while when < ctx["endTime"]:
        t = epoch(when)
        values += [(current, t)]
        current += random.random() - 0.5
        when += delta
    return [
        TimeSeries(
            "randomWalk(%s)" % name,
            epoch(ctx["startTime"]),
            epoch(ctx["endTime"]),
            values
        )
    ]
Example #4
def sinFunction(ctx, name, amplitude=1):
    """
    Short Alias: sin()

    Just returns the sine of the current time. The optional amplitude parameter
    changes the amplitude of the wave.

    Example::

        &target=sin("The.time.series", 2)

    This would create a series named "The.time.series" that contains points
    where x(t) == sin(t)*2.
    """
    step = 60
    delta = datetime.timedelta(seconds=step)
    when = ctx["startTime"]
    values = []
    while when < ctx["endTime"]:
        t = epoch(when)
        values += [(math.sin(t) * amplitude, t)]
        when += delta
    return [
        TimeSeries(
            "sin(%s)" % name,
            epoch(ctx["startTime"]),
            epoch(ctx["endTime"]),
            values
        )
    ]
Example #5
def multiplySeries(ctx, *series_lists):
    """
    Takes two or more series and multiplies their points. A constant may not be
    used. To multiply by a constant, use the scale() function.

    Example::

        &target=multiplySeries(Series.dividends,Series.divisors)

    """
    def mul(*factors):
        if None in factors:
            return None

        product = 1
        for factor in factors:
            product *= float(factor)
        return product

    if is_empty(series_lists):
        return []
    if len(series_lists) == 1:
        return series_lists
    name, series_lists = normalize("multiplySeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, mul, safe=True)]
Example #6
def minSeries(ctx, *series_lists):
    """
    Takes one metric or a wildcard series_list.
    For each datapoint from each metric passed in, pick the minimum value and
    graph it.

    Example::

        &target=minSeries(Server*.connections.total)
    """
    if is_empty(series_lists):
        return []
    name, series_lists = normalize("minSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, min, safe=True)]
Example #7
def countSeries(ctx, *series_lists):
    """
    Draws a horizontal line representing the number of nodes found in the
    series_list.

    Example::

        &target=countSeries(carbon.agents.*.*)

    """
    def count(a):
        return len(a)

    if is_empty(series_lists):
        return []
    name, series_lists = normalize("countSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, count, safe=True)]
Example #8
def get_forecast(symbol, outlook_len=SEQUENCE_LEN):
    """

    :param symbol:
    :param outlook_len:
    :return:
    """
    outlook = list()

    if symbol in DAILY:
        _seed = DAILY[symbol].get_seed(SEQUENCE_LEN)
        seed = numpy.array([_seed])
        if len(seed.shape) == 3 and seed.shape[1] == SEQUENCE_LEN:
            while len(outlook) < outlook_len:
                _forecast = _model_.predict(seed, verbose=0)
                outlook += [_forecast[0][0].tolist()]
                seed = numpy.array([(_seed + outlook)[-SEQUENCE_LEN:]])
            return TimeSeries.from_forecast(symbol, outlook)
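The while loop rolls the model forward on its own output: each prediction is appended to the seed window, which then slides by one step. A self-contained sketch of that rolling-window idea, with a dummy mean-of-window predictor standing in for the trained model (_model_.predict in the code above):

SEQ_LEN = 4

def roll_forecast(seed, steps, predict):
    # Autoregressive roll-forward: feed each prediction back into the window.
    window = list(seed[-SEQ_LEN:])
    outlook = []
    while len(outlook) < steps:
        nxt = predict(window)
        outlook.append(nxt)
        window = (window + [nxt])[-SEQ_LEN:]
    return outlook

print(roll_forecast([1.0, 2.0, 3.0, 4.0], 3, lambda w: sum(w) / len(w)))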
Example #9
def diffSeries(ctx, *series_lists):
    """
    Can take two or more metrics.
    Subtracts parameters 2 through n from parameter 1.

    Example::

        &target=diffSeries(service.connections.total,
                           service.connections.failed)

    """
    def diff(values):
        return sum(
            [values[0] if values[0] is not None else 0] +
            [-v for v in values[1:] if v is not None]
        )

    if is_empty(series_lists):
        return []
    name, series_lists = normalize("diffSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, diff)]
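To make the None handling concrete: every value after the first is subtracted from it, None subtrahends are skipped, and a None minuend counts as 0. A quick worked check, with diff repeated so the snippet runs on its own:

def diff(values):
    return sum(
        [values[0] if values[0] is not None else 0] +
        [-v for v in values[1:] if v is not None]
    )

print(diff([10, 3, None, 2]))  # 5  -> 10 - 3 - 2, the None is skipped
print(diff([None, 3, 2]))      # -5 -> the None minuend counts as 0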
Example #10
def averageSeries(ctx, *series_lists):
    """
    Short Alias: avg()

    Takes one metric or a wildcard series_list.
    Draws the average value of all metrics passed at each time.

    Example::

        &target=averageSeries(company.server.*.threads.busy)

    """
    def avg(p):
        if p:
            return sum(p) / len(p)
        else:
            return None

    if is_empty(series_lists):
        return []
    name, series_lists = normalize("averageSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, avg, safe=True)]
Example #11
def sumSeries(ctx, *series_lists):
    """
    Short form: sum()

    This will add metrics together and return the sum at each datapoint. (See
    integral for a sum over time)

    Example::

        &target=sum(company.server.application*.requestsHandled)

    This would show the sum of all requests handled per minute (provided
    requestsHandled are collected once a minute). If metrics with different
    retention rates are combined, the coarsest metric is graphed, and the sum
    of the other metrics is averaged for the metrics with finer retention
    rates.

    """
    if is_empty(series_lists):
        return []
    name, series_lists = normalize("sumSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, sum, safe=True)]
Example #12
def rangeOfSeries(ctx, *series_lists):
    """
    Takes a wildcard series_list.
    Distills a set of inputs down to the range (max minus min) of the series
    at each point.

    Example::

        &target=rangeOfSeries(Server*.connections.total)

    """
    def rng(a):
        min_a = min(a)
        max_a = max(a)
        if min_a is None or max_a is None:
            return None
        else:
            return max_a - min_a

    if is_empty(series_lists):
        return []
    name, series_lists = normalize("rangeOfSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, rng, safe=True)]
Example #13
def percentileOfSeries(ctx, series_lists, n, interpolate=False):
    """
    percentileOfSeries returns a single series which is composed of the
    n-percentile values taken across a wildcard series at each point.
    Unless `interpolate` is set to True, percentile values are actual values
    contained in one of the supplied series.
    """
    if n <= 0:
        raise ValueError(
            'The requested percent is required to be greater than 0')

    if not series_lists:
        return []
    _, series_lists = normalize("percentileOfSeries", series_lists)
    name = "percentileOfSeries(%s,%g)" % (series_lists[0].pathExpression, n)
    return [
        TimeSeries.fit_map(
            name, series_lists,
            lambda x: get_percentile(x, n, interpolate),
            safe=True
        )
    ]
Example #14
def stddevSeries(ctx, *series_lists):
    """

    Takes one metric or a wildcard series_list.
    Draws the standard deviation of all metrics passed at each time.

    Example::

        &target=stddevSeries(company.server.*.threads.busy)

    """
    def stddev(a):
        sm = sum(a)
        ln = len(a)
        avg = sm / ln
        s = 0
        for v in a:
            s += (v - avg) ** 2
        return math.sqrt(s / ln)

    if is_empty(series_lists):
        return []
    name, series_lists = normalize("stddevSeries", series_lists)
    return [TimeSeries.fit_map(name, series_lists, stddev, safe=True)]
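The stddev helper above computes the population standard deviation, so it should agree with statistics.pstdev from the standard library; a quick self-contained cross-check on arbitrary sample data:

import math
import statistics

def stddev(a):
    avg = sum(a) / len(a)
    return math.sqrt(sum((v - avg) ** 2 for v in a) / len(a))

sample = [2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0]
assert math.isclose(stddev(sample), statistics.pstdev(sample))
print(stddev(sample))  # 2.0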
Example #15
def derivative(ctx, series_list):
    """
    This is the opposite of the integral function. This is useful for taking a
    running total metric and calculating the delta between subsequent data
    points.

    This function does not normalize for periods of time, as a true derivative
    would. Instead see the perSecond() function to calculate a rate of change
    over time.

    Example::

        &target=derivative(company.server.application01.ifconfig.TXPackets)

    Each time you run ifconfig, the RX and TXPackets are higher (assuming there
    is network traffic.) By applying the derivative function, you can get an
    idea of the packets per minute sent or received, even though you're only
    recording the total.
    """
    results = []
    for series in series_list:
        new_values = []
        prev = None
        for val, t in series:
            if None in (prev, val):
                new_values += [(None, t)]
                prev = val
                continue
            new_values += [(val - prev, t)]
            prev = val
        name = "derivative(%s)" % series.name
        results += [
            TimeSeries(name, series.start, series.end, new_values)
        ]
    return results
Example #16
def nonNegativeDerivative(ctx, series_list, max_value=None):
    """
    Same as the derivative function above, but ignores datapoints that trend
    down. Useful for counters that increase for a long time, then wrap or
    reset. (Such as if a network interface is destroyed and recreated by
    unloading and re-loading a kernel module, common with USB / WiFi cards.)

    Example::

        &target=nonNegativeDerivative(
            company.server.application01.ifconfig.TXPackets)

    """
    results = []

    for series in series_list:
        new_values = []
        prev = None
        for val, t in series:
            if None in (prev, val):
                new_values.append((None, t))
                prev = val
                continue
            diff = val - prev
            if diff >= 0:
                new_values.append((diff, t))
            elif max_value is not None and max_value >= val:
                new_values.append(((max_value - prev) + val + 1, t))
            else:
                new_values.append((None, t))
            prev = val
        results += [
            TimeSeries("nonNegativeDerivative(%s)" % series.name,
                       series.start, series.end, new_values)
        ]
    return results
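The max_value branch reconstructs the delta across a counter wrap: the distance remaining to the wrap point, plus the value after the wrap, plus one for the wrap step itself. A standalone illustration with a hypothetical 8-bit counter:

def nn_delta(prev, val, max_value=None):
    # Same rule as the loop above, applied to a single pair of samples.
    if prev is None or val is None:
        return None
    diff = val - prev
    if diff >= 0:
        return diff
    if max_value is not None and max_value >= val:
        return (max_value - prev) + val + 1
    return None

counts = [250, 252, 255, 3, 6]   # counter wrapping at 255
print([nn_delta(p, v, max_value=255) for p, v in zip(counts, counts[1:])])
# [2, 3, 4, 3]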
Example #17
class RenderApplication(ExtApplication):
    """
    Graphite-compatible render
    """
    title = "Render"

    DEFAULT_GRAPH_WIDTH = 330
    DEFAULT_GRAPH_HEIGHT = 250

    # Empty space around the borders of chart
    X_PADDING = 10
    Y_PADDING = 10
    #

    @view(url="^$", method=["GET"], access="launch",
          validate={
              "graphType": StringParameter(
                  default="line",
                  choices=GraphTypes.keys()
              ),
              "pieMode": StringParameter(
                  default="average",
                  # @todo: Specify all modes
                  choices=["average"]
              ),
              "cacheTimeout": IntParameter(
                  min_value=0,
                  default=config.getint("pm_render", "cache_duration")
              ),
              "target": ListOfParameter(
                  element=StringParameter(),
                  convert=True, default=[]
              ),
              "localOnly": StringParameter(default="0"),
              "tz": StringParameter(default=TIME_ZONE),
              "pickle": StringParameter(required=False),
              "rawData": StringParameter(required=False),
              "jsonp": StringParameter(required=False),
              "format": StringParameter(required=False),
              "noCache": StringParameter(required=False),
              "maxDataPoints": IntParameter(required=False)
          },
          api=True)
    def api_render(self, request,
                   graphType=None, pieMode=None, cacheTimeout=None,
                   target=None, localOnly=None, tz=None, pickle=None,
                   rawData=None, jsonp=None,
                   noCache=None, format=None,
                   maxDataPoints=None,
                   **kwargs):
        # Get timezone info
        try:
            tz = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            tz = pytz.timezone(TIME_ZONE)
        # Get format
        if pickle is not None:
            format = "pickle"
        elif rawData is not None:
            format = "raw"
        # Get time range
        try:
            t0 = parseATTime(kwargs.get("from", "-1d"))
            t1 = parseATTime(kwargs.get("until", "now"))
        except Exception as why:
            return self.response_bad_request(
                "Cannot parse time: %s" % why
            )
        if t0 == t1:
            return self.response_bad_request("Empty time range")
        # Collect parameters
        request_opts = {
            "graphType": graphType,
            "graphClass": GraphTypes[graphType],
            "pieMode": pieMode,
            "targets": target or [],
            "localOnly": localOnly == "1",
            "tzinfo": tz,
            "format": format,
            "noCache": noCache is not None,
            "startTime": min(t0, t1),
            "endTime": max(t0, t1),
            "cacheTimeout": cacheTimeout
        }
        if format:
            request_opts["format"] = format
            if jsonp is not None:
                request_opts["jsonp"] = jsonp
        # Fill possible graph options
        graph_opts = {
            "width": self.DEFAULT_GRAPH_WIDTH,
            "height": self.DEFAULT_GRAPH_HEIGTH,
        }
        if format == "svg":
            graph_opts["outputFormat"] = "svg"
        for opt in request_opts["graphClass"].customizable:
            if opt in kwargs:
                v = kwargs[opt]
                if opt not in ("fgcolor", "bgcolor", "fontColor"):
                    try:
                        graph_opts[opt] = int(v)
                        continue
                    except ValueError:
                        pass
                try:
                    graph_opts[opt] = float(v)
                    continue
                except ValueError:
                    pass
                if v.lower() in ("true", "false"):
                    graph_opts[opt] = v.lower() == "true"
                    continue
                if not v or v.lower() == "default":
                    continue
                graph_opts[opt] = v
        use_cache = not request_opts["noCache"]
        cache_timeout = request_opts["cacheTimeout"]
        ctx = {
            "startTime": request_opts["startTime"],
            "endTime": request_opts["endTime"],
            "localOnly": request_opts["localOnly"],
            "maxDataPoints": maxDataPoints,
            "data": []
        }
        data = ctx["data"]
        # Try to use cached response
        if use_cache:
            request_key = hashRequest(request)
            cached_response = cache.get(request_key)
            if cached_response:
                return cached_response
            else:
                request_opts["requestKey"] = request_key
        # Cache miss, prepare requested data
        if graphType == "pie":
            for t in request_opts["targets"]:
                if ":" in t:
                    try:
                        name, value = t.split(":", 1)
                        data += [(name, float(value))]
                    except ValueError:
                        raise ValueError("Invalid target: '%s'" % t)
                else:
                    for series in evaluateTarget(ctx, t):
                        f = PieFunctions(request_opts["pieMode"])
                        data += [(series.name, f(ctx, series) or 0)]
        elif graphType == "line":
            if use_cache:
                # Try to fetch previously cached data
                data_key = hashData(request_opts["targets"],
                                    request_opts["startTime"],
                                    request_opts["endTime"])
                cached_data = cache.get(data_key)
            else:
                cached_data = None
            if cached_data is None:
                for t in request_opts["targets"]:
                    if not t.strip():
                        continue
                    data.extend(evaluateTarget(ctx, t))
                if use_cache:
                    cache.set(
                        data_key,
                        [d.get_info() for d in data],
                        cache_timeout
                    )
            else:
                # Convert cached data to Time Series
                data = [TimeSeries(**a) for a in cached_data]
        # Return data in requested format
        h = getattr(self, "get_%s_response" % request_opts["format"], None)
        if h:
            r = h(data, request_opts)
        else:
            graph_opts["data"] = data
            r = self.render_graph(request_opts, graph_opts)
        r["Pragma"] = "no-cache"
        r["Cache-Control"] = "no-cache"
        return r
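For reference, the customizable-option loop above coerces each raw query value in a fixed order: int, then float, then the strings "true"/"false", otherwise the raw string, with empty values and the literal "default" left to the graph defaults (color options additionally skip the int step). A minimal standalone sketch of that ordering:

def coerce_option(v):
    # Try int, then float, then "true"/"false", otherwise keep the string.
    try:
        return int(v)
    except ValueError:
        pass
    try:
        return float(v)
    except ValueError:
        pass
    if v.lower() in ("true", "false"):
        return v.lower() == "true"
    if not v or v.lower() == "default":
        return None  # caller keeps the graph's default in this case
    return v

for raw in ("300", "0.5", "true", "default", "solid"):
    print("%s -> %r" % (raw, coerce_option(raw)))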