Example #1
def expand_view(request):
  "View for expanding a pattern into matching metric paths"
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  local_only = int( queryParams.get('local', 0) )
  group_by_expr = int( queryParams.get('groupByExpr', 0) )
  leaves_only = int( queryParams.get('leavesOnly', 0) )
  jsonp = queryParams.get('jsonp', False)
  forward_headers = extractForwardHeaders(request)

  results = {}
  for query in queryParams.getlist('query'):
    results[query] = set()
    for node in STORE.find(query, local=local_only, headers=forward_headers):
      if node.is_leaf or not leaves_only:
        results[query].add( node.path )

  # Convert our results to sorted lists because sets aren't json-friendly
  if group_by_expr:
    for query, matches in results.items():
      results[query] = sorted(matches)
  else:
    results = sorted( reduce(set.union, results.values(), set()) )

  result = {
    'results' : results
  }

  response = json_response_for(request, result, jsonp=jsonp)
  response['Pragma'] = 'no-cache'
  response['Cache-Control'] = 'no-cache'
  return response
Example #2
def expand_view(request):
    "View for expanding a pattern into matching metric paths"
    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    local_only = int(queryParams.get('local', 0))
    group_by_expr = int(queryParams.get('groupByExpr', 0))
    leaves_only = int(queryParams.get('leavesOnly', 0))
    jsonp = queryParams.get('jsonp', False)
    forward_headers = extractForwardHeaders(request)

    results = {}
    for query in queryParams.getlist('query'):
        results[query] = set()
        for node in STORE.find(query,
                               local=local_only,
                               headers=forward_headers):
            if node.is_leaf or not leaves_only:
                results[query].add(node.path)

    # Convert our results to sorted lists because sets aren't json-friendly
    if group_by_expr:
        for query, matches in results.items():
            results[query] = sorted(matches)
    else:
        results = sorted(reduce(set.union, results.values(), set()))

    result = {'results': results}

    response = json_response_for(request, result, jsonp=jsonp)
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
Example #3
    def test_extractForwardHeaders(self):
        class DummyRequest(object):
            META = {
                'HTTP_X_TEST1': 'test',
            }

        headers = extractForwardHeaders(DummyRequest())
        self.assertEqual(headers, {'X-Test1': 'test'})
Example #4
  def test_extractForwardHeaders(self):
    class DummyRequest(object):
      META = {
        'HTTP_X_TEST1': 'test',
      }

    headers = extractForwardHeaders(DummyRequest())
    self.assertEqual(headers, {'X-Test1': 'test'})
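The two tests above pin down the META-to-header mapping ('HTTP_X_TEST1' comes back as 'X-Test1'), but the implementation of extractForwardHeaders itself does not appear on this page. Below is a minimal sketch consistent with those tests, assuming the function simply reverses Django's META mangling for every HTTP_* key; the real function may instead forward only a configured whitelist of headers.

def extractForwardHeaders(request):
    """Rebuild forwardable HTTP header names from Django's request.META.

    Django exposes the request header 'X-Test1: test' as
    META['HTTP_X_TEST1'] = 'test'; this sketch reverses that mangling.
    """
    headers = {}
    for key, value in request.META.items():
        if key.startswith('HTTP_'):
            # 'HTTP_X_TEST1' -> 'X-Test1'
            name = '-'.join(part.capitalize() for part in key[5:].split('_'))
            headers[name] = value
    return headers

# Mirrors the tests above:
class DummyRequest(object):
    META = {'HTTP_X_TEST1': 'test'}

assert extractForwardHeaders(DummyRequest()) == {'X-Test1': 'test'}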
Example #5
def index_json(request):
    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    try:
        jsonp = queryParams.get('jsonp', False)

        requestContext = {
            'localOnly': int(queryParams.get('local', 0)),
            'forwardHeaders': extractForwardHeaders(request),
        }

        matches = STORE.get_index(requestContext)
    except Exception:
        log.exception()
        return json_response_for(request, [], jsonp=jsonp, status=500)

    return json_response_for(request, matches, jsonp=jsonp)
Example #6
def index_json(request):
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  try:
    jsonp = queryParams.get('jsonp', False)

    requestContext = {
      'localOnly': int( queryParams.get('local', 0) ),
      'forwardHeaders': extractForwardHeaders(request),
    }

    matches = STORE.get_index(requestContext)
  except Exception:
    log.exception()
    return json_response_for(request, [], jsonp=jsonp, status=500)

  return json_response_for(request, matches, jsonp=jsonp)
Example #7
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = 'noCache' not in requestOptions
    cacheTimeout = requestOptions['cacheTimeout']
    # TODO: Make that a namedtuple or a class.
    requestContext = {
        'startTime': requestOptions['startTime'],
        'endTime': requestOptions['endTime'],
        'now': requestOptions['now'],
        'localOnly': requestOptions['localOnly'],
        'template': requestOptions['template'],
        'tzinfo': requestOptions['tzinfo'],
        'forwardHeaders': extractForwardHeaders(request),
        'data': [],
        'prefetched': {},
    }
    data = requestContext['data']

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache('Request-Cache hit [%s]' % requestKey)
            log.rendering('Returned cached response in %.6f' %
                          (time() - start))
            return cachedResponse
        else:
            log.cache('Request-Cache miss [%s]' % requestKey)

    # Now we prepare the requested data
    if requestOptions['graphType'] == 'pie':
        for target in requestOptions['targets']:
            if target.find(':') >= 0:
                try:
                    name, value = target.split(':', 1)
                    value = float(value)
                except ValueError:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)

                for series in seriesList:
                    func = PieFunctions[requestOptions['pieMode']]
                    data.append((series.name, func(requestContext, series)
                                 or 0))

    elif requestOptions['graphType'] == 'line':
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions['targets']
            startTime = requestOptions['startTime']
            endTime = requestOptions['endTime']
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext['data'] = data = cachedData
        else:  # Have to actually retrieve the data now
            targets = requestOptions['targets']
            if (settings.REMOTE_PREFETCH_DATA
                    and not requestOptions.get('localOnly')):
                prefetchRemoteData(requestContext, targets)

            for target in targets:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" %
                              (target, time() - t))
                data.extend(seriesList)

            if useCache:
                cache.add(dataKey, data, cacheTimeout)

        # If data is all we needed, we're done
        format = requestOptions.get('format')
        if format == 'csv':
            response = HttpResponse(content_type='text/csv')
            writer = csv.writer(response, dialect='excel')

            for series in data:
                for i, value in enumerate(series):
                    timestamp = datetime.fromtimestamp(
                        series.start + (i * series.step),
                        requestOptions['tzinfo'])
                    writer.writerow(
                        (series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"),
                         value))

            return response

        if format == 'json':
            jsonStart = time()

            series_data = []
            if 'maxDataPoints' in requestOptions and any(data):
                startTime = min([series.start for series in data])
                endTime = max([series.end for series in data])
                timeRange = endTime - startTime
                maxDataPoints = requestOptions['maxDataPoints']
                for series in data:
                    numberOfDataPoints = timeRange / series.step
                    if maxDataPoints < numberOfDataPoints:
                        valuesPerPoint = math.ceil(
                            float(numberOfDataPoints) / float(maxDataPoints))
                        secondsPerPoint = int(valuesPerPoint * series.step)
                        # Nudge start over a little bit so that the consolidation bands align with each call
                        # removing 'jitter' seen when refreshing.
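                        # Worked example with assumed numbers: if series.start
                        # is 1000, series.step is 10 and secondsPerPoint is 60,
                        # then nudge = 60 + (1000 % 10) - (1000 % 60)
                        # = 60 + 0 - 40 = 20, so the series now starts at
                        # 1020, which sits on a 60-second boundary no matter
                        # when the request was made.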
                        nudge = secondsPerPoint + (
                            series.start % series.step) - (series.start %
                                                           secondsPerPoint)
                        series.start = series.start + nudge
                        valuesToLose = int(nudge / series.step)
                        for r in range(1, valuesToLose):
                            del series[0]
                        series.consolidate(valuesPerPoint)
                        timestamps = range(int(series.start),
                                           int(series.end) + 1,
                                           int(secondsPerPoint))
                    else:
                        timestamps = range(int(series.start),
                                           int(series.end) + 1,
                                           int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(
                        dict(target=series.name,
                             tags=series.tags,
                             datapoints=datapoints))
            elif 'noNullPoints' in requestOptions and any(data):
                for series in data:
                    values = []
                    for (index, v) in enumerate(series):
                        if v is not None:
                            timestamp = series.start + (index * series.step)
                            values.append((v, timestamp))
                    if len(values) > 0:
                        series_data.append(
                            dict(target=series.name,
                                 tags=series.tags,
                                 datapoints=values))
            else:
                for series in data:
                    timestamps = range(int(series.start),
                                       int(series.end) + 1, int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(
                        dict(target=series.name,
                             tags=series.tags,
                             datapoints=datapoints))

            output = json.dumps(
                series_data,
                indent=(2 if requestOptions['pretty'] else None)).replace(
                    'None,',
                    'null,').replace('NaN,',
                                     'null,').replace('Infinity,', '1e9999,')

            if 'jsonp' in requestOptions:
                response = HttpResponse(content="%s(%s)" %
                                        (requestOptions['jsonp'], output),
                                        content_type='text/javascript')
            else:
                response = HttpResponse(content=output,
                                        content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('JSON rendering time %6f' % (time() - jsonStart))
            log.rendering('Total request processing time %6f' %
                          (time() - start))
            return response

        if format == 'dygraph':
            labels = ['Time']
            result = '{}'
            if data:
                datapoints = [[
                    ts
                ] for ts in range(data[0].start, data[0].end, data[0].step)]
                for series in data:
                    labels.append(series.name)
                    for i, point in enumerate(series):
                        if point is None:
                            point = 'null'
                        elif point == float('inf'):
                            point = 'Infinity'
                        elif point == float('-inf'):
                            point = '-Infinity'
                        elif math.isnan(point):
                            point = 'null'
                        datapoints[i].append(point)
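                # With two series, line_template becomes '[%s000, %s, %s]';
                # the '%s000' appends three zeros to the epoch-seconds
                # timestamp, producing the milliseconds dygraph expects.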
                line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
                lines = [
                    line_template % tuple(points) for points in datapoints
                ]
                result = '{"labels" : %s, "data" : [%s]}' % (
                    json.dumps(labels), ', '.join(lines))
            response = HttpResponse(content=result,
                                    content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('Total dygraph rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'rickshaw':
            series_data = []
            for series in data:
                timestamps = range(series.start, series.end, series.step)
                datapoints = [{
                    'x': x,
                    'y': y
                } for x, y in zip(timestamps, series)]
                series_data.append(
                    dict(target=series.name, datapoints=datapoints))
            if 'jsonp' in requestOptions:
                response = HttpResponse(
                    content="%s(%s)" %
                    (requestOptions['jsonp'], json.dumps(series_data)),
                    content_type='text/javascript')
            else:
                response = HttpResponse(content=json.dumps(series_data),
                                        content_type='application/json')

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            log.rendering('Total rickshaw rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'raw':
            response = HttpResponse(content_type='text/plain')
            for series in data:
                response.write(
                    "%s,%d,%d,%d|" %
                    (series.name, series.start, series.end, series.step))
                response.write(','.join(map(repr, series)))
                response.write('\n')

            log.rendering('Total rawData rendering time %.6f' %
                          (time() - start))
            return response

        if format == 'svg':
            graphOptions['outputFormat'] = 'svg'
        elif format == 'pdf':
            graphOptions['outputFormat'] = 'pdf'

        if format == 'pickle':
            response = HttpResponse(content_type='application/pickle')
            seriesInfo = [series.getInfo() for series in data]
            pickle.dump(seriesInfo, response, protocol=-1)

            log.rendering('Total pickle rendering time %.6f' %
                          (time() - start))
            return response

    # We've got the data, now to render it
    graphOptions['data'] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions['graphType'], graphOptions,
                                  requestContext['forwardHeaders'])
    else:
        image = doImageRender(requestOptions['graphClass'], graphOptions)

    useSVG = graphOptions.get('outputFormat') == 'svg'
    if useSVG and 'jsonp' in requestOptions:
        response = HttpResponse(content="%s(%s)" %
                                (requestOptions['jsonp'], json.dumps(image)),
                                content_type='text/javascript')
    elif graphOptions.get('outputFormat') == 'pdf':
        response = buildResponse(image, 'application/x-pdf')
    else:
        response = buildResponse(image,
                                 'image/svg+xml' if useSVG else 'image/png')

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering('Total rendering time %.6f seconds' % (time() - start))
    return response
Example #8
def parseOptions(request):
    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    # Start with some defaults
    graphOptions = {'width': 330, 'height': 250}
    requestOptions = {}

    graphType = queryParams.get('graphType', 'line')
    assert graphType in GraphTypes, "Invalid graphType '%s', must be one of %s" % (
        graphType, GraphTypes.keys())
    graphClass = GraphTypes[graphType]

    # Fill in the requestOptions
    requestOptions['graphType'] = graphType
    requestOptions['graphClass'] = graphClass
    requestOptions['pieMode'] = queryParams.get('pieMode', 'average')
    cacheTimeout = int(
        queryParams.get('cacheTimeout', settings.DEFAULT_CACHE_DURATION))
    requestOptions['targets'] = []
    requestOptions['forwardHeaders'] = extractForwardHeaders(request)

    # Extract the targets out of the queryParams
    mytargets = []
    # Normal format: ?target=path.1&target=path.2
    if len(queryParams.getlist('target')) > 0:
        mytargets = queryParams.getlist('target')

    # Rails/PHP/jQuery common practice format: ?target[]=path.1&target[]=path.2
    elif len(queryParams.getlist('target[]')) > 0:
        mytargets = queryParams.getlist('target[]')

    # Collect the targets
    for target in mytargets:
        requestOptions['targets'].append(target)

    template = dict()
    for key, val in queryParams.items():
        if key.startswith("template["):
            template[key[9:-1]] = val
    requestOptions['template'] = template

    if 'pickle' in queryParams:
        requestOptions['format'] = 'pickle'
    if 'rawData' in queryParams:
        requestOptions['format'] = 'raw'
    if 'format' in queryParams:
        requestOptions['format'] = queryParams['format']
        if 'jsonp' in queryParams:
            requestOptions['jsonp'] = queryParams['jsonp']

    requestOptions['pretty'] = bool(queryParams.get('pretty'))

    if 'noCache' in queryParams:
        requestOptions['noCache'] = True
    if ('maxDataPoints' in queryParams
            and queryParams['maxDataPoints'].isdigit()):
        requestOptions['maxDataPoints'] = int(queryParams['maxDataPoints'])
    if 'noNullPoints' in queryParams:
        requestOptions['noNullPoints'] = True

    requestOptions['localOnly'] = queryParams.get('local') == '1'

    # Fill in the graphOptions
    format = requestOptions.get('format')
    if format == 'svg':
        graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
        graphOptions['outputFormat'] = 'pdf'
    else:
        graphOptions['outputFormat'] = 'png'

    for opt in graphClass.customizable:
        if opt in queryParams:
            val = queryParams[opt]
            if (val.isdigit() or
                (val.startswith('-')
                 and val[1:].isdigit())) and 'color' not in opt.lower():
                val = int(val)
            elif '.' in val and (val.replace('.', '', 1).isdigit() or
                                 (val.startswith('-')
                                  and val[1:].replace('.', '', 1).isdigit())):
                val = float(val)
            elif val.lower() in ('true', 'false'):
                val = val.lower() == 'true'
            elif val.lower() == 'default' or val == '':
                continue
            graphOptions[opt] = val

    tzinfo = pytz.timezone(settings.TIME_ZONE)
    if 'tz' in queryParams:
        try:
            tzinfo = pytz.timezone(queryParams['tz'])
        except pytz.UnknownTimeZoneError:
            pass
    requestOptions['tzinfo'] = tzinfo

    # Get the time interval for time-oriented graph types
    if graphType == 'line' or graphType == 'pie':
        if 'now' in queryParams:
            now = parseATTime(queryParams['now'], tzinfo)
        else:
            now = datetime.now(tzinfo)

        if 'until' in queryParams:
            untilTime = parseATTime(queryParams['until'], tzinfo, now)
        else:
            untilTime = now
        if 'from' in queryParams:
            fromTime = parseATTime(queryParams['from'], tzinfo, now)
        else:
            fromTime = parseATTime('-1d', tzinfo, now)

        startTime = min(fromTime, untilTime)
        endTime = max(fromTime, untilTime)
        assert startTime != endTime, "Invalid empty time range"

        requestOptions['startTime'] = startTime
        requestOptions['endTime'] = endTime
        timeRange = endTime - startTime
        queryTime = timeRange.days * 86400 + timeRange.seconds  # convert the time delta to seconds
        if (settings.DEFAULT_CACHE_POLICY
                and not queryParams.get('cacheTimeout')):
            timeouts = [
                timeout for period, timeout in settings.DEFAULT_CACHE_POLICY
                if period <= queryTime
            ]
            cacheTimeout = max(timeouts or (0, ))
        requestOptions['now'] = now

    if cacheTimeout == 0:
        requestOptions['noCache'] = True
    requestOptions['cacheTimeout'] = cacheTimeout

    requestOptions['xFilesFactor'] = float(
        queryParams.get('xFilesFactor', settings.DEFAULT_XFILES_FACTOR))

    return (graphOptions, requestOptions)
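The per-option coercion in the graphClass.customizable loop above packs several rules into one chained conditional. Isolated below as a standalone sketch (not part of the project's API; it deliberately omits the color exclusion and the 'default'/empty-string skip handled inline above):

def coerce_graph_option(val):
    """Mimic parseOptions' string-to-typed-value coercion for one option."""
    if val.isdigit() or (val.startswith('-') and val[1:].isdigit()):
        return int(val)
    if '.' in val and (val.replace('.', '', 1).isdigit() or
                       (val.startswith('-')
                        and val[1:].replace('.', '', 1).isdigit())):
        return float(val)
    if val.lower() in ('true', 'false'):
        return val.lower() == 'true'
    return val

assert coerce_graph_option('42') == 42
assert coerce_graph_option('-1.5') == -1.5
assert coerce_graph_option('True') is True
assert coerce_graph_option('red') == 'red'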
Example #9
def find_view(request):
    "View for finding metrics matching a given pattern"

    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    format = queryParams.get('format', 'treejson')
    leaves_only = queryParamAsInt(queryParams, 'leavesOnly', 0)
    local_only = queryParamAsInt(queryParams, 'local', 0)
    wildcards = queryParamAsInt(queryParams, 'wildcards', 0)

    tzinfo = pytz.timezone(settings.TIME_ZONE)
    if 'tz' in queryParams:
        try:
            value = queryParams['tz']
            tzinfo = pytz.timezone(value)
        except pytz.UnknownTimeZoneError:
            pass
        except Exception as e:
            raise InputParameterError(
                'Invalid value {value} for param tz: {err}'.format(
                    value=repr(value), err=str(e)))

    if 'now' in queryParams:
        try:
            value = queryParams['now']
            now = parseATTime(value, tzinfo)
        except Exception as e:
            raise InputParameterError(
                'Invalid value {value} for param now: {err}'.format(
                    value=repr(value), err=str(e)))
    else:
        now = datetime.now(tzinfo)

    if 'from' in queryParams and str(queryParams['from']) != '-1':
        try:
            value = queryParams['from']
            fromTime = int(epoch(parseATTime(value, tzinfo, now)))
        except Exception as e:
            raise InputParameterError(
                'Invalid value {value} for param from: {err}'.format(
                    value=repr(value), err=str(e)))
    else:
        fromTime = -1

    if 'until' in queryParams and str(queryParams['until']) != '-1':
        try:
            value = queryParams['until']
            untilTime = int(epoch(parseATTime(value, tzinfo, now)))
        except Exception as e:
            raise InputParameterError(
                'Invalid value {value} for param until: {err}'.format(
                    value=repr(value), err=str(e)))
    else:
        untilTime = -1

    nodePosition = queryParamAsInt(queryParams, 'position', -1)
    jsonp = queryParams.get('jsonp', False)
    forward_headers = extractForwardHeaders(request)

    if fromTime == -1:
        fromTime = None
    if untilTime == -1:
        untilTime = None

    automatic_variants = queryParamAsInt(queryParams, 'automatic_variants', 0)

    try:
        query = str(queryParams['query'])
    except KeyError:
        raise InputParameterError('Missing required parameter \'query\'')

    if query == '':
        raise InputParameterError('Required parameter \'query\' is empty')

    if '.' in query:
        base_path = query.rsplit('.', 1)[0] + '.'
    else:
        base_path = ''

    if format == 'completer':
        query = query.replace('..', '*.')
        if not query.endswith('*'):
            query += '*'

        if automatic_variants:
            query_parts = query.split('.')
            for i, part in enumerate(query_parts):
                if ',' in part and '{' not in part:
                    query_parts[i] = '{%s}' % part
            query = '.'.join(query_parts)

    try:
        matches = list(
            STORE.find(
                query,
                fromTime,
                untilTime,
                local=local_only,
                headers=forward_headers,
                leaves_only=leaves_only,
            ))
    except Exception:
        log.exception()
        raise

    log.info('find_view query=%s local_only=%s matches=%d' %
             (query, local_only, len(matches)))
    matches.sort(key=lambda node: node.name)
    log.info(
        "received remote find request: pattern=%s from=%s until=%s local_only=%s format=%s matches=%d"
        % (query, fromTime, untilTime, local_only, format, len(matches)))

    if format == 'treejson':
        profile = getProfile(request)
        content = tree_json(matches,
                            base_path,
                            wildcards=profile.advancedUI or wildcards)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'nodelist':
        content = nodes_by_position(matches, nodePosition)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'pickle':
        content = pickle_nodes(matches)
        response = HttpResponse(content, content_type='application/pickle')

    elif format == 'msgpack':
        content = msgpack_nodes(matches)
        response = HttpResponse(content, content_type='application/x-msgpack')

    elif format == 'json':
        content = json_nodes(matches)
        response = json_response_for(request, content, jsonp=jsonp)

    elif format == 'completer':
        results = []
        for node in matches:
            node_info = dict(path=node.path,
                             name=node.name,
                             is_leaf=str(int(node.is_leaf)))
            if not node.is_leaf:
                node_info['path'] += '.'
            results.append(node_info)

        if len(results) > 1 and wildcards:
            wildcardNode = {'name': '*'}
            results.append(wildcardNode)

        response = json_response_for(request, {'metrics': results},
                                     jsonp=jsonp)

    else:
        return HttpResponseBadRequest(
            content="Invalid value for 'format' parameter",
            content_type='text/plain')

    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
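This variant validates its integer parameters through a queryParamAsInt helper that is not shown on this page. A plausible sketch, assuming it reports bad values through the same InputParameterError used for the other parameters above:

def queryParamAsInt(queryParams, name, default):
    """Read an integer query parameter, falling back to a default."""
    if name not in queryParams:
        return default
    try:
        return int(queryParams[name])
    except ValueError:
        raise InputParameterError(
            'Invalid int value {value} for param {name}'.format(
                value=repr(queryParams[name]), name=name))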
Example #10
def _requestContext(request):
    return {
        'forwardHeaders': extractForwardHeaders(request),
    }
Example #11
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  # TODO: Make that a namedtuple or a class.
  requestContext = {
    'startTime' : requestOptions['startTime'],
    'endTime' : requestOptions['endTime'],
    'now': requestOptions['now'],
    'localOnly' : requestOptions['localOnly'],
    'template' : requestOptions['template'],
    'tzinfo' : requestOptions['tzinfo'],
    'forwardHeaders': extractForwardHeaders(request),
    'data' : [],
    'prefetched' : {},
  }
  data = requestContext['data']

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    cachedResponse = cache.get(requestKey)
    if cachedResponse:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      return cachedResponse
    else:
      log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name,value = target.split(':',1)
          value = float(value)
        except ValueError:
          raise ValueError("Invalid target '%s'" % target)
        data.append( (name,value) )
      else:
        seriesList = evaluateTarget(requestContext, target)

        for series in seriesList:
          func = PieFunctions[requestOptions['pieMode']]
          data.append( (series.name, func(requestContext, series) or 0 ))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    if useCache:
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime)
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)
    else:
      cachedData = None

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else: # Have to actually retrieve the data now
      targets = requestOptions['targets']
      if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
        prefetchRemoteData(requestContext, targets)

      for target in targets:
        if not target.strip():
          continue
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
        data.extend(seriesList)

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
      response = HttpResponse(content_type='text/csv')
      writer = csv.writer(response, dialect='excel')

      for series in data:
        for i, value in enumerate(series):
          timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
          writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))

      return response

    if format == 'json':
      jsonStart = time()

      series_data = []
      if 'maxDataPoints' in requestOptions and any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime
        maxDataPoints = requestOptions['maxDataPoints']
        for series in data:
          numberOfDataPoints = timeRange/series.step
          if maxDataPoints < numberOfDataPoints:
            valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
            secondsPerPoint = int(valuesPerPoint * series.step)
            # Nudge start over a little bit so that the consolidation bands align with each call
            # removing 'jitter' seen when refreshing.
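            # Worked example with assumed numbers: if series.start is 1000,
            # series.step is 10 and secondsPerPoint is 60, then
            # nudge = 60 + (1000 % 10) - (1000 % 60) = 60 + 0 - 40 = 20,
            # so the series now starts at 1020, on a 60-second boundary.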
            nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
            series.start = series.start + nudge
            valuesToLose = int(nudge/series.step)
            for r in range(1, valuesToLose):
              del series[0]
            series.consolidate(valuesPerPoint)
            timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
          else:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
      elif 'noNullPoints' in requestOptions and any(data):
        for series in data:
          values = []
          for (index,v) in enumerate(series):
            if v is not None:
              timestamp = series.start + (index * series.step)
              values.append((v,timestamp))
          if len(values) > 0:
            series_data.append(dict(target=series.name, tags=series.tags, datapoints=values))
      else:
        for series in data:
          timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))

      output = json.dumps(series_data, indent=(2 if requestOptions['pretty'] else None)).replace('None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], output),
          content_type='text/javascript')
      else:
        response = HttpResponse(
          content=output,
          content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('JSON rendering time %6f' % (time() - jsonStart))
      log.rendering('Total request processing time %6f' % (time() - start))
      return response

    if format == 'dygraph':
      labels = ['Time']
      result = '{}'
      if data:
        datapoints = [[ts] for ts in range(data[0].start, data[0].end, data[0].step)]
        for series in data:
          labels.append(series.name)
          for i, point in enumerate(series):
            if point is None:
              point = 'null'
            elif point == float('inf'):
              point = 'Infinity'
            elif point == float('-inf'):
              point = '-Infinity'
            elif math.isnan(point):
              point = 'null'
            datapoints[i].append(point)
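        # With two series, line_template becomes '[%s000, %s, %s]'; the
        # '%s000' appends three zeros to the epoch-seconds timestamp,
        # producing the milliseconds dygraph expects.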
        line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
        lines = [line_template % tuple(points) for points in datapoints]
        result = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels), ', '.join(lines))
      response = HttpResponse(content=result, content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total dygraph rendering time %.6f' % (time() - start))
      return response

    if format == 'rickshaw':
      series_data = []
      for series in data:
        timestamps = range(series.start, series.end, series.step)
        datapoints = [{'x' : x, 'y' : y} for x, y in zip(timestamps, series)]
        series_data.append( dict(target=series.name, datapoints=datapoints) )
      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
          content_type='text/javascript')
      else:
        response = HttpResponse(content=json.dumps(series_data),
                                content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total rickshaw rendering time %.6f' % (time() - start))
      return response

    if format == 'raw':
      response = HttpResponse(content_type='text/plain')
      for series in data:
        response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
        response.write( ','.join(map(repr,series)) )
        response.write('\n')

      log.rendering('Total rawData rendering time %.6f' % (time() - start))
      return response

    if format == 'svg':
      graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
      graphOptions['outputFormat'] = 'pdf'

    if format == 'pickle':
      response = HttpResponse(content_type='application/pickle')
      seriesInfo = [series.getInfo() for series in data]
      pickle.dump(seriesInfo, response, protocol=-1)

      log.rendering('Total pickle rendering time %.6f' % (time() - start))
      return response


  # We've got the data, now to render it
  graphOptions['data'] = data
  if settings.REMOTE_RENDERING: # Rendering on other machines is faster in some situations
    image = delegateRendering(requestOptions['graphType'], graphOptions, requestContext['forwardHeaders'])
  else:
    image = doImageRender(requestOptions['graphClass'], graphOptions)

  useSVG = graphOptions.get('outputFormat') == 'svg'
  if useSVG and 'jsonp' in requestOptions:
    response = HttpResponse(
      content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
      content_type='text/javascript')
  elif graphOptions.get('outputFormat') == 'pdf':
    response = buildResponse(image, 'application/x-pdf')
  else:
    response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('Total rendering time %.6f seconds' % (time() - start))
  return response
Example #12
def _requestContext(request, queryParams):
    return {
        'forwardHeaders': extractForwardHeaders(request),
        'localOnly': queryParams.get('local') == '1',
    }
Example #13
def find_view(request):
  "View for finding metrics matching a given pattern"

  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  format = queryParams.get('format', 'treejson')
  leaves_only = int( queryParams.get('leavesOnly', 0) )
  local_only = int( queryParams.get('local', 0) )
  wildcards = int( queryParams.get('wildcards', 0) )

  tzinfo = pytz.timezone(settings.TIME_ZONE)
  if 'tz' in queryParams:
    try:
      tzinfo = pytz.timezone(queryParams['tz'])
    except pytz.UnknownTimeZoneError:
      pass

  if 'now' in queryParams:
    now = parseATTime(queryParams['now'], tzinfo)
  else:
    now = datetime.now(tzinfo)

  if 'from' in queryParams and str(queryParams['from']) != '-1':
    fromTime = int(epoch(parseATTime(queryParams['from'], tzinfo, now)))
  else:
    fromTime = -1

  if 'until' in queryParams and str(queryParams['until']) != '-1':
    untilTime = int(epoch(parseATTime(queryParams['until'], tzinfo, now)))
  else:
    untilTime = -1

  nodePosition = int( queryParams.get('position', -1) )
  jsonp = queryParams.get('jsonp', False)
  forward_headers = extractForwardHeaders(request)

  if fromTime == -1:
    fromTime = None
  if untilTime == -1:
    untilTime = None

  automatic_variants = int( queryParams.get('automatic_variants', 0) )

  try:
    query = str(queryParams['query'])
  except KeyError:
    return HttpResponseBadRequest(content="Missing required parameter 'query'",
                                  content_type='text/plain')

  if query == '':
    return HttpResponseBadRequest(content="Required parameter 'query' is empty",
                                  content_type='text/plain')

  if '.' in query:
    base_path = query.rsplit('.', 1)[0] + '.'
  else:
    base_path = ''

  if format == 'completer':
    query = query.replace('..', '*.')
    if not query.endswith('*'):
      query += '*'

    if automatic_variants:
      query_parts = query.split('.')
      for i,part in enumerate(query_parts):
        if ',' in part and '{' not in part:
          query_parts[i] = '{%s}' % part
      query = '.'.join(query_parts)

  try:
    matches = list(STORE.find(
      query, fromTime, untilTime,
      local=local_only,
      headers=forward_headers,
      leaves_only=leaves_only,
    ))
  except Exception:
    log.exception()
    raise

  log.info('find_view query=%s local_only=%s matches=%d' % (query, local_only, len(matches)))
  matches.sort(key=lambda node: node.name)
  log.info("received remote find request: pattern=%s from=%s until=%s local_only=%s format=%s matches=%d" % (query, fromTime, untilTime, local_only, format, len(matches)))

  if format == 'treejson':
    profile = getProfile(request)
    content = tree_json(matches, base_path, wildcards=profile.advancedUI or wildcards)
    response = json_response_for(request, content, jsonp=jsonp)

  elif format == 'nodelist':
    content = nodes_by_position(matches, nodePosition)
    response = json_response_for(request, content, jsonp=jsonp)

  elif format == 'pickle':
    content = pickle_nodes(matches)
    response = HttpResponse(content, content_type='application/pickle')

  elif format == 'msgpack':
    content = msgpack_nodes(matches)
    response = HttpResponse(content, content_type='application/x-msgpack')

  elif format == 'json':
    content = json_nodes(matches)
    response = json_response_for(request, content, jsonp=jsonp)

  elif format == 'completer':
    results = []
    for node in matches:
      node_info = dict(path=node.path, name=node.name, is_leaf=str(int(node.is_leaf)))
      if not node.is_leaf:
        node_info['path'] += '.'
      results.append(node_info)

    if len(results) > 1 and wildcards:
      wildcardNode = {'name' : '*'}
      results.append(wildcardNode)

    response = json_response_for(request, { 'metrics' : results }, jsonp=jsonp)

  else:
    return HttpResponseBadRequest(
        content="Invalid value for 'format' parameter",
        content_type='text/plain')

  response['Pragma'] = 'no-cache'
  response['Cache-Control'] = 'no-cache'
  return response
Example #14
def _requestContext(request):
  return {
    'forwardHeaders': extractForwardHeaders(request),
  }
Example #15
def parseOptions(request):
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  # Start with some defaults
  graphOptions = {'width' : 330, 'height' : 250}
  requestOptions = {}

  graphType = queryParams.get('graphType','line')
  if graphType not in GraphTypes:
    raise AssertionError("Invalid graphType '%s', must be one of %s"
                         % (graphType,list(GraphTypes)))
  graphClass = GraphTypes[graphType]

  # Fill in the requestOptions
  requestOptions['graphType'] = graphType
  requestOptions['graphClass'] = graphClass
  requestOptions['pieMode'] = queryParams.get('pieMode', 'average')
  cacheTimeout = int( queryParams.get('cacheTimeout', settings.DEFAULT_CACHE_DURATION) )
  requestOptions['targets'] = []
  requestOptions['forwardHeaders'] = extractForwardHeaders(request)

  # Extract the targets out of the queryParams
  mytargets = []
  # Normal format: ?target=path.1&target=path.2
  if len(queryParams.getlist('target')) > 0:
    mytargets = queryParams.getlist('target')

  # Rails/PHP/jQuery common practice format: ?target[]=path.1&target[]=path.2
  elif len(queryParams.getlist('target[]')) > 0:
    mytargets = queryParams.getlist('target[]')

  # Collect the targets
  for target in mytargets:
    requestOptions['targets'].append(target)

  template = dict()
  for key, val in queryParams.items():
    if key.startswith("template["):
      template[key[9:-1]] = val
  requestOptions['template'] = template

  if 'pickle' in queryParams:
    requestOptions['format'] = 'pickle'
  if 'rawData' in queryParams:
    requestOptions['format'] = 'raw'
  if 'format' in queryParams:
    requestOptions['format'] = queryParams['format']
    if 'jsonp' in queryParams:
      requestOptions['jsonp'] = queryParams['jsonp']

  requestOptions['pretty'] = bool(queryParams.get('pretty'))

  if 'noCache' in queryParams:
    requestOptions['noCache'] = True
  if 'maxDataPoints' in queryParams and queryParams['maxDataPoints'].isdigit():
    requestOptions['maxDataPoints'] = int(queryParams['maxDataPoints'])
  if 'noNullPoints' in queryParams:
    requestOptions['noNullPoints'] = True

  requestOptions['localOnly'] = queryParams.get('local') == '1'

  # Fill in the graphOptions
  format = requestOptions.get('format')
  if format == 'svg':
    graphOptions['outputFormat'] = 'svg'
  elif format == 'pdf':
    graphOptions['outputFormat'] = 'pdf'
  else:
    graphOptions['outputFormat'] = 'png'

  for opt in graphClass.customizable:
    if opt in queryParams:
      val = queryParams[opt]
      if (val.isdigit() or (val.startswith('-') and val[1:].isdigit())) and 'color' not in opt.lower():
        val = int(val)
      elif '.' in val and (val.replace('.','',1).isdigit() or (val.startswith('-') and val[1:].replace('.','',1).isdigit())):
        val = float(val)
      elif val.lower() in ('true','false'):
        val = val.lower() == 'true'
      elif val.lower() == 'default' or val == '':
        continue
      graphOptions[opt] = val

  tzinfo = pytz.timezone(settings.TIME_ZONE)
  if 'tz' in queryParams:
    try:
      tzinfo = pytz.timezone(queryParams['tz'])
    except pytz.UnknownTimeZoneError:
      pass
  requestOptions['tzinfo'] = tzinfo

  # Get the time interval for time-oriented graph types
  if graphType == 'line' or graphType == 'pie':
    if 'now' in queryParams:
      now = parseATTime(queryParams['now'], tzinfo)
    else:
      now = datetime.now(tzinfo)

    if 'until' in queryParams:
      untilTime = parseATTime(queryParams['until'], tzinfo, now)
    else:
      untilTime = now
    if 'from' in queryParams:
      fromTime = parseATTime(queryParams['from'], tzinfo, now)
    else:
      fromTime = parseATTime('-1d', tzinfo, now)

    startTime = min(fromTime, untilTime)
    endTime = max(fromTime, untilTime)
    assert startTime != endTime, "Invalid empty time range"

    requestOptions['startTime'] = startTime
    requestOptions['endTime'] = endTime
    timeRange = endTime - startTime
    queryTime = timeRange.days * 86400 + timeRange.seconds # convert the time delta to seconds
    if settings.DEFAULT_CACHE_POLICY and not queryParams.get('cacheTimeout'):
      timeouts = [timeout for period,timeout in settings.DEFAULT_CACHE_POLICY if period <= queryTime]
      cacheTimeout = max(timeouts or (0,))
    requestOptions['now'] = now

  if cacheTimeout == 0:
    requestOptions['noCache'] = True
  requestOptions['cacheTimeout'] = cacheTimeout

  requestOptions['xFilesFactor'] = float( queryParams.get('xFilesFactor', settings.DEFAULT_XFILES_FACTOR) )

  return (graphOptions, requestOptions)
Example #16
def _requestContext(request, queryParams):
  return {
    'forwardHeaders': extractForwardHeaders(request),
    'localOnly': queryParams.get('local') == '1',
  }
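For context, a hypothetical call site (the view name and wiring are assumptions, not code from this page): a view merges GET and POST parameters, as the other examples here do, then hands the resulting context to the store.

def index_view(request):  # hypothetical name, for illustration only
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  requestContext = _requestContext(request, queryParams)
  matches = STORE.get_index(requestContext)
  return json_response_for(request, matches,
                           jsonp=queryParams.get('jsonp', False))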