Example #1
def post_event(request):
    if request.method == 'POST':
        event = json.loads(request.body)
        assert isinstance(event, dict)

        tags = event.get('tags')
        if tags is not None:
            if isinstance(tags, list):
                tags = ' '.join(tags)
            # 'basestring' exists only on Python 2; on Python 3 this check would use str.
            elif not isinstance(tags, basestring):
                return HttpResponse(json.dumps({
                    'error':
                    '"tags" must be an array or space-separated string'
                }),
                                    status=400)
        if 'when' in event:
            when = epoch_to_dt(event['when'])
        else:
            when = now()

        Event.objects.create(
            what=event.get('what'),
            tags=tags,
            when=when,
            data=event.get('data', ''),
        )

        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
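
The view in Example #1 relies on an epoch_to_dt helper that is not shown here. A minimal sketch of such a helper, assuming it does the same conversion Example #5 performs inline (Unix epoch seconds to an aware UTC datetime); graphite-web's actual implementation may differ:

import datetime

import pytz
from django.utils.timezone import make_aware


def epoch_to_dt(epoch):
    # Hypothetical helper: convert Unix epoch seconds to an aware UTC datetime.
    return make_aware(datetime.datetime.utcfromtimestamp(epoch), pytz.utc)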
Example #2
def findSeries(request):
    if request.method not in ['GET', 'POST']:
        return HttpResponse(status=405)

    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    exprs = []
    # Normal format: ?expr=tag1=value1&expr=tag2=value2
    if len(queryParams.getlist('expr')) > 0:
        exprs = queryParams.getlist('expr')
    # Rails/PHP/jQuery common practice format: ?expr[]=tag1=value1&expr[]=tag2=value2
    elif len(queryParams.getlist('expr[]')) > 0:
        exprs = queryParams.getlist('expr[]')

    if not exprs:
        return HttpResponse(json.dumps(
            {'error': 'no tag expressions specified'}),
                            content_type='application/json',
                            status=400)

    return HttpResponse(json.dumps(
        STORE.tagdb.find_series(
            exprs,
            requestContext=_requestContext(request),
        ) if STORE.tagdb else [],
        indent=(2 if queryParams.get('pretty') else None),
        sort_keys=bool(queryParams.get('pretty'))),
                        content_type='application/json')
Example #3
def renderViewJson(requestOptions, data):
    series_data = []

    if any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime

        for series in data:
            if 'maxDataPoints' in requestOptions:
                maxDataPoints = requestOptions['maxDataPoints']
                if maxDataPoints == 1:
                    series.consolidate(len(series))
                else:
                    numberOfDataPoints = len(series)
                    if maxDataPoints < numberOfDataPoints:
                        valuesPerPoint = math.ceil(
                            float(numberOfDataPoints) / float(maxDataPoints))
                        secondsPerPoint = int(valuesPerPoint * series.step)
                        # Nudge start over a little bit so that the consolidation bands align with each call
                        # removing 'jitter' seen when refreshing.
                        nudge = secondsPerPoint + (
                            series.start % series.step) - (series.start %
                                                           secondsPerPoint)
                        series.start = series.start + nudge
                        valuesToLose = int(nudge / series.step)
                        for r in range(1, valuesToLose):
                            del series[0]
                        series.consolidate(valuesPerPoint)

            datapoints = series.datapoints()

            if 'noNullPoints' in requestOptions:
                datapoints = [
                    point for point in datapoints
                    if point[0] is not None and not math.isnan(point[0])
                ]
                if not datapoints:
                    continue

            series_data.append(
                dict(target=series.name,
                     tags=series.tags,
                     datapoints=datapoints))

    output = json.dumps(
        series_data,
        indent=(2 if requestOptions.get('pretty') else None)).replace(
            'None,', 'null,').replace('NaN,',
                                      'null,').replace('Infinity,', '1e9999,')

    if 'jsonp' in requestOptions:
        response = HttpResponse(content="%s(%s)" %
                                (requestOptions['jsonp'], output),
                                content_type='text/javascript')
    else:
        response = HttpResponse(content=output,
                                content_type='application/json')

    return response
Example #4
def autoCompleteValues(request):
    if request.method not in ['GET', 'POST']:
        return HttpResponse(status=405)

    queryParams = request.GET.copy()
    queryParams.update(request.POST)

    exprs = []
    # Normal format: ?expr=tag1=value1&expr=tag2=value2
    if len(queryParams.getlist('expr')) > 0:
        exprs = queryParams.getlist('expr')
    # Rails/PHP/jQuery common practice format: ?expr[]=tag1=value1&expr[]=tag2=value2
    elif len(queryParams.getlist('expr[]')) > 0:
        exprs = queryParams.getlist('expr[]')

    tag = queryParams.get('tag')
    if not tag:
        return HttpResponse(json.dumps({'error': 'no tag specified'}),
                            content_type='application/json',
                            status=400)

    valuePrefix = queryParams.get('valuePrefix')

    result = STORE.tagdb.auto_complete_values(
        exprs,
        tag,
        valuePrefix,
        limit=queryParams.get('limit'),
        requestContext=_requestContext(request))

    return HttpResponse(json.dumps(
        result,
        indent=(2 if queryParams.get('pretty') else None),
        sort_keys=bool(queryParams.get('pretty'))),
                        content_type='application/json')
Example #5
def post_event(request):
    if request.method == 'POST':
        event = json.loads(request.body)
        assert isinstance(event, dict)

        tags = event.get('tags')
        if tags:
            if not isinstance(tags, list):
                return HttpResponse(json.dumps(
                    {'error': '"tags" must be an array'}),
                                    status=400)
            tags = ' '.join(tags)
        if 'when' in event:
            when = make_aware(
                datetime.datetime.utcfromtimestamp(event.get('when')),
                pytz.utc)
        else:
            when = now()

        Event.objects.create(
            what=event.get('what'),
            tags=tags,
            when=when,
            data=event.get('data', ''),
        )

        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
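
For reference, a minimal client sketch for exercising a view like the one above. The endpoint URL is an assumption for illustration, not part of the code shown:

import json
import time
import urllib.request

payload = {
    'what': 'deploy finished',
    'tags': ['deploy', 'web01'],
    'when': int(time.time()),  # Unix epoch seconds, as the view expects
    'data': 'release 1.2.3',
}
req = urllib.request.Request(
    'http://localhost:8000/events/',  # assumed URL for the view
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
    method='POST',
)
with urllib.request.urlopen(req) as resp:
    print(resp.status)  # 200 on success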
Example #6
def tagList(request):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return HttpResponse(
        json.dumps(STORE.tagdb.list_tags() if STORE.tagdb else []),
        content_type='application/json')
Example #7
def tagDetails(request, tag):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return HttpResponse(
        json.dumps(STORE.tagdb.get_tag(tag) if STORE.tagdb else None),
        content_type='application/json')
Example #8
def renderViewDygraph(requestOptions, data):
    labels = ['Time']
    output = '{}'
    if data:
        datapoints = [[ts]
                      for ts in range(data[0].start, data[0].end, data[0].step)
                      ]
        for series in data:
            labels.append(series.name)
            for i, point in enumerate(series):
                if point is None:
                    point = 'null'
                elif point == float('inf'):
                    point = 'Infinity'
                elif point == float('-inf'):
                    point = '-Infinity'
                elif math.isnan(point):
                    point = 'null'
                datapoints[i].append(point)
        line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
        lines = [line_template % tuple(points) for points in datapoints]
        output = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels),
                                                     ', '.join(lines))

    if 'jsonp' in requestOptions:
        response = HttpResponse(content="%s(%s)" %
                                (requestOptions['jsonp'], output),
                                content_type='text/javascript')
    else:
        response = HttpResponse(content=output,
                                content_type='application/json')

    return response
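
To make the string templating above concrete, here is the same line_template construction run standalone for two series; the trailing '000' turns epoch seconds into the millisecond timestamps Dygraph expects:

num_series = 2  # stands in for len(data)
line_template = '[%%s000%s]' % ''.join([', %s'] * num_series)
print(line_template)                              # [%s000, %s, %s]
print(line_template % (1514764800, 1.5, 'null'))  # [1514764800000, 1.5, null]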
Example #9
def search(request):
    query = request.POST.get('query')
    if not query:
        return HttpResponse("")

    patterns = query.split()
    regexes = [re.compile(p, re.I) for p in patterns]

    def matches(s):
        for regex in regexes:
            if regex.search(s):
                return True
        return False

    results = []

    # A context manager guarantees the index file is closed even on error.
    with open(settings.INDEX_FILE) as index_file:
        for line in index_file:
            if matches(line):
                results.append(line.strip())
            if len(results) >= 100:
                break

    result_string = ','.join(results)
    return HttpResponse(result_string, content_type='text/plain')
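
To make the matching rule concrete: the query is split on whitespace and a line matches if any of the resulting case-insensitive regexes finds a match. A small standalone illustration:

import re

patterns = 'cpu web0'.split()
regexes = [re.compile(p, re.I) for p in patterns]


def matches(s):
    return any(regex.search(s) for regex in regexes)


print(matches('servers.web01.CPU.load'))    # True
print(matches('servers.db01.memory.free'))  # False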
Example #10
def tagDetails(request, tag):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return HttpResponse(json.dumps(
        STORE.tagdb.get_tag(tag, valueFilter=request.GET.get('filter'))
        if STORE.tagdb else None,
        indent=(2 if request.GET.get('pretty') else None),
        sort_keys=bool(request.GET.get('pretty'))),
                        content_type='application/json')
Example #11
def tagList(request):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return HttpResponse(json.dumps(
        STORE.tagdb.list_tags(
            tagFilter=request.GET.get('filter')) if STORE.tagdb else [],
        indent=(2 if request.GET.get('pretty') else None),
        sort_keys=bool(request.GET.get('pretty'))),
                        content_type='application/json')
Example #12
def get_data(request):
    # Note: request.REQUEST was removed in Django 1.9; Example #15 merges GET and POST explicitly.
    if 'jsonp' in request.REQUEST:
        response = HttpResponse("%s(%s)" %
                                (request.REQUEST.get('jsonp'),
                                 json.dumps(fetch(request), cls=EventEncoder)),
                                content_type='text/javascript')
    else:
        response = HttpResponse(json.dumps(fetch(request), cls=EventEncoder),
                                content_type="application/json")
    return response
Example #13
def tagSeries(request):
    if request.method != 'POST':
        return HttpResponse(status=405)

    path = request.POST.get('path')
    if not path:
        return HttpResponse(json.dumps({'error': 'no path specified'}),
                            status=400)

    return HttpResponse(
        json.dumps(STORE.tagdb.tag_series(path)) if STORE.tagdb else 'null',
        content_type='application/json')
Example #14
def evaluate(request):
  if 'commandInput' not in request.GET:
    output = commands.stderr("No commandInput parameter!")
    return HttpResponse(output, content_type='text/plain')

  #Variable substitution
  profile = getProfile(request)
  my_vars = {}
  for variable in profile.variable_set.all():
    my_vars[variable.name] = variable.value
  cmd = request.GET['commandInput']
  while '$' in cmd and not cmd.startswith('code'):
    i = cmd.find('$')
    j = i+1
    for char in cmd[i+1:]:
      if char not in letters: break
      j += 1
    var = cmd[i+1:j]
    if var in my_vars:
      cmd = cmd[:i] + my_vars[var] + cmd[j:]
    else:
      output = commands.stderr("Unknown variable %s" % var)
      return HttpResponse(output, content_type='text/plain')

  if cmd == '?': cmd = 'help'

  try:
    tokens = parser.parseInput(cmd)

    if not tokens.command:
      output = commands.stderr("Invalid syntax")
      return HttpResponse(output, content_type='text/plain')

    handler_name = '_' + tokens.command
    handler = vars(commands).get(handler_name)
    if handler is None:
      output = commands.stderr("Unknown command")
      return HttpResponse(output, content_type='text/plain')

    args = dict( tokens.items() )
    del args['command']
    output = handler(request, **args)
  except:
    output = commands.printException()

  #Save command to history
  history = profile.history.split('\n')
  history.insert(0,cmd)
  while len(history) > 30: history.pop()
  profile.history = '\n'.join(history)
  profile.save()

  return HttpResponse(output, content_type='text/plain')
Example #15
def get_data(request):
    query_params = request.GET.copy()
    query_params.update(request.POST)

    if 'jsonp' in query_params:
        response = HttpResponse("%s(%s)" %
                                (query_params.get('jsonp'),
                                 json.dumps(fetch(request), cls=EventEncoder)),
                                content_type='text/javascript')
    else:
        response = HttpResponse(json.dumps(fetch(request), cls=EventEncoder),
                                content_type='application/json')
    return response
Example #16
def json_response(nodes, request=None):
    if request:
        # request.REQUEST was removed in Django 1.9; Example #17 reads GET and POST explicitly.
        jsonp = request.REQUEST.get('jsonp', False)
    else:
        jsonp = False
    json_data = json.dumps(nodes)
    if jsonp:
        response = HttpResponse("%s(%s)" % (jsonp, json_data),
                                content_type="text/javascript")
    else:
        response = HttpResponse(json_data, content_type="application/json")
    response['Pragma'] = 'no-cache'
    response['Cache-Control'] = 'no-cache'
    return response
Example #17
def json_response(nodes, request=None):
  if request:
    jsonp = request.GET.get('jsonp', False) or request.POST.get('jsonp', False)
  else:
    jsonp = False
  #json = str(nodes) #poor man's json encoder for simple types
  json_data = json.dumps(nodes)
  if jsonp:
    response = HttpResponse("%s(%s)" % (jsonp, json_data),
                            content_type="text/javascript")
  else:
    response = HttpResponse(json_data, content_type="application/json")
  response['Pragma'] = 'no-cache'
  response['Cache-Control'] = 'no-cache'
  return response
Example #18
def renderViewRaw(requestOptions, data):
  response = HttpResponse(content_type='text/plain')

  for series in data:
    response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
    response.write( ','.join(map(repr,series)) )
    response.write('\n')

  return response
Example #19
def full_path_zon_test(request):
    # 0. Parse parameters
    queryParams = request.GET.copy()
    queryParams.update(request.POST)
    # TODO: remove the default values, the default values are what we are using currently
    port = queryParams.get('port', '31509')
    metric_name = queryParams.get('metric', 'test.fullstack.graphite')

    # 1. Send data to graphite
    random_data = _send_random_data(metric_name)

    # 2. Wait (allow some latency)
    time.sleep(1)

    # 3. Query graphite
    res = urlopen(
        "http://localhost:{0}/render/?format=json&target={1}&from=-1min&noCache"
        .format(port, metric_name))
    s = res.read().decode('utf-8')
    json_obj = json.loads(s)

    # 4. Check Result
    result = ""
    details = "Expected value: {0}".format(random_data)
    if not json_obj:
        result = "fail"
        details = "graphite query response is empty"
    else:
        json_obj = json_obj[0]
        if "target" not in json_obj:
            result = "fail"
            details = "target field is missing"
        elif "datapoints" not in json_obj:
            result = "fail"
            details = "datapoints field is missing"
        elif len(json_obj["datapoints"]) != 1:
            result = "fail"
            details = "the number of returned datapoints is inconsistent"
        elif json_obj["datapoints"][0][0] is None:
            result = "fail"
            details = "datapoint is returned but its value is None"
        else:
            real_value = int(json_obj["datapoints"][0][0])
            expected_value = random_data
            if real_value == expected_value:
                result = "pass"
            else:
                result = "fail"
                details = "Expected value: {0}, Real Value: {1}".format(
                    expected_value, real_value)

    # 5. Response
    result_json_obj = {"result": result, "details": details}
    response = HttpResponse(content=json.dumps(result_json_obj),
                            content_type='application/json')
    return response
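
The test above depends on a _send_random_data helper that is not shown. A minimal sketch under the assumption that it writes a single datapoint in Carbon's plaintext protocol ("metric value timestamp\n") to a local relay and returns the value it sent; the host and port are assumptions:

import random
import socket
import time


def _send_random_data(metric_name, host='localhost', port=2003):
    # Hypothetical helper: push one random integer datapoint to Carbon
    # over the plaintext protocol and return it for later comparison.
    value = random.randint(0, 1000)
    line = '{0} {1} {2}\n'.format(metric_name, value, int(time.time()))
    sock = socket.create_connection((host, port))
    try:
        sock.sendall(line.encode('utf-8'))
    finally:
        sock.close()
    return value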
Example #20
def cache_metric(request):
    queryParams = request.GET.copy()
    metric = queryParams.get('metric', 'yun.test')
    datapoints = CarbonLink.query(metric)
    result_json_obj = {"target": metric, "datapoints": datapoints}
    response = HttpResponse(content=json.dumps(result_json_obj),
                            content_type='application/json')
    return response
Example #21
def save_event(request):
    if request.method == 'POST' or request.method == 'PUT':
        event = json.loads(request.body)
        assert isinstance(event, dict)

        tags = event.get('tags')
        if tags:
            if not isinstance(tags, list):
                return HttpResponse(
                    json.dumps({'error': '"tags" must be an array'}),
                    status=400)
            tags = ' '.join(tags)
        if 'when' in event:
            when = make_aware(
                datetime.datetime.utcfromtimestamp(
                    event.get('when')), pytz.utc)
        else:
            when = now()

        if request.method == 'POST':
            Event.objects.create(
                what=event.get('what'),
                tags=tags,
                when=when,
                data=event.get('data', ''),
            )
        elif request.method == 'PUT':
            event_id = event.get('id')
            try:
                e = Event.objects.get(id=event_id)
                e.what = event.get('what')
                e.tags = tags
                e.when = when
                e.data = event.get('data', '')
                e.save()
            except ObjectDoesNotExist:
                error = {'error': 'Event matching query does not exist'}
                response = JsonResponse(error, status=404)
                return response
        else:
            return HttpResponse(status=405)

        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
Example #22
def tagList(request, queryParams):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return STORE.tagdb.list_tags(
        tagFilter=request.GET.get('filter'),
        limit=request.GET.get('limit'),
        requestContext=_requestContext(request),
    )
Example #23
def post_event(request):
    if request.method == 'POST':
        event = json.loads(request.body)
        assert isinstance(event, dict)

        values = {}
        values["what"] = event["what"]
        values["tags"] = event.get("tags", None)
        values["when"] = datetime.datetime.fromtimestamp(
            event.get("when", time.time()))
        if "data" in event:
            values["data"] = event["data"]

        e = models.Event(**values)
        e.save()

        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
Example #24
def renderViewRickshaw(requestOptions, data):
    series_data = []
    for series in data:
        timestamps = range(series.start, series.end, series.step)
        datapoints = [{'x': x, 'y': y} for x, y in zip(timestamps, series)]
        series_data.append(dict(target=series.name, datapoints=datapoints))

    output = json.dumps(series_data,
                        indent=(2 if requestOptions.get('pretty') else None))

    if 'jsonp' in requestOptions:
        response = HttpResponse(content="%s(%s)" %
                                (requestOptions['jsonp'], output),
                                content_type='text/javascript')
    else:
        response = HttpResponse(content=output,
                                content_type='application/json')

    return response
Example #25
def tagDetails(request, queryParams, tag):
    if request.method != 'GET':
        return HttpResponse(status=405)

    return STORE.tagdb.get_tag(
        tag,
        valueFilter=queryParams.get('filter'),
        limit=queryParams.get('limit'),
        requestContext=_requestContext(request),
    )
Example #26
def tagSeries(request, queryParams):
    if request.method != 'POST':
        return HttpResponse(status=405)

    path = queryParams.get('path')
    if not path:
        raise HttpError('no path specified', status=400)

    return STORE.tagdb.tag_series(path,
                                  requestContext=_requestContext(request))
Example #27
def renderViewCsv(requestOptions, data):
  response = HttpResponse(content_type='text/csv')
  writer = csv.writer(response, dialect='excel')

  for series in data:
    for i, value in enumerate(series):
      timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
      writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))

  return response
Example #28
def post_event(request):
    if request.method == 'POST':
        event = json.loads(request.body)
        assert isinstance(event, dict)

        if 'when' in event:
            when = make_aware(
                datetime.datetime.utcfromtimestamp(event['when']), pytz.utc)
        else:
            when = now()
        Event.objects.create(
            what=event['what'],
            tags=event.get("tags"),
            when=when,
            data=event.get("data", ""),
        )
        return HttpResponse(status=200)
    else:
        return HttpResponse(status=405)
Example #29
def renderViewRaw(requestOptions, data):
  response = HttpResponse(content_type='text/plain')

  for series in data:
    response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
    response.write( ','.join(map(repr,series)) )
    response.write('\n')

  return response
Example #30
def autocomplete(request):
  assert 'path' in request.GET, "Invalid request, no 'path' parameter!"
  path = request.GET['path']
  shortnames = bool( request.GET.get('short') )

  if request.GET['path'][:1] == '!':
    profile = getProfile(request)
    html = completer.completeHistory(path, profile)
  else:
    html = completer.completePath(path, shortnames=shortnames)

  return HttpResponse( html )
Example #31
def delete_event(request):
    if request.method == 'DELETE':
        try:
            data = json.loads(request.body)
            assert isinstance(data, dict)

            event_ids = data.get('event_ids')
            if not isinstance(event_ids, list):
                return HttpResponse(
                    json.dumps({'error': '"event_ids" must be an array'}),
                    status=400)

            for event_id in event_ids:
                # The manager has no delete(id=...); get() then delete() so a
                # missing id raises ObjectDoesNotExist and hits the handler below.
                Event.objects.get(id=event_id).delete()

            return HttpResponse(status=200)
        except ObjectDoesNotExist:
            error = {'error': 'Event matching query does not exist'}
            response = JsonResponse(error, status=404)
            return response
    else:
        return HttpResponse(status=405)
Example #32
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  requestContext = {
    'startTime' : requestOptions['startTime'],
    'endTime' : requestOptions['endTime'],
    'localOnly' : requestOptions['localOnly'],
    'template' : requestOptions['template'],
    'targets': [],
    'data' : []
  }
  data = requestContext['data']

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    cachedResponse = cache.get(requestKey)
    if cachedResponse:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      requestContext['cachedResponse'] = True
      requestContext['targets'].append((requestOptions['targets'], time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return cachedResponse
    else:
      log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name,value = target.split(':',1)
          value = float(value)
        except:
          raise ValueError("Invalid target '%s'" % target)
        data.append( (name,value) )
      else:
        t = time()
        seriesList = evaluateTarget(requestContext, target)

        for series in seriesList:
          func = PieFunctions[requestOptions['pieMode']]
          data.append( (series.name, func(requestContext, series) or 0 ))
        requestContext['targets'].append((target, time() - t))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    if useCache:
      t = time()
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime)
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
        requestContext['cachedData'] = True
        requestContext['targets'].append((targets, time() - t))
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)
    else:
      cachedData = None

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else: # Have to actually retrieve the data now
      for target in requestOptions['targets']:
        if not target.strip():
          continue
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
        requestContext['targets'].append((target, time() - t))
        data.extend(seriesList)

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
      response = HttpResponse(content_type='text/csv')
      writer = csv.writer(response, dialect='excel')

      for series in data:
        for i, value in enumerate(series):
          timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
          writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))

      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'json':
      series_data = []
      if 'maxDataPoints' in requestOptions and any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime
        maxDataPoints = requestOptions['maxDataPoints']
        for series in data:
          numberOfDataPoints = timeRange/series.step
          if maxDataPoints < numberOfDataPoints:
            valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
            secondsPerPoint = int(valuesPerPoint * series.step)
            # Nudge start over a little bit so that the consolidation bands align with each call
            # removing 'jitter' seen when refreshing.
            nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
            series.start = series.start + nudge
            valuesToLose = int(nudge/series.step)
            for r in range(1, valuesToLose):
              del series[0]
            series.consolidate(valuesPerPoint)
            timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
          else:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, datapoints=datapoints))
      else:
        for series in data:
          timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, datapoints=datapoints))

      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
          content_type='text/javascript')
      else:
        response = HttpResponse(content=json.dumps(series_data),
                                content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'raw':
      response = HttpResponse(content_type='text/plain')
      for series in data:
        response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
        response.write( ','.join(map(str,series)) )
        response.write('\n')

      log.rendering('Total rawData rendering time %.6f' % (time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return response

    if format == 'svg':
      graphOptions['outputFormat'] = 'svg'

    if format == 'pickle':
      response = HttpResponse(content_type='application/pickle')
      seriesInfo = [series.getInfo() for series in data]
      pickle.dump(seriesInfo, response, protocol=-1)

      log.rendering('Total pickle rendering time %.6f' % (time() - start))
      log_query(request, requestOptions, requestContext, time() - start)
      return response


  # We've got the data, now to render it
  graphOptions['data'] = data
  if settings.REMOTE_RENDERING: # Rendering on other machines is faster in some situations
    image = delegateRendering(requestOptions['graphType'], graphOptions)
  else:
    image = doImageRender(requestOptions['graphClass'], graphOptions)

  useSVG = graphOptions.get('outputFormat') == 'svg'
  if useSVG and 'jsonp' in requestOptions:
    response = HttpResponse(
      content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
      content_type='text/javascript')
  else:
    response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('Total rendering time %.6f seconds' % (time() - start))
  log_query(request, requestOptions, requestContext, time() - start)
  return response
Example #33
def renderView(request):
    start = time()
    (graphOptions, requestOptions) = parseOptions(request)
    useCache = "noCache" not in requestOptions
    cacheTimeout = requestOptions["cacheTimeout"]
    requestContext = {
        "startTime": requestOptions["startTime"],
        "endTime": requestOptions["endTime"],
        "localOnly": requestOptions["localOnly"],
        "template": requestOptions["template"],
        "data": [],
    }
    data = requestContext["data"]

    # First we check the request cache
    if useCache:
        requestKey = hashRequest(request)
        cachedResponse = cache.get(requestKey)
        if cachedResponse:
            log.cache("Request-Cache hit [%s]" % requestKey)
            log.rendering("Returned cached response in %.6f" % (time() - start))
            return cachedResponse
        else:
            log.cache("Request-Cache miss [%s]" % requestKey)

    # Now we prepare the requested data
    if requestOptions["graphType"] == "pie":
        for target in requestOptions["targets"]:
            if target.find(":") >= 0:
                try:
                    name, value = target.split(":", 1)
                    value = float(value)
                except:
                    raise ValueError("Invalid target '%s'" % target)
                data.append((name, value))
            else:
                seriesList = evaluateTarget(requestContext, target)

                for series in seriesList:
                    func = PieFunctions[requestOptions["pieMode"]]
                    data.append((series.name, func(requestContext, series) or 0))

    elif requestOptions["graphType"] == "line":
        # Let's see if at least our data is cached
        if useCache:
            targets = requestOptions["targets"]
            startTime = requestOptions["startTime"]
            endTime = requestOptions["endTime"]
            dataKey = hashData(targets, startTime, endTime)
            cachedData = cache.get(dataKey)
            if cachedData:
                log.cache("Data-Cache hit [%s]" % dataKey)
            else:
                log.cache("Data-Cache miss [%s]" % dataKey)
        else:
            cachedData = None

        if cachedData is not None:
            requestContext["data"] = data = cachedData
        else:  # Have to actually retrieve the data now
            for target in requestOptions["targets"]:
                if not target.strip():
                    continue
                t = time()
                seriesList = evaluateTarget(requestContext, target)
                log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
                data.extend(seriesList)

            if useCache:
                cache.add(dataKey, data, cacheTimeout)

        # If data is all we needed, we're done
        format = requestOptions.get("format")
        if format == "csv":
            response = HttpResponse(content_type="text/csv")
            writer = csv.writer(response, dialect="excel")

            for series in data:
                for i, value in enumerate(series):
                    timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions["tzinfo"])
                    writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))

            return response

        if format == "json":
            series_data = []
            if "maxDataPoints" in requestOptions and any(data):
                startTime = min([series.start for series in data])
                endTime = max([series.end for series in data])
                timeRange = endTime - startTime
                maxDataPoints = requestOptions["maxDataPoints"]
                for series in data:
                    numberOfDataPoints = timeRange / series.step
                    if maxDataPoints < numberOfDataPoints:
                        valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
                        secondsPerPoint = int(valuesPerPoint * series.step)
                        # Nudge start over a little bit so that the consolidation bands align with each call
                        # removing 'jitter' seen when refreshing.
                        nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
                        series.start = series.start + nudge
                        valuesToLose = int(nudge / series.step)
                        for r in range(1, valuesToLose):
                            del series[0]
                        series.consolidate(valuesPerPoint)
                        timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
                    else:
                        timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(dict(target=series.name, datapoints=datapoints))
            else:
                for series in data:
                    timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
                    datapoints = zip(series, timestamps)
                    series_data.append(dict(target=series.name, datapoints=datapoints))

            if "jsonp" in requestOptions:
                response = HttpResponse(
                    content="%s(%s)" % (requestOptions["jsonp"], json.dumps(series_data)),
                    content_type="text/javascript",
                )
            else:
                response = HttpResponse(content=json.dumps(series_data), content_type="application/json")

            if useCache:
                cache.add(requestKey, response, cacheTimeout)
                patch_response_headers(response, cache_timeout=cacheTimeout)
            else:
                add_never_cache_headers(response)
            return response

        if format == "raw":
            response = HttpResponse(content_type="text/plain")
            for series in data:
                response.write("%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step))
                response.write(",".join(map(str, series)))
                response.write("\n")

            log.rendering("Total rawData rendering time %.6f" % (time() - start))
            return response

        if format == "svg":
            graphOptions["outputFormat"] = "svg"
        elif format == "pdf":
            graphOptions["outputFormat"] = "pdf"

        if format == "pickle":
            response = HttpResponse(content_type="application/pickle")
            seriesInfo = [series.getInfo() for series in data]
            pickle.dump(seriesInfo, response, protocol=-1)

            log.rendering("Total pickle rendering time %.6f" % (time() - start))
            return response

    # We've got the data, now to render it
    graphOptions["data"] = data
    if settings.REMOTE_RENDERING:  # Rendering on other machines is faster in some situations
        image = delegateRendering(requestOptions["graphType"], graphOptions)
    else:
        image = doImageRender(requestOptions["graphClass"], graphOptions)

    useSVG = graphOptions.get("outputFormat") == "svg"
    if useSVG and "jsonp" in requestOptions:
        response = HttpResponse(
            content="%s(%s)" % (requestOptions["jsonp"], json.dumps(image)), content_type="text/javascript"
        )
    elif graphOptions.get("outputFormat") == "pdf":
        response = buildResponse(image, "application/x-pdf")
    else:
        response = buildResponse(image, "image/svg+xml" if useSVG else "image/png")

    if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
    else:
        add_never_cache_headers(response)

    log.rendering("Total rendering time %.6f seconds" % (time() - start))
    return response
Example #34
def renderView(request):
  start = time()
  (graphOptions, requestOptions) = parseOptions(request)
  useCache = 'noCache' not in requestOptions
  cacheTimeout = requestOptions['cacheTimeout']
  # TODO: Make that a namedtuple or a class.
  requestContext = {
    'startTime' : requestOptions['startTime'],
    'endTime' : requestOptions['endTime'],
    'now': requestOptions['now'],
    'localOnly' : requestOptions['localOnly'],
    'template' : requestOptions['template'],
    'tzinfo' : requestOptions['tzinfo'],
    'forwardHeaders': extractForwardHeaders(request),
    'data' : [],
    'prefetched' : {},
  }
  data = requestContext['data']

  # First we check the request cache
  if useCache:
    requestKey = hashRequest(request)
    cachedResponse = cache.get(requestKey)
    if cachedResponse:
      log.cache('Request-Cache hit [%s]' % requestKey)
      log.rendering('Returned cached response in %.6f' % (time() - start))
      return cachedResponse
    else:
      log.cache('Request-Cache miss [%s]' % requestKey)

  # Now we prepare the requested data
  if requestOptions['graphType'] == 'pie':
    for target in requestOptions['targets']:
      if target.find(':') >= 0:
        try:
          name,value = target.split(':',1)
          value = float(value)
        except:
          raise ValueError("Invalid target '%s'" % target)
        data.append( (name,value) )
      else:
        seriesList = evaluateTarget(requestContext, target)

        for series in seriesList:
          func = PieFunctions[requestOptions['pieMode']]
          data.append( (series.name, func(requestContext, series) or 0 ))

  elif requestOptions['graphType'] == 'line':
    # Let's see if at least our data is cached
    if useCache:
      targets = requestOptions['targets']
      startTime = requestOptions['startTime']
      endTime = requestOptions['endTime']
      dataKey = hashData(targets, startTime, endTime)
      cachedData = cache.get(dataKey)
      if cachedData:
        log.cache("Data-Cache hit [%s]" % dataKey)
      else:
        log.cache("Data-Cache miss [%s]" % dataKey)
    else:
      cachedData = None

    if cachedData is not None:
      requestContext['data'] = data = cachedData
    else: # Have to actually retrieve the data now
      targets = requestOptions['targets']
      if settings.REMOTE_PREFETCH_DATA and not requestOptions.get('localOnly'):
        prefetchRemoteData(requestContext, targets)

      for target in targets:
        if not target.strip():
          continue
        t = time()
        seriesList = evaluateTarget(requestContext, target)
        log.rendering("Retrieval of %s took %.6f" % (target, time() - t))
        data.extend(seriesList)

      if useCache:
        cache.add(dataKey, data, cacheTimeout)

    # If data is all we needed, we're done
    format = requestOptions.get('format')
    if format == 'csv':
      response = HttpResponse(content_type='text/csv')
      writer = csv.writer(response, dialect='excel')

      for series in data:
        for i, value in enumerate(series):
          timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
          writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))

      return response

    if format == 'json':
      jsonStart = time()

      series_data = []
      if 'maxDataPoints' in requestOptions and any(data):
        startTime = min([series.start for series in data])
        endTime = max([series.end for series in data])
        timeRange = endTime - startTime
        maxDataPoints = requestOptions['maxDataPoints']
        for series in data:
          numberOfDataPoints = timeRange/series.step
          if maxDataPoints < numberOfDataPoints:
            valuesPerPoint = math.ceil(float(numberOfDataPoints) / float(maxDataPoints))
            secondsPerPoint = int(valuesPerPoint * series.step)
            # Nudge start over a little bit so that the consolidation bands align with each call
            # removing 'jitter' seen when refreshing.
            nudge = secondsPerPoint + (series.start % series.step) - (series.start % secondsPerPoint)
            series.start = series.start + nudge
            valuesToLose = int(nudge/series.step)
            for r in range(1, valuesToLose):
              del series[0]
            series.consolidate(valuesPerPoint)
            timestamps = range(int(series.start), int(series.end) + 1, int(secondsPerPoint))
          else:
            timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))
      elif 'noNullPoints' in requestOptions and any(data):
        for series in data:
          values = []
          for (index,v) in enumerate(series):
            if v is not None:
              timestamp = series.start + (index * series.step)
              values.append((v,timestamp))
          if len(values) > 0:
            series_data.append(dict(target=series.name, tags=series.tags, datapoints=values))
      else:
        for series in data:
          timestamps = range(int(series.start), int(series.end) + 1, int(series.step))
          datapoints = zip(series, timestamps)
          series_data.append(dict(target=series.name, tags=series.tags, datapoints=datapoints))

      output = json.dumps(series_data, indent=(2 if requestOptions['pretty'] else None)).replace('None,', 'null,').replace('NaN,', 'null,').replace('Infinity,', '1e9999,')

      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], output),
          content_type='text/javascript')
      else:
        response = HttpResponse(
          content=output,
          content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('JSON rendering time %6f' % (time() - jsonStart))
      log.rendering('Total request processing time %6f' % (time() - start))
      return response

    if format == 'dygraph':
      labels = ['Time']
      result = '{}'
      if data:
        datapoints = [[ts] for ts in range(data[0].start, data[0].end, data[0].step)]
        for series in data:
          labels.append(series.name)
          for i, point in enumerate(series):
            if point is None:
              point = 'null'
            elif point == float('inf'):
              point = 'Infinity'
            elif point == float('-inf'):
              point = '-Infinity'
            elif math.isnan(point):
              point = 'null'
            datapoints[i].append(point)
        line_template = '[%%s000%s]' % ''.join([', %s'] * len(data))
        lines = [line_template % tuple(points) for points in datapoints]
        result = '{"labels" : %s, "data" : [%s]}' % (json.dumps(labels), ', '.join(lines))
      response = HttpResponse(content=result, content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total dygraph rendering time %.6f' % (time() - start))
      return response

    if format == 'rickshaw':
      series_data = []
      for series in data:
        timestamps = range(series.start, series.end, series.step)
        datapoints = [{'x' : x, 'y' : y} for x, y in zip(timestamps, series)]
        series_data.append( dict(target=series.name, datapoints=datapoints) )
      if 'jsonp' in requestOptions:
        response = HttpResponse(
          content="%s(%s)" % (requestOptions['jsonp'], json.dumps(series_data)),
          content_type='text/javascript')
      else:
        response = HttpResponse(content=json.dumps(series_data),
                                content_type='application/json')

      if useCache:
        cache.add(requestKey, response, cacheTimeout)
        patch_response_headers(response, cache_timeout=cacheTimeout)
      else:
        add_never_cache_headers(response)
      log.rendering('Total rickshaw rendering time %.6f' % (time() - start))
      return response

    if format == 'raw':
      response = HttpResponse(content_type='text/plain')
      for series in data:
        response.write( "%s,%d,%d,%d|" % (series.name, series.start, series.end, series.step) )
        response.write( ','.join(map(repr,series)) )
        response.write('\n')

      log.rendering('Total rawData rendering time %.6f' % (time() - start))
      return response

    if format == 'svg':
      graphOptions['outputFormat'] = 'svg'
    elif format == 'pdf':
      graphOptions['outputFormat'] = 'pdf'

    if format == 'pickle':
      response = HttpResponse(content_type='application/pickle')
      seriesInfo = [series.getInfo() for series in data]
      pickle.dump(seriesInfo, response, protocol=-1)

      log.rendering('Total pickle rendering time %.6f' % (time() - start))
      return response


  # We've got the data, now to render it
  graphOptions['data'] = data
  if settings.REMOTE_RENDERING: # Rendering on other machines is faster in some situations
    image = delegateRendering(requestOptions['graphType'], graphOptions, requestContext['forwardHeaders'])
  else:
    image = doImageRender(requestOptions['graphClass'], graphOptions)

  useSVG = graphOptions.get('outputFormat') == 'svg'
  if useSVG and 'jsonp' in requestOptions:
    response = HttpResponse(
      content="%s(%s)" % (requestOptions['jsonp'], json.dumps(image)),
      content_type='text/javascript')
  elif graphOptions.get('outputFormat') == 'pdf':
    response = buildResponse(image, 'application/x-pdf')
  else:
    response = buildResponse(image, 'image/svg+xml' if useSVG else 'image/png')

  if useCache:
    cache.add(requestKey, response, cacheTimeout)
    patch_response_headers(response, cache_timeout=cacheTimeout)
  else:
    add_never_cache_headers(response)

  log.rendering('Total rendering time %.6f seconds' % (time() - start))
  return response
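
A minimal sketch of wiring a view like renderView into a Django URLconf; the import path and URL below are assumptions for illustration and not necessarily graphite-web's real configuration:

from django.urls import path

from graphite.render.views import renderView  # import path is an assumption

urlpatterns = [
    path('render/', renderView),
]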