コード例 #1
0
  def findHelper(self, where):
    """Run a query against the 'search' table and collect matching row keys.

    where -- raw HQL predicate text, interpolated verbatim into the query
             (callers are responsible for its contents).
    Returns the list of row keys reported by the scan, in callback order.
    """
    query = 'SELECT * FROM search WHERE %s' % (where)

    found = []

    # Callback invoked once per result cell; only the row key is kept.
    def collect(key, family, column, val, ts):
      found.append(key)

    HyperTablePool.doQuery(query, collect)
    return found
コード例 #2
0
ファイル: views.py プロジェクト: slackhappy/graphite-ht
def searchHypertable(request):
  """Handle a metric-search POST request against the 'search' table.

  Reads the search term from request.POST['query'], regex-matches it against
  row keys, and returns a text/plain HttpResponse of the matched (unprefixed)
  metric names joined by commas. An empty query yields an empty response.
  """
  raw = request.POST['query']
  # Check the raw user input: the original checked AFTER addPrefix, which
  # presumably makes even an empty query truthy, so the guard never fired.
  if not raw:
    return HttpResponse("")

  query = addPrefix(raw)
  log.info('query: %s', query)

  # SECURITY: escape backslashes and double quotes so user-supplied text
  # cannot break out of the quoted REGEXP literal (HQL injection). Regex
  # metacharacters are still interpreted, matching the original behavior.
  escaped = query.replace('\\', '\\\\').replace('"', '\\"')
  query = 'SELECT * FROM search WHERE ROW REGEXP ".*%s.*"' % (escaped)

  metrics = []
  def processResult(key, family, column, val, ts):
    metrics.append(removePrefix(key))

  HyperTablePool.doQuery(query, processResult)
  result_string = ','.join(metrics)
  return HttpResponse(result_string, mimetype='text/plain')
コード例 #3
0
  def find(self, query):
    """Find tree nodes whose row key starts with the query's parent path.

    The dotted query is prefixed, its last component dropped, and the
    remainder (terminated by '|') used as a row-prefix match on the 'tree'
    table. Returns a list of HyperNode objects.
    """
    prefixed = addPrefix(query)
    parts = prefixed.split('.')

    # prefix match on everything up to the last path component
    row_prefix = '.'.join(parts[:-1]) + '|'
    hql = 'SELECT * FROM tree WHERE row =^ "%s"' % row_prefix
    log.info('find query: %s' % hql)

    found = []

    def onCell(key, family, column, val, ts):
      # only 'has_children' cells describe nodes; val == '0' presumably
      # marks a leaf -- TODO confirm against HyperNode's constructor
      if column == 'has_children':
        found.append(HyperNode(key.replace('|', '.'), val == '0'))

    HyperTablePool.doQuery(hql, onCell)
    return found
コード例 #4
0
  def _loadFromHyperTable(self):
    fh = None
    # if the index_path exists, update it
    if os.path.exists(self.index_path):
      fh = open(self.index_path, 'a')

    spec = ScanSpec(keys_only=True, start_time=self.last_atime, versions=1)
    s = time.time()
    self.last_atime = int(s) * 10**9L
    metrics = []
    def processResult(key, family, column, val, ts):
      if not self._existsInTree(key):
        if fh:
          fh.write(key + '\n')
        self._add(key)

    HyperTablePool.doScan(spec, "search", processResult)
    if fh:
      fh.close()
    log.info("[HyperIndex] index reload took %.6f seconds" % (time.time() - s))
コード例 #5
0
ファイル: datalib.py プロジェクト: slackhappy/graphite-ht
def fetchDataFromHyperTable(requestContext, pathExpr):
  if pathExpr.lower().startswith('graphite.'):
    pathExpr = pathExpr[9:]

  pathExpr = addPrefix(pathExpr)
  metrics = [addPrefix(m) for m in HyperStore().find(pathExpr)]

  startTime = requestContext['startTime'].strftime('%Y-%m-%d %H:%M:%S')
  endTime = requestContext['endTime'].strftime('%Y-%m-%d %H:%M:%S')
  start, end, step = timestamp(requestContext['startTime']), timestamp(requestContext['endTime']), 10
  buckets = (end - start) / step

  where = ' OR '.join(['ROW = "%s"' % m for m in metrics])
  query = 'SELECT metric FROM metrics WHERE (%s) AND "%s" < TIMESTAMP < "%s"' % (where, startTime, endTime)
  log.info(query)

  valuesMap = {}
  for m in metrics:
    valuesMap[m] = [None for x in xrange(0, buckets)]

  def processResult(key, family, column, val, ts):
    its = long(ts) / 1000000000L
    bucket = int((its - start) / step)
    if bucket >= 0 or bucket < buckets:
      if valuesMap[key][bucket]:
        valuesMap[key][bucket] = float(val)
      else:
        valuesMap[key][bucket] = float(val)

  HyperTablePool.doQuery(query, processResult)

  seriesList = []
  for m in metrics:
    series = TimeSeries(removePrefix(m), start, end, step, valuesMap[m])
    series.pathExpression = pathExpr # hack to pass expressions through to render functions
    seriesList.append(series)

  return seriesList
コード例 #6
0
ファイル: datalib.py プロジェクト: foursquare/graphite
def fetchDataFromHyperTable(requestContext, pathExpr):
  #TODO: use django settings
  MIN_INTERVAL_SECS = 10
  COL_INTERVAL_SECS = 60 * 60


  log.info('fetching %s' % pathExpr)
  pathExpr = addPrefix(pathExpr)
  metricData = hypertable_index.findMetric(pathExpr)
  metrics = [addPrefix(m[0]) for m in metricData]
  metricRate = {}
  for m in metricData:
    if not m[1]:
      log.info("metric %s doesn't specify a rate! Not rendering..." % m[0])
      metrics.remove(m[0])
    else:
      metricRate[m[0]] = m[1]

  if not metrics:
    return []

  startDateTime = requestContext['startTime']
  endDateTime = requestContext['endTime']

  start, end = int(timestamp(requestContext['startTime'])), int(timestamp(requestContext['endTime']))

  startColString = startDateTime.strftime('metric:%Y-%m-%d %H')
  endColString = endDateTime.strftime('metric:%Y-%m-%d %H')
  cellIntervals = [ CellInterval(m, startColString, True, m, endColString, True) for m in metrics ]

  if cellIntervals == None:
    return []

  nanosStart = start * 10**9L
  nanosEnd = end * 10**9L

  scan_spec = ScanSpec(None, None, None, 1)
  scan_spec.start_time = nanosStart
  scan_spec.end_time = nanosEnd
  scan_spec.cell_intervals = cellIntervals
  scan_spec.versions = COL_INTERVAL_SECS / MIN_INTERVAL_SECS

  log.info(startDateTime)
  log.info(endDateTime)
  log.info(scan_spec)

  valuesMap = defaultdict(list)

  def processResult(key, family, column, val, ts):
    its = long(ts) / 10**9L  #nanoseconds -> seconds
    valuesMap[key].append((its, val))



  HyperTablePool.doScan(scan_spec, "metrics", processResult)

  elapsed = end - start

  for m in valuesMap.keys():
    # resample everything to 'best' granularity
    steps = int(end - start) / metricRate[m]

    # push final values
    finalValues = [None] * steps
    for x in valuesMap[m]:
      bucket = int(min(round(float(x[0] - start) / float(metricRate[m])), steps - 1))
      finalValues[bucket] = float(x[1])
    valuesMap[m] = finalValues

  seriesList = []
  for m in sorted(valuesMap.keys()):
    series = TimeSeries(removePrefix(m), start, end, metricRate[m], valuesMap[m])
    series.pathExpression = pathExpr # hack to pass expressions through to render functions
    seriesList.append(series)

  return seriesList
コード例 #7
0
ファイル: datalib.py プロジェクト: fatihzkaratana/graphite
def fetchDataFromHyperTable(requestContext, pathExpr):
    # TODO: use django settings
    MIN_INTERVAL_SECS = 10
    COL_INTERVAL_SECS = 60 * 60

    log.info("fetching %s" % pathExpr)
    pathExpr = addPrefix(pathExpr)
    metrics = [addPrefix(m) for m in hypertable_index.findMetric(pathExpr)]

    if not metrics:
        return []

    startDateTime = requestContext["startTime"]
    endDateTime = requestContext["endTime"]

    start, end = int(timestamp(requestContext["startTime"])), int(timestamp(requestContext["endTime"]))

    startColString = startDateTime.strftime("metric:%Y-%m-%d %H")
    endColString = endDateTime.strftime("metric:%Y-%m-%d %H")
    cellIntervals = [CellInterval(m, startColString, True, m, endColString, True) for m in metrics]

    if cellIntervals == None:
        return []

    nanosStart = start * 10 ** 9L
    nanosEnd = end * 10 ** 9L

    scan_spec = ScanSpec(None, None, None, 1)
    scan_spec.start_time = nanosStart
    scan_spec.end_time = nanosEnd
    scan_spec.cell_intervals = cellIntervals
    scan_spec.versions = COL_INTERVAL_SECS / MIN_INTERVAL_SECS

    log.info(startDateTime)
    log.info(endDateTime)
    log.info(scan_spec)

    valuesMap = defaultdict(list)
    sortedVals = {}
    metricStep = {}

    def processResult(key, family, column, val, ts):
        its = long(ts) / 10 ** 9L  # nanoseconds -> seconds
        valuesMap[key].append((its, val))

    HyperTablePool.doScan(scan_spec, "metrics", processResult)

    elapsed = end - start

    stepsSeen = defaultdict(int)

    for m in valuesMap.keys():
        # determine step size (the minimum evenly divisible step found)
        minStep = elapsed
        sortedVals[m] = sorted(valuesMap[m], key=lambda x: x[0])
        for i in range(1, len(sortedVals[m])):
            step = sortedVals[m][i][0] - sortedVals[m][i - 1][0]
            if elapsed % step == 0:
                stepsSeen[step] += 1
    mostCommonStep = -1
    mostCommonCount = 0
    for k, v in stepsSeen.iteritems():
        if v > mostCommonCount:
            mostCommonCount = v
            mostCommonStep = k
    # hack for no data
    if mostCommonStep == -1:
        mostCommonStep = 60

    for m in valuesMap.keys():
        # resample everything to 'best' granularity
        steps = int(end - start) / mostCommonStep

        # push final values
        finalValues = [None] * steps
        for x in sortedVals[m]:
            bucket = int(min(round(float(x[0] - start) / float(mostCommonStep)), steps - 1))
            finalValues[bucket] = float(x[1])
        valuesMap[m] = finalValues

    seriesList = []
    for m in sorted(valuesMap.keys()):
        series = TimeSeries(removePrefix(m), start, end, mostCommonStep, valuesMap[m])
        series.pathExpression = pathExpr  # hack to pass expressions through to render functions
        seriesList.append(series)

    return seriesList