Example #1
def extractPathExpressions(requestContext, targets):
    # Returns a list of unique pathExpressions found in the targets list

    pathExpressions = set()

    def extractPathExpression(requestContext, tokens, replacements=None):
        if tokens.template:
            arglist = dict()
            if tokens.template.kwargs:
                arglist.update(
                    dict([(kwarg.argname, evaluateScalarTokens(kwarg.args[0]))
                          for kwarg in tokens.template.kwargs]))
            if tokens.template.args:
                arglist.update(
                    dict([(str(i + 1), evaluateScalarTokens(arg))
                          for i, arg in enumerate(tokens.template.args)]))
            if 'template' in requestContext:
                arglist.update(requestContext['template'])
            extractPathExpression(requestContext, tokens.template, arglist)
        elif tokens.expression:
            extractPathExpression(requestContext, tokens.expression,
                                  replacements)
            if tokens.expression.pipedCalls:
                for token in tokens.expression.pipedCalls:
                    extractPathExpression(requestContext, token, replacements)
        elif tokens.pathExpression:
            expression = tokens.pathExpression
            if replacements:
                for name in replacements:
                    if expression != '$' + name:
                        expression = expression.replace(
                            '$' + name, str(replacements[name]))
            pathExpressions.add(expression)
        elif tokens.call:
            # if we're prefetching seriesByTag, look up the matching series and prefetch those
            if tokens.call.funcname == 'seriesByTag':
                if STORE.tagdb:
                    for series in STORE.tagdb.find_series(
                            tuple([
                                t.string[1:-1] for t in tokens.call.args
                                if t.string
                            ]),
                            requestContext=requestContext,
                    ):
                        pathExpressions.add(series)
            else:
                for a in tokens.call.args:
                    extractPathExpression(requestContext, a, replacements)

    for target in targets:
        if not target:
            continue

        if isinstance(target, six.string_types):
            if not target.strip():
                continue
            target = grammar.parseString(target)
        extractPathExpression(requestContext, target)

    return list(pathExpressions)
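
A minimal usage sketch for Example #1. It assumes the graphite-web objects the function references (grammar, STORE, six) are importable in the surrounding module; the target strings and the resulting paths are illustrative, not taken from the source.

requestContext = {}   # no 'template' key, so no $-substitution happens
targets = [
    "aliasByNode(sumSeries(web.*.requests), 1)",
    "web.host-1.load",
]
paths = extractPathExpressions(requestContext, targets)
# Both the wildcard path nested inside the function calls and the bare path are
# collected, e.g. ['web.*.requests', 'web.host-1.load'] (set order is not guaranteed).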
Example #2
    def _tag_patterns(self, patterns, requestContext):
      tag_patterns = []
      pattern_aliases = defaultdict(list)

      for pattern in patterns:
        # if pattern isn't a seriesByTag call, just add it to the list
        if not pattern.startswith('seriesByTag('):
          tag_patterns.append(pattern)
          continue

        # perform the tagdb lookup
        exprs = tuple([
          t.string[1:-1]
          for t in grammar.parseString(pattern).expression.call.args
          if t.string
        ])
        taggedSeries = self.tagdb.find_series(exprs, requestContext=requestContext)
        if not taggedSeries:
          continue

        # add to translation table for path matching
        for series in taggedSeries:
          pattern_aliases[series].append(pattern)

        # add to list of resolved patterns
        tag_patterns.extend(taggedSeries)

      return sorted(set(tag_patterns)), pattern_aliases
Example #3
    def _tag_patterns(self, patterns, requestContext):
        tag_patterns = []
        pattern_aliases = defaultdict(list)

        for pattern in patterns:
            # if pattern isn't a seriesByTag call, just add it to the list
            if not pattern.startswith('seriesByTag('):
                tag_patterns.append(pattern)
                continue

            # perform the tagdb lookup
            exprs = tuple([
                t.string[1:-1]
                for t in grammar.parseString(pattern).expression.call.args
                if t.string
            ])
            taggedSeries = self.tagdb.find_series(
                exprs, requestContext=requestContext)
            if not taggedSeries:
                continue

            # add to translation table for path matching
            for series in taggedSeries:
                pattern_aliases[series].append(pattern)

            # add to list of resolved patterns
            tag_patterns.extend(taggedSeries)

        return sorted(set(tag_patterns)), pattern_aliases
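
A hedged sketch of what the two values returned by _tag_patterns in Examples #2/#3 look like. The finder object, the tag expressions and the resolved series name below are assumptions made for illustration; only the shape of the return values comes from the code above.

patterns = [
    "web.host-1.load",
    "seriesByTag('name=disk.used', 'server=web01')",
]
resolved, aliases = finder._tag_patterns(patterns, requestContext={})
# resolved -> sorted, de-duplicated mix of plain patterns and tagdb matches,
#             e.g. ['disk.used;server=web01', 'web.host-1.load']
# aliases  -> maps each resolved series back to the seriesByTag pattern(s) that
#             produced it, e.g. {'disk.used;server=web01': [patterns[1]]}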
Example #4
def evaluateTarget(requestContext, targets, noPrefetch=False):
    if not isinstance(targets, list):
        targets = [targets]

    if settings.REMOTE_PREFETCH_DATA and not requestContext.get(
            'localOnly') and not noPrefetch:
        prefetchRemoteData(requestContext, targets)

    seriesList = []

    for target in targets:
        if isinstance(target, basestring):
            if not target.strip():
                continue
            target = grammar.parseString(target)

        result = evaluateTokens(requestContext, target)

        # we have to return a list of TimeSeries objects
        if isinstance(result, TimeSeries):
            seriesList.append(result)
        elif result:
            seriesList.extend(result)

    return seriesList
Example #5
def extractPathExpressions(targets):
  # Returns a list of unique pathExpressions found in the targets list

  pathExpressions = set()

  def extractPathExpression(tokens):
    if tokens.expression:
      return extractPathExpression(tokens.expression)
    elif tokens.pathExpression:
      pathExpressions.add(tokens.pathExpression)
    elif tokens.call:
      # if we're prefetching seriesByTag, look up the matching series and prefetch those
      if tokens.call.funcname == 'seriesByTag':
        if STORE.tagdb:
          for series in STORE.tagdb.find_series([t.string[1:-1] for t in tokens.call.args if t.string]):
            pathExpressions.add(series)
      else:
        for a in tokens.call.args:
          extractPathExpression(a)

  for target in targets:
    tokens = grammar.parseString(target)
    extractPathExpression(tokens)

  return list(pathExpressions)
Example #6
def extractPathExpressions(targets):
    # Returns a list of unique pathExpressions found in the targets list

    pathExpressions = set()

    def extractPathExpression(tokens):
        if tokens.expression:
            return extractPathExpression(tokens.expression)
        elif tokens.pathExpression:
            pathExpressions.add(tokens.pathExpression)
        elif tokens.call:
            # if we're prefetching seriesByTag, look up the matching series and prefetch those
            if tokens.call.funcname == 'seriesByTag':
                if STORE.tagdb:
                    for series in STORE.tagdb.find_series(
                        [t.string[1:-1] for t in tokens.call.args
                         if t.string]):
                        pathExpressions.add(series)
            else:
                for a in tokens.call.args:
                    extractPathExpression(a)

    for target in targets:
        tokens = grammar.parseString(target)
        extractPathExpression(tokens)

    return list(pathExpressions)
Example #7
def evaluateTarget(requestContext, targets):
  if not isinstance(targets, list):
    targets = [targets]

  pathExpressions = extractPathExpressions(requestContext, targets)
  prefetchData(requestContext, pathExpressions)

  seriesList = []

  for target in targets:
    if not target:
      continue

    if isinstance(target, six.string_types):
      if not target.strip():
        continue

      target = grammar.parseString(target)

    try:
      result = evaluateTokens(requestContext, target)
    except InputParameterError as e:
      e.setTargets(requestContext.get('targets', []))
      e.setSourceIdHeaders(requestContext.get('sourceIdHeaders', {}))
      raise

    # we have to return a list of TimeSeries objects
    if isinstance(result, TimeSeries):
      seriesList.append(result)
    elif result:
      seriesList.extend(result)

  return seriesList
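
A hedged usage sketch for these evaluateTarget variants. It assumes a configured graphite-web STORE and the requestContext keys the render pipeline normally supplies; the time bounds and the target string are illustrative.

from datetime import datetime, timedelta

now = datetime.now()
requestContext = {
    'startTime': now - timedelta(hours=1),
    'endTime': now,
    'localOnly': False,
    'targets': ["sumSeries(web.*.requests)"],
}
# evaluateTarget always returns a flat list of TimeSeries objects.
for series in evaluateTarget(requestContext, requestContext['targets']):
    print(series.name, len(series))   # TimeSeries is list-like, so len() gives the point count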
Example #8
def evaluateTarget(requestContext, targets):
    if not isinstance(targets, list):
        targets = [targets]

    pathExpressions = extractPathExpressions(requestContext, targets)
    prefetchData(requestContext, pathExpressions)

    seriesList = []

    for target in targets:
        if not target:
            continue

        if isinstance(target, six.string_types):
            if not target.strip():
                continue
            target = grammar.parseString(target)

        result = evaluateTokens(requestContext, target)

        # we have to return a list of TimeSeries objects
        if isinstance(result, TimeSeries):
            seriesList.append(result)
        elif result:
            seriesList.extend(result)

    return seriesList
Example #9
def evaluateTarget(requestContext, targets):
  if not isinstance(targets, list):
    targets = [targets]

  pathExpressions = extractPathExpressions(requestContext, targets)
  prefetchData(requestContext, pathExpressions)

  seriesList = []

  for target in targets:
    if not target:
      continue

    if isinstance(target, six.string_types):
      if not target.strip():
        continue
      target = grammar.parseString(target)

    result = evaluateTokens(requestContext, target)

    # we have to return a list of TimeSeries objects
    if isinstance(result, TimeSeries):
      seriesList.append(result)
    elif result:
      seriesList.extend(result)

  return seriesList
Example #10
    def find_metric_by_tags_many(self, patterns, cache):
        result = {}
        for pattern in patterns:
            args = grammar.parseString(pattern).expression.call.args
            exprs = [t.string[1:-1] for t in args if t.string]
            queries = [parse_tagspec(e) for e in exprs]
            result[pattern] = self.metric_index.match_by_tags(queries, cache)
        return result
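
The quote-stripping slice used in Examples #2, #3 and #10 is easy to miss, so here is a small sketch of that extraction step on its own, assuming graphite-web's render grammar and an illustrative pattern.

pattern = "seriesByTag('name=cpu.load', 'dc=us-east')"
args = grammar.parseString(pattern).expression.call.args
exprs = [t.string[1:-1] for t in args if t.string]
# exprs -> ['name=cpu.load', 'dc=us-east']: string tokens keep their surrounding
# quotes, and the [1:-1] slice strips them before the tagdb lookup.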
Example #11
def evaluateTarget(requestContext, target):
    tokens = grammar.parseString(target)
    result = evaluateTokens(requestContext, tokens)

    if type(result) is TimeSeries:
        return [result]  # we have to return a list of TimeSeries objects

    else:
        return result
Example #12
def evaluateTarget(requestContext, target):
  tokens = grammar.parseString(target)
  result = evaluateTokens(requestContext, tokens)

  if type(result) is TimeSeries:
    return [result]  # we have to return a list of TimeSeries objects

  else:
    return result
Example #13
def evaluateTarget(target, timeInterval):
  tokens = grammar.parseString(target)
  result = evaluateTokens(tokens, timeInterval)

  if type(result) is TimeSeries:
    return [result]  # we have to return a list of TimeSeries objects

  else:
    return result
Example #14
def extractPathExpressions(requestContext, targets):
  # Returns a list of unique pathExpressions found in the targets list

  pathExpressions = set()

  def extractPathExpression(requestContext, tokens, replacements=None):
    if tokens.template:
      arglist = dict()
      if tokens.template.kwargs:
        arglist.update(dict([(kwarg.argname, evaluateScalarTokens(kwarg.args[0])) for kwarg in tokens.template.kwargs]))
      if tokens.template.args:
        arglist.update(dict([(str(i+1), evaluateScalarTokens(arg)) for i, arg in enumerate(tokens.template.args)]))
      if 'template' in requestContext:
        arglist.update(requestContext['template'])
      extractPathExpression(requestContext, tokens.template, arglist)
    elif tokens.expression:
      extractPathExpression(requestContext, tokens.expression, replacements)
      if tokens.expression.pipedCalls:
        for token in tokens.expression.pipedCalls:
          extractPathExpression(requestContext, token, replacements)
    elif tokens.pathExpression:
      expression = tokens.pathExpression
      if replacements:
        for name in replacements:
          if expression != '$'+name:
            expression = expression.replace('$'+name, str(replacements[name]))
      pathExpressions.add(expression)
    elif tokens.call:
      # if we're prefetching seriesByTag, pass the entire call back as a path expression
      if tokens.call.funcname == 'seriesByTag':
        pathExpressions.add(tokens.call.raw)
      else:
        for a in tokens.call.args:
          extractPathExpression(requestContext, a, replacements)

  for target in targets:
    if not target:
      continue

    if isinstance(target, six.string_types):
      if not target.strip():
        continue
      target = grammar.parseString(target)
    extractPathExpression(requestContext, target)

  return list(pathExpressions)
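
Example #14 handles seriesByTag differently from Example #1: instead of resolving the matching series through STORE.tagdb, it keeps the raw call text as a single path expression. A short hedged sketch with an illustrative target:

paths = extractPathExpressions({}, ["seriesByTag('name=cpu.load')"])
# With Example #14 the result would be the call text itself,
# ["seriesByTag('name=cpu.load')"]; with Example #1 it would be whatever series
# names STORE.tagdb.find_series() returns for 'name=cpu.load'.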
Example #15
def extractPathExpressions(targets):
    # Returns a list of unique pathExpressions found in the targets list

    pathExpressions = set()

    def extractPathExpression(tokens):
        if tokens.expression:
            return extractPathExpression(tokens.expression)
        elif tokens.pathExpression:
            pathExpressions.add(tokens.pathExpression)
        elif tokens.call:
            for a in tokens.call.args:
                extractPathExpression(a)

    for target in targets:
        tokens = grammar.parseString(target)
        extractPathExpression(tokens)

    return list(pathExpressions)
Example #16
def extractPathExpressions(targets):
  # Returns a list of unique pathExpressions found in the targets list

  pathExpressions = set()

  def extractPathExpression(tokens):
    if tokens.expression:
      return extractPathExpression(tokens.expression)
    elif tokens.pathExpression:
      pathExpressions.add(tokens.pathExpression)
    elif tokens.call:
      for a in tokens.call.args:
        extractPathExpression(a)

  for target in targets:
    tokens = grammar.parseString(target)
    extractPathExpression(tokens)

  return list(pathExpressions)
def extractPathExpressions(targets):
  # Returns a list of unique pathExpressions found in the targets list

  pathExpressions = []

  def extractPathExpression(tokens):
    if tokens.expression:
      return extractPathExpression(tokens.expression)
    elif tokens.pathExpression:
      pathExpressions.append(tokens.pathExpression)
    elif tokens.call:
      for arg in tokens.call.args:
        extractPathExpression(arg)

  for target in targets:
    tokens = grammar.parseString(target)
    extractPathExpression(tokens)

  s = set(pathExpressions)
  pathExpressions = list(s)
  return pathExpressions
def extractPathExpressions(targets):
    # Returns a list of unique pathExpressions found in the targets list

    pathExpressions = []

    def extractPathExpression(tokens):
        if tokens.expression:
            return extractPathExpression(tokens.expression)
        elif tokens.pathExpression:
            pathExpressions.append(tokens.pathExpression)
        elif tokens.call:
            for arg in tokens.call.args:
                extractPathExpression(arg)

    for target in targets:
        tokens = grammar.parseString(target)
        extractPathExpression(tokens)

    s = set(pathExpressions)
    pathExpressions = list(s)
    return pathExpressions
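
A closing note on the two list-based variants above: appending to a list and converting to a set at the end de-duplicates exactly like the set-based versions, and neither approach preserves the order of the targets. An illustrative check, assuming the single-argument signature used by these variants:

paths = extractPathExpressions(["web.*.cpu", "sumSeries(web.*.cpu)"])
# -> ['web.*.cpu'] for both the set-based and the list-based implementations:
#    the duplicate path expression is collapsed either way.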