Exemplo n.º 1
0
def set_metadata_view(request):
  """Set metadata on one or more metrics via CarbonLink.

  GET: expects 'metric', 'key' and 'value' query parameters and sets a
  single metadata entry.
  POST: expects a JSON list of {'metric', 'key', 'value'} operations,
  either as a raw JSON request body or in the 'operations' form field.
  Returns a JSON response mapping metric name -> CarbonLink result, or an
  error dict for operations that failed.
  """
  results = {}

  if request.method == 'GET':
    metric = request.GET['metric']
    key = request.GET['key']
    value = request.GET['value']
    try:
      results[metric] = CarbonLink.set_metadata(metric, key, value)
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))

  elif request.method == 'POST':
    if request.META.get('CONTENT_TYPE') == 'application/json':
      operations = json.loads( request.body )
    else:
      operations = json.loads( request.POST['operations'] )

    for op in operations:
      # Reset so a failure before unpacking doesn't report the previous metric.
      metric = None
      try:
        metric, key, value = op['metric'], op['key'], op['value']
        results[metric] = CarbonLink.set_metadata(metric, key, value)
      except Exception:  # narrowed from bare except
        log.exception()
        if metric:
          results[metric] = dict(error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric)

  else:
    results = dict(error='Invalid request method')

  return json_response_for(request, results)
Exemplo n.º 2
0
    def fetch(self, startTime, endTime):
        """Fetch datapoints from the whisper file and merge carbon-cache data.

        Returns (time_info, values), where time_info is (start, end, step),
        or None when the file cannot be read or holds no data for the range.
        """
        try:
            data = whisper.fetch(self.fs_path, startTime, endTime)
        except IOError:
            log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
            return None
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        meta_info = whisper.info(self.fs_path)
        aggregation_method = meta_info['aggregationMethod']
        # Merge in data from carbon's cache; a cache failure degrades to
        # disk-only data instead of failing the whole fetch.
        cached_datapoints = []
        try:
            cached_datapoints = CarbonLink().query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        values = merge_with_cache(cached_datapoints, start, step, values,
                                  aggregation_method)

        return time_info, values
Exemplo n.º 3
0
def set_metadata_view(request):
    """Set metadata on one or more metrics via CarbonLink.

    GET: expects 'metric', 'key' and 'value' query parameters and sets a
    single metadata entry.
    POST: expects a JSON list of {'metric', 'key', 'value'} operations,
    either as a raw JSON request body or in the 'operations' form field.
    Returns a JSON response mapping metric name -> CarbonLink result, or an
    error dict for operations that failed.
    """
    results = {}

    if request.method == "GET":
        metric = request.GET["metric"]
        key = request.GET["key"]
        value = request.GET["value"]
        try:
            results[metric] = CarbonLink.set_metadata(metric, key, value)
        except Exception:  # narrowed from bare except
            log.exception()
            results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))

    elif request.method == "POST":
        if request.META.get("CONTENT_TYPE") == "application/json":
            operations = json.loads(request.body)
        else:
            operations = json.loads(request.POST["operations"])

        for op in operations:
            # Reset so a failure before unpacking doesn't report the
            # previous iteration's metric.
            metric = None
            try:
                metric, key, value = op["metric"], op["key"], op["value"]
                results[metric] = CarbonLink.set_metadata(metric, key, value)
            except Exception:  # narrowed from bare except
                log.exception()
                if metric:
                    results[metric] = dict(
                        error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric
                    )

    else:
        results = dict(error="Invalid request method")

    return json_response_for(request, results)
Exemplo n.º 4
0
  def fetch(self, startTime, endTime):
    """Fetch datapoints from the whisper file and merge carbon-cache data.

    Returns (time_info, values), where time_info is (start, end, step),
    or None when the file cannot be read or holds no data for the range.
    """
    try:
      data = whisper.fetch(self.fs_path, startTime, endTime)
    except IOError:
      log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
      return None
    if not data:
      return None

    time_info, values = data
    (start, end, step) = time_info

    meta_info = whisper.info(self.fs_path)
    aggregation_method = meta_info['aggregationMethod']
    # Merge in data from carbon's cache; a cache failure degrades to
    # disk-only data instead of failing the whole fetch.
    cached_datapoints = []
    try:
      cached_datapoints = CarbonLink().query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    if isinstance(cached_datapoints, dict):
      cached_datapoints = cached_datapoints.items()

    values = merge_with_cache(cached_datapoints,
                              start,
                              step,
                              values,
                              aggregation_method)

    return time_info, values
Exemplo n.º 5
0
def set_metadata_view(request):
  """Set metadata on one or more metrics via CarbonLink.

  GET: expects 'metric', 'key' and 'value' query parameters and sets a
  single metadata entry.
  POST: expects a JSON list of {'metric', 'key', 'value'} operations,
  either as a raw JSON request body or in the 'operations' form field.
  Returns a JSON response mapping metric name -> CarbonLink result, or an
  error dict for operations that failed.
  """
  results = {}

  if request.method == 'GET':
    metric = request.GET['metric']
    key = request.GET['key']
    value = request.GET['value']
    try:
      results[metric] = CarbonLink.set_metadata(metric, key, value)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.set_metadata(%s, %s)" % (metric, key))

  elif request.method == 'POST':
    if request.META.get('CONTENT_TYPE') == 'application/json':
      operations = json.loads( request.body )
    else:
      operations = json.loads( request.POST['operations'] )

    for op in operations:
      # Reset so a failure before unpacking doesn't report the previous metric.
      metric = None
      try:
        metric, key, value = op['metric'], op['key'], op['value']
        results[metric] = CarbonLink.set_metadata(metric, key, value)
      except Exception:  # narrowed from bare except
        log.exception()
        if metric:
          results[metric] = dict(error="Unexpected error occurred in bulk CarbonLink.set_metadata(%s)" % metric)

  else:
    results = dict(error="Invalid request method")

  return json_response_for(request, results)
Exemplo n.º 6
0
def merge_with_carbonlink(metric, start, step, values, aggregation_method=None, raw_step=None):
    """Get points from carbonlink and merge them with existing values.

    A carbonlink failure is logged and treated as "no cached data" so the
    on-disk values are still returned.
    """
    cached_datapoints = []
    try:
        cached_datapoints = CarbonLink().query(metric)
    except Exception:  # was BaseException, which also caught SystemExit/KeyboardInterrupt
        log.exception("Failed CarbonLink query '%s'" % metric)
        cached_datapoints = []

    if isinstance(cached_datapoints, dict):
        cached_datapoints = list(cached_datapoints.items())

    return merge_with_cache(
        cached_datapoints, start, step, values,
        func=aggregation_method, raw_step=raw_step)
Exemplo n.º 7
0
def merge_with_carbonlink(metric, start, step, values, aggregation_method=None, raw_step=None):
    """Get points from carbonlink and merge them with existing values.

    A carbonlink failure is logged and treated as "no cached data" so the
    on-disk values are still returned.
    """
    cached_datapoints = []
    try:
        cached_datapoints = CarbonLink().query(metric)
    except Exception:  # was BaseException, which also caught SystemExit/KeyboardInterrupt
        log.exception("Failed CarbonLink query '%s'" % metric)
        cached_datapoints = []

    if isinstance(cached_datapoints, dict):
        cached_datapoints = list(cached_datapoints.items())

    return merge_with_cache(
        cached_datapoints, start, step, values,
        func=aggregation_method, raw_step=raw_step)
Exemplo n.º 8
0
    def find_nodes(self, query, cache_incomplete_nodes=None):
        """Yield LeafNode/BranchNode objects for *query* from carbon-cache.

        Expands the query via CarbonLink into (metric_name, is_leaf) pairs,
        prechecks every leaf against query.startTime, and yields nodes only
        when ALL prechecks pass. When only the partial-existence checks
        pass, nodes are stashed into *cache_incomplete_nodes* (a dict of
        metric -> node) instead of being yielded.

        NOTE(review): ``has_wildcard`` is computed but never read below —
        confirm whether it can be removed.
        """
        # Strip escaping backslashes before scanning for glob characters.
        clean_patterns = query.pattern.replace('\\', '')
        has_wildcard = clean_patterns.find('{') > -1 or clean_patterns.find(
            '[') > -1 or clean_patterns.find('*') > -1 or clean_patterns.find(
                '?') > -1

        if cache_incomplete_nodes is None:
            cache_incomplete_nodes = {}

        # CarbonLink has some hosts
        if CarbonLink.hosts:
            metric = clean_patterns

            # Let's combine these two cases:
            # 1) has_wildcard
            # 2) single metric query
            # Expand queries in CarbonLink
            # we will get back a list of tuples (metric_name, is_leaf) here.
            # For example,
            # [(metric1, False), (metric2, True)]
            metrics = CarbonLink.expand_query(metric)
            # dedup, because of BranchNodes
            metrics = list(set(metrics))
            # check all metrics in same valid query range
            prechecks = []
            for m, is_leaf in metrics:
                if is_leaf:
                    prechecks.append(CarbonLink.precheck(m, query.startTime))
                else:  # return True for BranchNode
                    prechecks.append((True, True))
            # Prechecks are (exist, partial_exist) pairs; require unanimity.
            exists = all((exist for exist, partial_exist in prechecks))
            partial_exists = all(
                (partial_exist for exist, partial_exist in prechecks))
            if exists:
                for metric, is_leaf in metrics:
                    if is_leaf:
                        reader = CarbonCacheReader(metric)
                        yield LeafNode(metric, reader)
                    else:
                        yield BranchNode(metric)
            elif partial_exists:
                # Not yielded: callers inspect cache_incomplete_nodes instead.
                for metric, is_leaf in metrics:
                    if is_leaf:
                        reader = CarbonCacheReader(metric)
                        cache_incomplete_nodes[metric] = LeafNode(
                            metric, reader)
                    else:
                        cache_incomplete_nodes[metric] = BranchNode(metric)
Exemplo n.º 9
0
    def fetch(self, startTime, endTime):
        """Read datapoints from the cassandra node and overlay cache values.

        Returns (time_info, values) where time_info is
        (startTime, endTime, timeStep).
        """
        data = self.cassandra_node.read(startTime, endTime)
        time_info = (data.startTime, data.endTime, data.timeStep)
        values = list(data.values)

        # Merge in data from carbon's cache; a cache failure degrades to
        # stored data only.
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        for (timestamp, value) in cached_datapoints:
            # Snap the timestamp to the slot boundary.
            interval = timestamp - (timestamp % data.timeStep)
            # Floor division: '/' yields a float index on Python 3, which
            # made every assignment raise TypeError (previously swallowed
            # by a bare 'except: pass').
            i = int(interval - data.startTime) // data.timeStep
            # Skip points outside the fetched window (also prevents
            # negative indices from wrapping around).
            if 0 <= i < len(values):
                values[i] = value

        return (time_info, values)
Exemplo n.º 10
0
    def fetch(self, start_time, end_time):
        """Fetch datapoints for this metric from carbon-cache via CarbonLink.

        Returns (time_info, values), or None when the requested range cannot
        be served from the cache (range outside retention, no matching
        archive, or the resolved step is coarser than the finest archive).
        """
        # Fetch the storage schema from carbon cache through CarbonLink.
        schema = CarbonLink.get_storage_schema(self.metric)
        archives = schema["archives"]
        # Finest resolution among the archives; the cache only holds
        # highest-precision points.
        lowest_step = min([arch[0] for arch in archives])

        now = int(time.time())
        max_retention = max([arch[0] * arch[1] for arch in archives])

        # Format and extract from/until time, bounded by retention.
        from_and_until_time = self._format_and_extract_time(
            start_time, end_time, max_retention)
        if not from_and_until_time:
            return None
        from_time, until_time = from_and_until_time

        # Calculate the step that serves the requested window.
        diff = now - from_time
        step = self._calculate_step(archives, diff)
        if not step:
            return None

        # Only check carbon-cache if step == lowest_step.
        if step == lowest_step:
            return self._query_and_format_cache_data(from_time, until_time,
                                                     step)
        return None
Exemplo n.º 11
0
    def fetch(self, startTime, endTime, now=None, requestContext=None):
        """Fetch datapoints from the whisper file and merge carbon-cache data.

        Returns (time_info, values), where time_info is (start, end, step),
        or None when the file cannot be read or holds no data for the range.
        """
        try:
            data = whisper.fetch(self.fs_path, startTime, endTime, now)
        except IOError:
            log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
            return None
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        meta_info = whisper.info(self.fs_path)
        aggregation_method = meta_info['aggregationMethod']
        # Merge in data from carbon's cache; a cache failure degrades to
        # disk-only data.
        cached_datapoints = []
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        values = merge_with_cache(cached_datapoints, start, step, values,
                                  aggregation_method)

        return time_info, values
Exemplo n.º 12
0
  def fetch(self, startTime, endTime):
    """Fetch whisper datapoints and overlay carbon-cache values.

    Cache data is only consulted when the returned step equals the file's
    finest archive resolution, since carbon-cache only holds
    highest-precision points. Returns (time_info, values) or None when the
    file has no data for the range.
    """
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
      return None

    time_info, values = data
    (start, end, step) = time_info

    meta_info = whisper.info(self.fs_path)
    lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])
    # Merge in data from carbon's cache; a cache failure degrades to
    # disk-only data.
    cached_datapoints = []
    try:
      if step == lowest_step:
        cached_datapoints = CarbonLink.query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    if isinstance(cached_datapoints, dict):
      cached_datapoints = cached_datapoints.items()

    for (timestamp, value) in cached_datapoints:
      # Snap the timestamp to the slot boundary.
      interval = timestamp - (timestamp % step)
      # Floor division: '/' yields a float index on Python 3, which made
      # every assignment raise TypeError (previously swallowed silently).
      i = int(interval - start) // step
      # Skip points outside the fetched window (also prevents negative
      # indices from wrapping around).
      if 0 <= i < len(values):
        values[i] = value

    return (time_info, values)
Exemplo n.º 13
0
    def fetch(self, startTime, endTime):
        """Fetch whisper datapoints and overlay carbon-cache values.

        Cache data is only consulted when the returned step equals the
        file's finest archive resolution, since carbon-cache only holds
        highest-precision points. Returns (time_info, values) or None when
        the file has no data for the range.
        """
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        meta_info = whisper.info(self.fs_path)
        lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])
        # Merge in data from carbon's cache; a cache failure degrades to
        # disk-only data.
        cached_datapoints = []
        try:
            if step == lowest_step:
                cached_datapoints = CarbonLink.query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        for (timestamp, value) in cached_datapoints:
            # Snap the timestamp to the slot boundary.
            interval = timestamp - (timestamp % step)
            # Floor division: '/' yields a float index on Python 3, which
            # made every assignment raise TypeError (previously swallowed).
            i = int(interval - start) // step
            # Skip points outside the fetched window (also prevents
            # negative indices from wrapping around).
            if 0 <= i < len(values):
                values[i] = value

        return (time_info, values)
Exemplo n.º 14
0
  def fetch(self, startTime, endTime):
    """Fetch whisper datapoints and overlay carbon-cache values.

    Returns (time_info, values) or None when the file has no data for the
    requested range.
    """
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
      return None

    time_info, values = data
    (start, end, step) = time_info

    # Merge in data from carbon's cache; a cache failure degrades to
    # disk-only data.
    try:
      cached_datapoints = CarbonLink.query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    for (timestamp, value) in cached_datapoints:
      # Snap the timestamp to the slot boundary.
      interval = timestamp - (timestamp % step)
      # Floor division: '/' yields a float index on Python 3, which made
      # every assignment raise TypeError (previously swallowed silently).
      i = int(interval - start) // step
      # Skip points outside the fetched window (also prevents negative
      # indices from wrapping around).
      if 0 <= i < len(values):
        values[i] = value

    return (time_info, values)
Exemplo n.º 15
0
  def fetch(self, startTime, endTime):
    """Fetch whisper datapoints, overlay carbon-cache values, and report
    the consolidation function.

    Returns (time_info, values, consolidationFunc) where consolidationFunc
    is 'min'/'max' when the whisper file aggregates that way, else "".
    Returns None when the file has no data for the range.
    """
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
      return None
    consolidationFunc = ""
    whisper_info = whisper.info(self.fs_path)
    if "aggregationMethod" in whisper_info:
      aggregationMethod = whisper_info["aggregationMethod"]
      # Only min/max change how the renderer should consolidate points.
      if aggregationMethod == 'min' or aggregationMethod == 'max':
        consolidationFunc = aggregationMethod
    time_info, values = data
    (start, end, step) = time_info

    # Merge in data from carbon's cache; a cache failure degrades to
    # disk-only data.
    try:
      cached_datapoints = CarbonLink.query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    for (timestamp, value) in cached_datapoints:
      # Snap the timestamp to the slot boundary.
      interval = timestamp - (timestamp % step)
      # Floor division: '/' yields a float index on Python 3, which made
      # every assignment raise TypeError (previously swallowed silently).
      i = int(interval - start) // step
      # Skip points outside the fetched window (also prevents negative
      # indices from wrapping around).
      if 0 <= i < len(values):
        values[i] = value

    return (time_info, values, consolidationFunc)
Exemplo n.º 16
0
    def fetch(self, startTime, endTime):
        """Fetch whisper datapoints and overlay carbon-cache values.

        Returns (time_info, values) or None when the file has no data for
        the requested range.
        """
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        # Merge in data from carbon's cache; a cache failure degrades to
        # disk-only data.
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        for (timestamp, value) in cached_datapoints:
            # Snap the timestamp to the slot boundary.
            interval = timestamp - (timestamp % step)
            # Floor division: '/' yields a float index on Python 3, which
            # made every assignment raise TypeError (previously swallowed).
            i = int(interval - start) // step
            # Skip points outside the fetched window (also prevents
            # negative indices from wrapping around).
            if 0 <= i < len(values):
                values[i] = value

        return (time_info, values)
Exemplo n.º 17
0
    def fetch(self, startTime, endTime):
        """Fetch whisper datapoints and overlay carbon-cache values.

        When settings.REPLICATION_FACTOR is 0 the carbon cache is skipped
        entirely. Returns (time_info, values) or None when the file has no
        data for the range.
        """
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        # Merge in data from carbon's cache; a cache failure degrades to
        # disk-only data.
        if settings.REPLICATION_FACTOR != 0:
            try:
                cached_datapoints = CarbonLink.query(self.real_metric_path)
            except Exception:  # narrowed from bare except
                log.exception("Failed CarbonLink query '%s'" %
                              self.real_metric_path)
                cached_datapoints = []
        else:
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        for (timestamp, value) in cached_datapoints:
            # Snap the timestamp to the slot boundary.
            interval = timestamp - (timestamp % step)
            # Floor division: '/' yields a float index on Python 3, which
            # made every assignment raise TypeError (previously swallowed).
            i = int(interval - start) // step
            # Skip points outside the fetched window (also prevents
            # negative indices from wrapping around).
            if 0 <= i < len(values):
                values[i] = value

        return (time_info, values)
Exemplo n.º 18
0
    def fetch(self, startTime, endTime):
        """Read datapoints from the ceres node and overlay cache values.

        When settings.REPLICATION_FACTOR is 0 the carbon cache is skipped
        entirely. Returns (time_info, values) where time_info is
        (startTime, endTime, timeStep).
        """
        data = self.ceres_node.read(startTime, endTime)
        time_info = (data.startTime, data.endTime, data.timeStep)
        values = list(data.values)

        # Merge in data from carbon's cache; a cache failure degrades to
        # stored data only.
        if settings.REPLICATION_FACTOR != 0:
            try:
                cached_datapoints = CarbonLink.query(self.real_metric_path)
            except Exception:  # narrowed from bare except
                log.exception("Failed CarbonLink query '%s'" %
                              self.real_metric_path)
                cached_datapoints = []
        else:
            cached_datapoints = []

        for (timestamp, value) in cached_datapoints:
            # Snap the timestamp to the slot boundary.
            interval = timestamp - (timestamp % data.timeStep)
            # Floor division: '/' yields a float index on Python 3, which
            # made every assignment raise TypeError (previously swallowed).
            i = int(interval - data.startTime) // data.timeStep
            # Skip points outside the fetched window (also prevents
            # negative indices from wrapping around).
            if 0 <= i < len(values):
                values[i] = value

        return (time_info, values)
Exemplo n.º 19
0
def cache_metric(request):
    """Return the carbon-cache datapoints for a single metric as JSON."""
    params = request.GET.copy()
    target = params.get('metric', 'yun.test')
    payload = {
        "target": target,
        "datapoints": CarbonLink.query(target),
    }
    return HttpResponse(content=json.dumps(payload),
                        content_type='application/json')
Exemplo n.º 20
0
def get_metadata_view(request):
  """Return CarbonLink metadata for one key across the requested metrics.

  Responds with a JSON mapping of metric -> metadata value, or an error
  dict for metrics whose lookup failed.
  """
  # NOTE(review): request.REQUEST was removed in Django 1.9 — confirm the
  # target Django version.
  key = request.REQUEST['key']
  metrics = request.REQUEST.getlist('metric')
  results = {}
  for metric in metrics:
    try:
      results[metric] = CarbonLink.get_metadata(metric, key)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))

  return HttpResponse(json.dumps(results), mimetype='application/json')
Exemplo n.º 21
0
def get_metadata_view(request):
  """Return CarbonLink metadata for one key across the requested metrics.

  Responds with a JSON mapping of metric -> metadata value, or an error
  dict for metrics whose lookup failed.
  """
  # NOTE(review): request.REQUEST was removed in Django 1.9 — confirm the
  # target Django version.
  key = request.REQUEST['key']
  metrics = request.REQUEST.getlist('metric')
  results = {}
  for metric in metrics:
    try:
      results[metric] = CarbonLink.get_metadata(metric, key)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))

  return json_response_for(request, results)
Exemplo n.º 22
0
def get_metadata_view(request):
  """Return CarbonLink metadata for one key across the requested metrics.

  Responds with a JSON mapping of metric -> metadata value, or an error
  dict for metrics whose lookup failed.
  """
  # NOTE(review): request.REQUEST was removed in Django 1.9 — confirm the
  # target Django version.
  key = request.REQUEST['key']
  metrics = request.REQUEST.getlist('metric')
  results = {}
  for metric in metrics:
    try:
      results[metric] = CarbonLink.get_metadata(metric, key)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))

  return json_response_for(request, results)
Exemplo n.º 23
0
def get_metadata_view(request):
  """Return CarbonLink metadata for one key across the requested metrics.

  Accepts the 'key' and repeated 'metric' parameters from either GET or
  POST (POST values win). Responds with a JSON mapping of metric ->
  metadata value, or an error dict for metrics whose lookup failed.
  """
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  key = queryParams.get('key')
  metrics = queryParams.getlist('metric')
  results = {}
  for metric in metrics:
    try:
      results[metric] = CarbonLink.get_metadata(metric, key)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))

  return json_response_for(request, results)
Exemplo n.º 24
0
def get_metadata_view(request):
  """Return CarbonLink metadata for one key across the requested metrics.

  Accepts the 'key' and repeated 'metric' parameters from either GET or
  POST (POST values win). Responds with a JSON mapping of metric ->
  metadata value, or an error dict for metrics whose lookup failed.
  """
  queryParams = request.GET.copy()
  queryParams.update(request.POST)

  key = queryParams.get('key')
  metrics = queryParams.getlist('metric')
  results = {}
  for metric in metrics:
    try:
      results[metric] = CarbonLink.get_metadata(metric, key)
    except Exception:  # narrowed from bare except
      log.exception()
      results[metric] = dict(error="Unexpected error occurred in CarbonLink.get_metadata(%s, %s)" % (metric, key))

  return json_response_for(request, results)
Exemplo n.º 25
0
def get_metadata_view(request):
    """Return CarbonLink metadata for one key across the requested metrics.

    Responds with a JSON mapping of metric -> metadata value, or an error
    dict for metrics whose lookup failed.
    """
    # NOTE(review): request.REQUEST was removed in Django 1.9 — confirm the
    # target Django version.
    key = request.REQUEST['key']
    metrics = request.REQUEST.getlist('metric')
    results = {}
    for metric in metrics:
        try:
            results[metric] = CarbonLink.get_metadata(metric, key)
        except Exception:  # narrowed from bare except
            log.exception()
            results[metric] = dict(
                error=
                "Unexpected error occurred in CarbonLink.get_metadata(%s, %s)"
                % (metric, key))

    return HttpResponse(json.dumps(results), mimetype='application/json')
Exemplo n.º 26
0
 def _query_and_format_cache_data(self, from_time, until_time, step):
     """Query carbon-cache for self.metric and shape results into
     (time_info, values).

     Returns (time_info, values) aligned to *step* boundaries, or falls
     through returning None when the cache yields no results.
     """
     cached_results = CarbonLink.query(self.metric)
     if cached_results:
         from_interval = int(from_time - (from_time % step)) + step
         until_interval = int(until_time - (until_time % step)) + step
         if from_interval == until_interval:
             # Guarantee at least one slot in the window.
             until_interval += step
         points = (until_interval - from_interval) // step
         values = [None] * points
         time_info = (from_interval, until_interval, step)
         for (timestamp, value) in cached_results:
             # Snap the timestamp to the slot boundary.
             interval = int(timestamp - (timestamp % step))
             # Floor division: '/' produced a float index on Python 3,
             # which raised TypeError on the assignment below.
             index = (interval - from_interval) // step
             if index < 0 or index >= points:
                 continue
             values[index] = value
         return time_info, values
Exemplo n.º 27
0
    def fetch(self, startTime, endTime):
        """Read datapoints from the ceres node and merge carbon-cache data.

        Returns (time_info, values) where time_info is
        (startTime, endTime, timeStep).
        """
        data = self.ceres_node.read(startTime, endTime)
        time_info = (data.startTime, data.endTime, data.timeStep)
        values = list(data.values)

        # Merge in data from carbon's cache; a cache failure degrades to
        # stored data only.
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except Exception:  # narrowed from bare except
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        values = merge_with_cache(cached_datapoints, data.startTime,
                                  data.timeStep, values)

        return time_info, values
Exemplo n.º 28
0
  def fetch(self, startTime, endTime):
    """Read datapoints from the ceres node and merge carbon-cache data.

    Returns (time_info, values) where time_info is
    (startTime, endTime, timeStep).
    """
    data = self.ceres_node.read(startTime, endTime)
    time_info = (data.startTime, data.endTime, data.timeStep)
    values = list(data.values)

    # Merge in data from carbon's cache; a cache failure degrades to
    # stored data only.
    try:
      cached_datapoints = CarbonLink.query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    values = merge_with_cache(cached_datapoints,
                              data.startTime,
                              data.timeStep,
                              values)

    return time_info, values
Exemplo n.º 29
0
  def fetch(self, startTime, endTime):
    """Read datapoints from the ceres node and overlay carbon-cache values.

    Returns (time_info, values) where time_info is
    (startTime, endTime, timeStep).
    """
    data = self.ceres_node.read(startTime, endTime)
    time_info = (data.startTime, data.endTime, data.timeStep)
    values = list(data.values)

    # Merge in data from carbon's cache; a cache failure degrades to
    # stored data only.
    try:
      cached_datapoints = CarbonLink.query(self.real_metric_path)
    except Exception:  # narrowed from bare except
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    for (timestamp, value) in cached_datapoints:
      # Snap the timestamp to the slot boundary.
      interval = timestamp - (timestamp % data.timeStep)
      # Floor division: '/' yields a float index on Python 3, which made
      # every assignment raise TypeError (previously swallowed silently).
      i = int(interval - data.startTime) // data.timeStep
      # Skip points outside the fetched window (also prevents negative
      # indices from wrapping around).
      if 0 <= i < len(values):
        values[i] = value

    return (time_info, values)
Exemplo n.º 30
0
def CarbonLink():
    """Return a carbonlink instance."""
    # Imported lazily so that readers.py (commonly imported by plugins)
    # does not drag in the carbonlink dependency chain at module load time.
    from graphite.carbonlink import CarbonLink as _CarbonLink
    return _CarbonLink()