Example #1
    def test_fetch(self):
        """
        fetch info from database
        """
        # Don't use AssertRaisesException due to a super obscure bug in
        # python2.6 which returns an IOError in the 2nd argument of __exit__
        # in a context manager as a tuple. See this for a minimal reproducer:
        #    http://git.io/cKz30g
        with self.assertRaises(IOError):
            # check a db that doesn't exist
            whisper.fetch("this_db_does_not_exist", 0)

        # SECOND MINUTE HOUR DAY
        retention = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.filename, retention)

        # check a db with an invalid time range
        now = int(time.time())
        past = now - 6000

        msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
        with AssertRaisesException(whisper.InvalidTimeInterval(msg.format(now, past))):
            whisper.fetch(self.filename, now, past)

        fetch = whisper.fetch(self.filename, 0)

        # check time range
        self.assertEqual(fetch[0][1] - fetch[0][0],
                         retention[-1][0] * retention[-1][1])

        # check number of points
        self.assertEqual(len(fetch[1]), retention[-1][1])

        # check step size
        self.assertEqual(fetch[0][2], retention[-1][0])
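A minimal sketch of the (timeInfo, values) shape these tests rely on, assuming only that the whisper package is installed; the file name is a hypothetical throwaway:

import os
import time
import whisper

path = "example.wsp"
whisper.create(path, [(1, 60)])       # one archive: 1s per point, 60 points
now = int(time.time())
whisper.update(path, 42.0, now)

time_info, values = whisper.fetch(path, now - 60)
start, end, step = time_info          # (fromInterval, untilInterval, step)
print(start, end, step, len(values))  # values is a list, with None for gaps
os.remove(path)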
Example #2
    def test_fetch(self):
        """fetch info from database """

        # check a db that doesn't exist
        with self.assertRaises(Exception):
            whisper.fetch("this_db_does_not_exist", 0)

        # SECOND MINUTE HOUR DAY
        retention = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.db, retention)

        # check a db with an invalid time range
        with self.assertRaises(whisper.InvalidTimeInterval):
            whisper.fetch(self.db, time.time(), time.time() - 6000)

        fetch = whisper.fetch(self.db, 0)

        # check time range
        self.assertEqual(fetch[0][1] - fetch[0][0],
                         retention[-1][0] * retention[-1][1])

        # check number of points
        self.assertEqual(len(fetch[1]), retention[-1][1])

        # check step size
        self.assertEqual(fetch[0][2], retention[-1][0])

        self._removedb()
Example #4
def benchmark_create_update_fetch():
    path, archive_list, tear_down = set_up_create()
    # start timer
    start_time = time.perf_counter()  # time.clock() was removed in Python 3.8
    for i in range(100):
        whisper.create(path, archive_list)

        seconds_ago = 3500
        current_value = 0.5
        increment = 0.2
        now = time.time()
        # file_update closes the file so we have to reopen every time
        for i in range(seconds_ago):
            whisper.update(path, current_value, now - seconds_ago + i)
            current_value += increment

        from_time = now - seconds_ago
        until_time = from_time + 1000

        whisper.fetch(path, from_time, until_time)
        tear_down()

    # end timer
    end_time = time.perf_counter()
    elapsed_time = end_time - start_time

    print("Executed 100 iterations in %ss (%i ns/op)" % (
        elapsed_time, (elapsed_time * 1000 * 1000 * 1000) / 100))
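set_up_create() is not part of whisper itself; a hypothetical sketch of what the benchmark assumes it provides (a fresh path, an archive list, and a cleanup callable):

import os
import tempfile

def set_up_create():
    fd, path = tempfile.mkstemp(suffix=".wsp")
    os.close(fd)
    os.remove(path)  # whisper.create refuses to overwrite an existing file
    archive_list = [(1, 3600), (60, 1440)]  # assumed retention schema

    def tear_down():
        if os.path.exists(path):
            os.remove(path)

    return path, archive_list, tear_down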
Example #5
    def test_update_single_archive(self):
        """
        Update with a single leveled archive
        """
        retention_schema = [(1, 20)]
        data = self._update(schema=retention_schema)
        # fetch the data
        fetch = whisper.fetch(self.filename, 0)   # all data
        fetch_data = fetch[1]

        for i, (timestamp, value) in enumerate(data):
            # is value in the fetched data?
            self.assertEqual(value, fetch_data[i])

        # check TimestampNotCovered
        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # in the future
            whisper.update(self.filename, 1.337, time.time() + 1)

        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # too far in the past (outside the retention window)
            whisper.update(self.filename, 1.337, time.time() - retention_schema[0][1] - 1)

        # When no timestamp is passed in, it should use the current time
        original_lock = whisper.LOCK
        whisper.LOCK = True
        whisper.update(self.filename, 3.7337, None)
        fetched = whisper.fetch(self.filename, 0)[1]
        self.assertEqual(fetched[-1], 3.7337)

        whisper.LOCK = original_lock
Example #6
    def test_fill_should_handle_gaps(self):
        testdb = "test-%s" % self.db
        self._removedb()

        try:
            os.unlink(testdb)
        except (IOError, OSError):
            pass

        schema = [(1, 20)]
        complete = [
            1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
            20
        ]
        holes = [
            1, 2, 3, 4, 5, 6, None, None, None, None, 11, 12, 13, 14, 15, 16,
            17, 18, 19, 20
        ]

        end = int(time.time()) + schema[0][0]
        start = end - (schema[0][1] * schema[0][0])
        times = range(start, end, schema[0][0])

        complete_data = zip(times, complete)

        holes_data = [t for t in zip(times, holes) if t[1] is not None]
        self._createdb(self.db, schema, complete_data)
        self._createdb(testdb, schema, holes_data)

        fill_archives(self.db, testdb, time.time())

        original_data = whisper.fetch(self.db, 0)
        filled_data = whisper.fetch(testdb, 0)
        self.assertEqual(original_data, filled_data)
Example #8
    def test_fill_should_not_override_destination(self):
        testdb = "test-%s" % self.db
        self._removedb()

        try:
            os.unlink(testdb)
        except (IOError, OSError):
            pass

        schema = [(1, 20)]
        data = [
            1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
            20
        ]

        end = int(time.time()) + schema[0][0]
        start = end - (schema[0][1] * schema[0][0])
        times = range(start, end, schema[0][0])

        override_data = zip(times, data)

        emptyData = [data]
        startTime = time.time()
        self._createdb(self.db, schema)
        self._createdb(testdb, schema, override_data)

        fill_archives(self.db, testdb, startTime)

        original_data = whisper.fetch(self.db, 0)
        filled_data = whisper.fetch(testdb, 0)
        self.assertEqual(data, filled_data[1])
Example #13
    def test_fill_endat(self, unused_mock_time):
        dst_db = "dst-%s" % self.db
        self._removedb()
        try:
            os.unlink(dst_db)
        except (IOError, OSError):
            pass

        complete = list(range(1, 21))  # a list, so the slices below concatenate
        seconds_per_point = 1
        seconds_per_point_l2 = seconds_per_point * 4
        points_number = len(complete)
        schema = [
            (seconds_per_point, points_number),
            (seconds_per_point_l2, points_number),
        ]
        empty_data = []

        end = int(time.time()) + seconds_per_point
        start = end - (points_number * seconds_per_point)
        times = range(start, end, seconds_per_point)

        complete_data = zip(times, complete)
        self._createdb(self.db, schema, complete_data)
        self._createdb(dst_db, schema, empty_data)

        quarter = points_number // 4
        half = points_number // 2
        three_quarter = points_number * 3 // 4

        # fills a fourth of data, from 2/4th to 3/4th
        fill_archives(self.db, dst_db,
                      time.time() - quarter,
                      time.time() - half)
        quarter_filled_data = whisper.fetch(dst_db,
                                            start - seconds_per_point)[1]
        expected = [None] * half + complete[half:three_quarter] + [None] * quarter
        self.assertEqual(expected, quarter_filled_data)
        # Fetching data older than start forces the use of the second level
        # of aggregation; the averages of both reads should still agree.
        quarter_filled_data_l2 = whisper.fetch(dst_db, 0)[1]
        average_l1 = _average(quarter_filled_data)
        average_l2 = _average(quarter_filled_data_l2)
        self.assertEqual(average_l1, average_l2)

        # fills a half of data, from 2/4th to 4/4th
        fill_archives(self.db, dst_db, time.time(), time.time() - half)
        half_filled_data = whisper.fetch(dst_db, start - seconds_per_point)[1]
        expected = [None] * half + complete[half:]
        self.assertEqual(expected, half_filled_data)

        # Explicitly passes the default value of endAt=now (excluded)
        fill_archives(self.db, dst_db, time.time(), endAt=0)
        filled_data = whisper.fetch(dst_db, start - seconds_per_point)[1]
        self.assertEqual(complete[:-1], filled_data[:-1])
        self.assertIsNone(filled_data[-1])
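_average is not shown in this snippet; a sketch consistent with how it is used above, ignoring the None gap markers:

def _average(values):
    # hypothetical helper: mean of the non-None points in a fetched series
    points = [v for v in values if v is not None]
    return sum(points) / len(points) if points else None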
Example #14
    def test_resize_with_aggregate(self):
        """resize whisper file with aggregate"""
        # 60s per point, two days retained
        retention = [(60, 60 * 24 * 2)]
        whisper.create(self.filename, retention)

        # insert data
        now_timestamp = int(
            (datetime.now() - datetime(1970, 1, 1)).total_seconds())
        now_timestamp -= now_timestamp % 60  # align to the minute boundary
        points = [(now_timestamp - i * 60, i) for i in range(0, 60 * 24 * 2)]
        whisper.update_many(self.filename, points)
        data = whisper.fetch(self.filename,
                             fromTime=now_timestamp - 3600 * 25,
                             untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        self.assertEqual(len(data[1]), 10)
        self.assertEqual(data[0][2], 60)  # high-precision archive: 60s step
        for d in data[1]:
            self.assertIsNotNone(d)
        # resize from high to low
        os.system(
            'whisper-resize.py %s 60s:1d 300s:2d --aggregate --nobackup >/dev/null'
            % self.filename)  # noqa
        data_low = whisper.fetch(self.filename,
                                 fromTime=now_timestamp - 3600 * 25,
                                 untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        self.assertEqual(len(data_low[1]), 2)
        self.assertEqual(data_low[0][2], 300)  # low-precision archive: 300s step
        for d in data_low[1]:
            self.assertIsNotNone(d)
        data_high = whisper.fetch(self.filename,
                                  fromTime=now_timestamp - 60 * 10,
                                  untilTime=now_timestamp)
        self.assertEqual(len(data_high[1]), 10)
        self.assertEqual(data_high[0][2], 60)  # high-precision archive: 60s step
        # resize from low to high
        os.system(
            'whisper-resize.py %s 60s:2d --aggregate --nobackup >/dev/null' %
            self.filename)  # noqa
        data1 = whisper.fetch(self.filename,
                              fromTime=now_timestamp - 3600 * 25,
                              untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        self.assertEqual(len(data1[1]), 10)
        # noqa data1 looks like ((1588836720, 1588837320, 60), [None, None, 1490.0, None, None, None, None, 1485.0, None, None])
        # data1[1] has two non-None values
        self.assertEqual(len(list(filter(lambda x: x is not None, data1[1]))),
                         2)
        data2 = whisper.fetch(self.filename,
                              fromTime=now_timestamp - 60 * 15,
                              untilTime=now_timestamp - 60 * 5)
        # noqa data2 looks like ((1588925820, 1588926420, 60), [10.0, 11.0, 10.0, 9.0, 8.0, 5.0, 6.0, 5.0, 4.0, 3.0])
        self.assertEqual(len(list(filter(lambda x: x is not None, data2[1]))),
                         10)

        # clean up
        self.tearDown()
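The test ignores the exit status of os.system; a sketch of the equivalent subprocess call, which would surface a failing resize (path stands in for self.filename):

import subprocess

subprocess.run(
    ["whisper-resize.py", path, "60s:1d", "300s:2d", "--aggregate", "--nobackup"],
    check=True, stdout=subprocess.DEVNULL)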
Example #16
    def read_metrics_whisper_obj(self,
                                 metrics,
                                 start_path,
                                 operators=config.operators,
                                 level=0):
        now = int(time.time())
        yesterday = now - (60 * config.metric_interval)

        for filename in glob.iglob(start_path + '**/*', recursive=True):
            if filename.endswith(".wsp"):

                # iterate over the operators in config.operators
                for operator in config.operators:
                    # take the key from the metric object
                    if (operator.lower() in filename.lower()
                            and "overall" not in filename.lower()):

                        for meter in metrics[operator].get_metrics():
                            values = self.drop_nulls(
                                whisper.fetch(filename, yesterday, now)[1])

                            if meter.get_name().lower() in filename.lower():
                                if len(values) > 0:
                                    if "parallelism" in meter.get_name().lower():
                                        meter.set_value(max(values))
                                    else:
                                        average = sum(values) / len(values)
                                        summed = meter.get_value() + average
                                        meter.set_list(values)
                                        if summed > 0:
                                            meter.set_value(summed)

                    elif ("overall" in operator.lower()
                          and "overall" in filename.lower()
                          and operator.lower() in filename.lower()):
                        for meter in metrics[operator].get_metrics():
                            values = self.drop_nulls(
                                whisper.fetch(filename, yesterday, now)[1])

                            if meter.get_name().lower() in filename.lower():

                                if len(values) > 0:
                                    average = sum(values) / len(values)
                                    summed = meter.get_value() + average
                                    meter.set_list(values)
                                    if summed > 0:
                                        meter.set_value(summed)
            else:
                if level < 100:
                    self.read_metrics_whisper_obj(metrics, filename, level=level + 1)
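drop_nulls is not shown; presumably it strips the None placeholders whisper.fetch returns for empty slots, roughly:

    def drop_nulls(self, values):
        # hypothetical helper matching its use above
        return [v for v in values if v is not None]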
Example #17
    def test_heal_target_corrupt(self):
        testdb = "/dev/null"
        self._removedb()

        schema = [(1, 20)]
        self._createdb(self.db, schema)
        original_data = whisper.fetch(self.db, 0)

        # This should log complaints but exit successfully as it cannot
        # heal its target /dev/null
        heal_metric(self.db, testdb)
        data = whisper.fetch(self.db, 0)
        self.assertEqual(original_data, data)
Example #19
    def fetch(self, startTime, endTime):
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        # Merge in data from carbon's cache
        if settings.REPLICATION_FACTOR != 0:
            try:
                cached_datapoints = CarbonLink.query(self.real_metric_path)
            except:
                log.exception("Failed CarbonLink query '%s'" %
                              self.real_metric_path)
                cached_datapoints = []
        else:
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        for (timestamp, value) in cached_datapoints:
            interval = timestamp - (timestamp % step)

            try:
                i = (interval - start) // step  # integer index under Python 3
                values[i] = value
            except:
                pass

        return (time_info, values)
Example #20
    def fetch(self, startTime, endTime, now=None, requestContext=None):
        try:
            data = whisper.fetch(self.fs_path, startTime, endTime, now)
        except IOError:
            log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
            return None
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        meta_info = whisper.info(self.fs_path)
        aggregation_method = meta_info['aggregationMethod']
        # Merge in data from carbon's cache
        cached_datapoints = []
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except:
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        if isinstance(cached_datapoints, dict):
            cached_datapoints = cached_datapoints.items()

        values = merge_with_cache(cached_datapoints, start, step, values,
                                  aggregation_method)

        return time_info, values
Example #21
    def test_heal_mixed_data(self):
        testdb = "test-%s" % self.db
        self._removedb()

        try:
            os.unlink(testdb)
        except (IOError, OSError):
            pass

        schema = [(1, 20)]
        have = [1, 2, 3, None, 5, 6, 7, 8, 9, 10,
                11, 12, 13, 14, 15, None, 17, 18, 19, None]
        remote = [1, 2, 3, 4, 5, 6, None, None, None, None,
                  11, 12, 13, 14, 15, 16, 17, 18, None, 20]

        end = int(time.time()) + schema[0][0]
        start = end - (schema[0][1] * schema[0][0])
        times = range(start, end, schema[0][0])

        have_data = [t for t in zip(times, have) if t[1] is not None]
        remote_data = [t for t in zip(times, remote) if t[1] is not None]

        self._createdb(self.db, schema, remote_data)
        self._createdb(testdb, schema, have_data)

        heal_metric(self.db, testdb, overwrite=True)

        final_data = whisper.fetch(testdb, 0)
        self.assertEqual(final_data[1], list(range(1,21)))
Example #22
  def fetch(self, startTime, endTime):
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
      return None

    time_info, values = data
    (start,end,step) = time_info

    meta_info = whisper.info(self.fs_path)
    lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])
    # Merge in data from carbon's cache
    cached_datapoints = []
    try:
      if step == lowest_step:
        cached_datapoints = CarbonLink.query(self.real_metric_path)
    except:
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    if isinstance(cached_datapoints, dict):
      cached_datapoints = cached_datapoints.items()

    for (timestamp, value) in cached_datapoints:
      interval = timestamp - (timestamp % step)

      try:
        i = (interval - start) // step  # integer index under Python 3
        values[i] = value
      except:
        pass

    return (time_info, values)
Example #23
def fetch(args):
    now = int(time.time())
    yesterday = now - (24 * 60 * 60)

    if args.from_time:
        from_time = time.mktime(
            dt.datetime.strptime(args.from_time, FORMAT).timetuple())
    else:
        from_time = yesterday  # default window: the last 24 hours

    if args.until_time:
        until_time = time.mktime(
            dt.datetime.strptime(args.until_time, FORMAT).timetuple())
    else:
        until_time = now

    times, values = whisper.fetch(args.file, from_time, until_time)

    if args.drop:
        fn = _DROP_FUNCTIONS.get(args.drop)
        values = [x for x in values if fn(x)]

    start, end, step = times

    t = start
    for value in values:

        if args.time_format:
            s = time.strftime(args.time_format, time.localtime(t))
        else:
            s = time.ctime(t)

        print("%s\t%s" % (s, value))  # %s, since value may be None

        t += step
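The argument parser is not shown; one plausible argparse wiring, with FORMAT and the _DROP_FUNCTIONS keys as assumptions inferred from how args is used:

import argparse

FORMAT = "%Y-%m-%dT%H:%M:%S"  # assumed timestamp format
_DROP_FUNCTIONS = {
    "nulls": lambda x: x is not None,       # hypothetical choices
    "zeroes": lambda x: x not in (None, 0),
}

parser = argparse.ArgumentParser()
parser.add_argument("file")
parser.add_argument("--from-time")
parser.add_argument("--until-time")
parser.add_argument("--drop", choices=sorted(_DROP_FUNCTIONS))
parser.add_argument("--time-format")
args = parser.parse_args()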
Example #24
def getReadings(uuid, start, end):
    assert time() - int(start) >= 60
    dataFile = os.path.join(WHISPER_DATA, str(uuid) + ".wsp")
    try:
        (timeInfo, values) = whisper.fetch(dataFile, start, end)
    except whisper.WhisperException as exc:
        raise SystemExit('[ERROR] %s' % str(exc))
Example #25
def waterlevel(db_name):
    """Reduce alert frequency after initial alert, reset on all-clear"""

    # 315550800 is 1980-01-01 EST, i.e. effectively "fetch everything"
    (times, fail_buffer) = whisper.fetch(db_name, 315550800)

    if fail_buffer.count(1) > 2:
        new_whisper_db_name = db_name + '.wsp2'
        whisper.create(new_whisper_db_name, FOLLOWUP, aggregationMethod='last')
        whisper.update(new_whisper_db_name, 1)
        os.rename(new_whisper_db_name, db_name)

        for admin in sys.argv[2:]:
            os.system('mail -s "' + sys.argv[1] + '" ' + admin + '</dev/null')

    if fail_buffer.count(1) == 0:
        if whisper.info(
                db_name)['archives'][0]['secondsPerPoint'] == FOLLOWUP[0][0]:
            new_whisper_db_name = db_name + '.wsp2'
            whisper.create(new_whisper_db_name,
                           RETAINER,
                           aggregationMethod='last')
            whisper.update(new_whisper_db_name, 0)
            os.rename(new_whisper_db_name, db_name)

            for admin in sys.argv[2:]:
                os.system('mail -s "' + sys.argv[1] + '" ' + admin +
                          '</dev/null')

    return (0)
Example #26
File: graph.py Project: 2mind/salmon
    def fetch(self, from_time, until_time=None):
        """
        This method fetch data from the database according to the period
        given

        fetch(path, fromTime, untilTime=None)

        fromTime is a datetime
        untilTime is also a datetime, but defaults to now.

        Returns a tuple of (timeInfo, valueList)
        where timeInfo is itself a tuple of (fromTime, untilTime, step)

        Returns None if no data can be returned
        """
        until_time = until_time or datetime.now()
        time_info, values = whisper.fetch(self.path,
                                          from_time.strftime('%s'),
                                          until_time.strftime('%s'))
        # build up a list of (timestamp, value)
        start_time, end_time, step = time_info
        current = start_time
        times = []
        while current < end_time:  # end is exclusive, matching len(values)
            times.append(current)
            current += step
        return zip(times, values)
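A hedged usage sketch for the method above; the graph instance is an assumption:

from datetime import datetime, timedelta

# graph is assumed to wrap a whisper file at graph.path
for timestamp, value in graph.fetch(datetime.now() - timedelta(hours=1)):
    print(timestamp, value)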
Example #27
 async def read_from_wsps(self):
     """
     Read metrics from wsp file and then publish
     to an asyncio Queue.
     """
     debug_print("start reading from wsp", self.debug)
     prefix = self.prefix or os.path.basename(self.directory)
     for relative_path, full_path in self._extract_wsp():
         if full_path.endswith('.wsp'):
             metric_path = relative_path.replace('/', '.')[:-4]
             metric_path = self.schema_func(metric_path)
             metric = "{0}{1}".format(prefix, metric_path)
             try:
                 time_info, values = whisper.fetch(full_path, 0)
             except whisper.CorruptWhisperFile:
                 debug_print('Corrupt, skipping', self.debug)
                 continue
             metrics = zip(range(*time_info), values)
             for timestamp, value in metrics:
                 if value is not None:
                     # await asyncio.sleep(0.1)
                     await self.queue.put((metric, value, timestamp))
                     debug_print(
                         "reading {0}, {1}, {2}".format(
                             metric, value, timestamp), self.debug)
     # Signal the writer that there is no more data
     await self.queue.put(None)
Example #29
 def fetch(self, *parts):
     path = settings.CARBON_CONF['whisper_dir']
     path = os.path.join(path, *parts)
     path += '.wsp'
     range_args, values = whisper.fetch(
         path, fromTime=self.from_, untilTime=self.till_)
     return range_args, values
Example #31
  def fetch(self, startTime, endTime):
    try:
      data = whisper.fetch(self.fs_path, startTime, endTime)
    except IOError:
      log.exception("Failed fetch of whisper file '%s'" % self.fs_path)
      return None
    if not data:
      return None

    time_info, values = data
    (start,end,step) = time_info

    meta_info = whisper.info(self.fs_path)
    aggregation_method = meta_info['aggregationMethod']
    lowest_step = min([i['secondsPerPoint'] for i in meta_info['archives']])
    # Merge in data from carbon's cache
    cached_datapoints = []
    try:
      cached_datapoints = CarbonLink().query(self.real_metric_path)
    except:
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    if isinstance(cached_datapoints, dict):
      cached_datapoints = cached_datapoints.items()

    values = merge_with_cache(cached_datapoints,
                              start,
                              step,
                              values,
                              aggregation_method)

    return time_info, values
Example #32
    def test_update_many_excess(self):
        # given an empty db
        wsp = "test_update_many_excess.wsp"
        self.addCleanup(self._remove, wsp)
        archive_len = 3
        archive_step = 1
        whisper.create(wsp, [(archive_step, archive_len)])

        # given more points than the db can hold
        excess_len = 1
        num_input_points = archive_len + excess_len
        test_now = int(time.time())
        input_start = test_now - num_input_points + archive_step
        input_points = [(input_start + i, random.random() * 10)
                        for i in range(num_input_points)]

        # when the db is updated with too many points
        whisper.update_many(wsp, input_points, now=test_now)

        # then only the most recent input points (those at the end) were written
        actual_time_info = whisper.fetch(wsp, 0, now=test_now)[0]
        self.assertEqual(actual_time_info,
                         (input_points[-archive_len][0],
                          input_points[-1][0] + archive_step,  # untilInterval = newest + step
                          archive_step))
Example #35
def fill_archives(src, dst, startFrom):
    header = whisper.info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        fromTime = time.time() - archive['retention']
        if fromTime >= startFrom:
            continue

        (timeInfo, values) = whisper.fetch(dst, fromTime, startFrom)
        (start, end, step) = timeInfo
        gapstart = None
        for v in values:
            if not v and not gapstart:
                gapstart = start
            elif v and gapstart:
                # ignore an isolated single-point gap
                if (start - gapstart) > archive['secondsPerPoint']:
                    fill(src, dst, gapstart - step, start)
                gapstart = None
            elif gapstart and start == end - step:
                fill(src, dst, gapstart - step, start)

            start += step

        startFrom = fromTime
Example #36
  def fetch(self, startTime, endTime):
    data = whisper.fetch(self.fs_path, startTime, endTime)
    if not data:
      return None
    consolidationFunc = ""
    whisper_info = whisper.info(self.fs_path)
    if "aggregationMethod" in whisper_info:
      aggregationMethod = whisper_info["aggregationMethod"]
      if aggregationMethod == 'min' or aggregationMethod == 'max':
        consolidationFunc = aggregationMethod
    time_info, values = data
    (start,end,step) = time_info

    # Merge in data from carbon's cache
    try:
      cached_datapoints = CarbonLink.query(self.real_metric_path)
    except:
      log.exception("Failed CarbonLink query '%s'" % self.real_metric_path)
      cached_datapoints = []

    for (timestamp, value) in cached_datapoints:
      interval = timestamp - (timestamp % step)

      try:
        i = (interval - start) // step  # integer index under Python 3
        values[i] = value
      except:
        pass

    return (time_info, values, consolidationFunc)
Example #37
def fill_archives(src, dst, startFrom):
    header = info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        fromTime = time.time() - archive['retention']
        if fromTime >= startFrom:
            continue

        (timeInfo, values) = fetch(dst, fromTime, startFrom)
        (start, end, step) = timeInfo
        gapstart = None
        for v in values:
            if not v and not gapstart:
                gapstart = start
            elif v and gapstart:
                # ignore an isolated single-point gap
                if (start - gapstart) > archive['secondsPerPoint']:
                    fill(src, dst, gapstart - step, start)
                gapstart = None
            elif gapstart and start == end - step:
                fill(src, dst, gapstart - step, start)

            start += step

        startFrom = fromTime
Example #38
    def fetch(self, startTime, endTime):
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        (start, end, step) = time_info

        # Merge in data from carbon's cache
        try:
            cached_datapoints = CarbonLink.query(self.real_metric_path)
        except:
            log.exception("Failed CarbonLink query '%s'" %
                          self.real_metric_path)
            cached_datapoints = []

        for (timestamp, value) in cached_datapoints:
            interval = timestamp - (timestamp % step)

            try:
                i = (interval - start) // step  # integer index under Python 3
                values[i] = value
            except:
                pass

        return (time_info, values)
Example #41
    def extract_data(self, metric, start, end=None):
        f = self.map_metric_to_files(metric)
        if not f:
            return None

        fetched = whisper.fetch(f[0], start, end)
        return fetched
Example #43
    def fetch(self, startTime, endTime):
        data = whisper.fetch(self.fs_path, startTime, endTime)
        if not data:
            return None

        time_info, values = data
        start, end, step = time_info
        return (time_info, values)
Example #44
	def data(self, t=None):
		# the original default argument (now - 30 minutes) was evaluated once
		# at definition time; compute it per call instead
		if t is None:
			t = datetime.datetime.now() - datetime.timedelta(minutes=30)
		ret = []
		import whisper
		for fn in os.listdir("/var/mmonet/"):
			if fn.startswith(self.uuid):
				print(fn)
				# whisper.fetch expects epoch seconds, not a datetime
				ret.append(whisper.fetch("/var/mmonet/" + fn, t.timestamp()))
				print(ret)
		return json.dumps(list(zip(*ret)))
Example #45
    def test_heal_target_missing(self):
        testdb = "test-%s" % self.db
        try:
            os.unlink(testdb)
        except (IOError, OSError):
            pass

        self._removedb()

        schema = [(1, 20)]
        self._createdb(self.db, schema)
        original_data = whisper.fetch(self.db, 0)

        # This should log complaints but exit successfully, creating the
        # missing target and backfilling it from the source
        heal_metric(self.db, testdb)
        data = whisper.fetch(testdb, 0)
        self.assertEqual(original_data, data)
Example #46
    def _fetch_stat_data(self, stat_path, from_time, to_time):
        """
        获取数据
        :return:
        """
        # return [10, 100, 100]
        time_info, values = whisper.fetch(stat_path, from_time, to_time)

        return values
Example #49
    def test_fetch_with_archive_to_select(self):
        """
        fetch info from database providing the archive to select
        """

        # SECOND MINUTE HOUR DAY
        retention = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.filename, retention)

        archives = ["1s", "1m", "1h", "1d"]

        for i in range(len(archives)):
            fetch = whisper.fetch(self.filename, 0, archiveToSelect=archives[i])
            self.assertEqual(fetch[0][2], retention[i][0])

            # check time range
            self.assertEqual(fetch[0][1] - fetch[0][0], retention[-1][0] * retention[-1][1])
        with AssertRaisesException(ValueError("Invalid granularity: 2")):
            fetch = whisper.fetch(self.filename, 0, archiveToSelect="2s")
Example #51
    def test_fill_empty(self):
        testdb = "test-%s" % self.db
        self._removedb()

        try:
            os.unlink(testdb)
        except (IOError, OSError):
            pass

        schema = [(1, 20)]
        emptyData = []
        startTime = time.time()
        self._createdb(self.db, schema)
        self._createdb(testdb, schema, emptyData)

        fill_archives(self.db, testdb, startTime)

        original_data = whisper.fetch(self.db, 0)
        filled_data = whisper.fetch(testdb, 0)
        self.assertEqual(original_data, filled_data)
Example #53
    def _my_get_data(self, request, context, *args, **kwargs):
        # Add data to the context here...

        instances = api.server_list(self.request)
        for i in instances[0]._attrs:
            has = getattr(instances[0], i, "no")
            print("%s? %s" % (i, has))


        context["data"] = []
        for instance in instances:
            whisper_files = utils.get_whisper_files_list(WHISPER_DBS_PREFIX)

            this_instance_files = [x for x in whisper_files if instance.id in x]

            for f in this_instance_files:
                if "cpu" not in f: continue

                fetched = whisper.fetch(f, time.time() - 10)
                times, data = fetched
                context["data"].append((instance.id, data[-1]))

##        prefix = WHISPER_DBS_PREFIX
##        whisper_files = get_whisper_files_list(prefix, domain_filter=instances[0].id)
##
##        # for each whisper file found, query the last 30 seconds of data from
##        # it and format it it into the context
##        context["data"] = []
##        for f in sorted(whisper_files):
##            key = f.replace(prefix + '/', '')
##            
##            this_context = {}
##            this_context["name"] = key
##            
##            fetched = whisper.fetch(f, time.time() - 30)
##            times, data = fetched
##            
##            if len(set(data)) == 1 and None in set(data):
##                continue
##
##            start, end, step = times
##            values = []
##            for tv, val in zip(xrange(start, end, step), data):
##                if val is None:
##                    val = "n/a"
##                values.append({"time": tv, "value": val})
##
##            this_context["items"] = values
##
##            context["data"].append(this_context)
##
##        print "\n\n\n", context, "\n\n\n"
        return context
Example #54
 def test_01_add_point(self):
     """
     Add a point and check the created time range.
     """
     now = int(time.time())
     whisper.update(FILENAME, 1234, timestamp=now)
     (fromInterval, toInterval, step), points = whisper.fetch(FILENAME, 0, None)
     now = now - (now % 60)
     # The upper bound is (roughly) 'now'.
     self.assertEqual(toInterval, now + SECONDS_PER_POINT)
     # The lower bound is (roughly) now minus the covered time.
     self.assertEqual(fromInterval, now - (NUMBER_OF_POINTS - 1) * SECONDS_PER_POINT)
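The module-level fixtures this test assumes are not shown; a plausible sketch (the values are illustrative):

import whisper

FILENAME = "test_add_point.wsp"
SECONDS_PER_POINT = 60
NUMBER_OF_POINTS = 1440  # one day at one point per minute
whisper.create(FILENAME, [(SECONDS_PER_POINT, NUMBER_OF_POINTS)])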
Example #55
def data(path, hours, offset=0):
    """
    Return True if the metric at ``path`` has no whisper data newer than
    ``hours``.

    If ``offset`` is not None, view the ``hours`` prior to ``offset`` hours
    ago, instead of from right now.
    """
    now = time.time()
    end = now - _to_sec(offset)  # Will default to now
    start = end - _to_sec(hours)
    _data = whisper.fetch(path, start, end)
    # _data is (timeInfo, values); [-1] is the values list
    return all(x is None for x in _data[-1])
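_to_sec is not shown; presumably it converts hours to seconds:

def _to_sec(hours):
    # hypothetical helper: hours -> seconds
    return int(hours * 3600)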
Example #56
def fill_archives(src,
                  dst,
                  start_from,
                  end_at=0,
                  overwrite=False,
                  lock_writes=False):
    """
    Fills gaps in dst using data from src.

    src is the path as a string
    dst is the path as a string
    start_from is the latest timestamp (archives are read backward)
    end_at is the earliest timestamp (archives are read backward);
          if absent, we take the earliest timestamp in the archive
    overwrite will write all non-null points from src to dst.
    lock with the whisper lock if True
    """
    if lock_writes is False:
        whisper.LOCK = False
    elif whisper.CAN_LOCK and lock_writes is True:
        whisper.LOCK = True

    header = whisper.info(dst)
    archives = header['archives']
    archives = sorted(archives, key=lambda t: t['retention'])

    for archive in archives:
        from_time = max(end_at, time.time() - archive['retention'])
        if from_time >= start_from:
            continue

        (timeInfo, values) = whisper.fetch(dst,
                                           from_time,
                                           untilTime=start_from)
        (start, end, step) = timeInfo
        gapstart = None
        for value in values:
            has_value = bool(value and not overwrite)
            if not has_value and not gapstart:
                gapstart = start
            elif has_value and gapstart:
                if (start - gapstart) >= archive['secondsPerPoint']:
                    fill(src, dst, gapstart - step, start)
                gapstart = None
            start += step
        # fill if this gap continues to the end
        if gapstart:
            fill(src, dst, gapstart - step, end - step)

        # The next archive only needs to be filled up to the latest point
        # in time we updated.
        start_from = from_time
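A hedged usage sketch of fill_archives as defined above; the paths are placeholders:

import time

# back-fill gaps in dst.wsp from src.wsp over the last 24 hours only,
# reading archives newest-first as the docstring describes
fill_archives("src.wsp", "dst.wsp",
              start_from=time.time(),
              end_at=time.time() - 86400)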
Example #57
    def data(self, sources, start, end, width):
        serieses = []
        for source in sources:
            path = os.path.join(self.GRAPH_ROOT, "%s.wsp" % (os.path.join(*source),))
            timeInfo, values = whisper.fetch(path, start, end)
            print(len(values))
            start, end, step = timeInfo
            serieses.append(values)

        out = []
        # izip/xrange in the original are Python 2; zip/range work on Python 3
        for timestamp, values in zip(range(start, end, step), zip(*serieses)):
            out.append({'t': timestamp, 'v': list(values)})
        return json.dumps(out)
Example #58
    def fetch(self, start_time, end_time):
        try:
            with get_lock(self.filepath):
                data = whisper.fetch(self.filepath, start_time, end_time)
        except FileNotFoundError:
            return None

        if not data:
            return None

        (start, end, step), values = data

        return start, end, step, values
    def get_samples(self, sample_filter, limit=None):

        conditions = {
            "resource_id": sample_filter.resource,
            "user_id": sample_filter.user,
            "source_id": sample_filter.source,
            "project_id": sample_filter.project,
            "name": sample_filter.meter,
        }

        sql = "SELECT * FROM resources r, meters m"
        where_clause = " WHERE r.resource_id=m.resource_id"
        sql = sql + where_clause

        def convert_local_time_to_utc():
            now = datetime.datetime.utcnow().replace(second=0, microsecond=0)
            tt = int((now - datetime.datetime(1970, 1, 1)).total_seconds())
            return tt

        for key, value in conditions.items():  # iteritems() is Python 2 only
            if value is not None:
                sql = sql + " and m." + str(key) + "='" + str(value) + "'"

        conn = sqlite3.connect(env_variables["sql_db_path"])
        c = conn.cursor()
        rs = c.execute(sql)

        for row in rs:
            path = env_variables["whisper_path"] + row[1] + "_" + row[8].replace(".", "_") + ".wsp"
            t = convert_local_time_to_utc()
            r = whisper.fetch(path, t - unit_multipliers["minutes"], t)

            yield api_models.Sample(
                source=row[14],
                counter_name=row[8],
                counter_type=row[9],
                counter_unit=row[10],
                counter_volume=r[1][0],
                user_id=row[2],
                project_id=row[3],
                resource_id=row[1],
                timestamp=datetime.datetime.utcnow().replace(second=0, microsecond=0),
                recorded_at=None,
                resource_metadata=json.loads(row[5]),
                message_id=None,
                message_signature=None,
            )

        conn.close()
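get_lock is not shown; a minimal sketch, assuming an in-process per-path lock is all that is needed:

import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one lock per whisper file path

@contextmanager
def get_lock(path):
    with _locks[path]:
        yield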