Example #1
0
def _swap_whisper_db(db_name, schema, seed_value):
    """Replace db_name with a fresh whisper db built from *schema*.

    A temporary '.wsp2' file is created, seeded with *seed_value* and
    renamed over the original (rename is atomic on POSIX).
    """
    tmp_name = db_name + '.wsp2'
    whisper.create(tmp_name, schema, aggregationMethod='last')
    whisper.update(tmp_name, seed_value)
    os.rename(tmp_name, db_name)


def _mail_admins():
    """Send an empty alert mail (subject sys.argv[1]) to each address in sys.argv[2:]."""
    import subprocess
    for admin in sys.argv[2:]:
        # Argument-list form with shell=False avoids shell injection via
        # the subject/address strings (the original built an os.system
        # command by concatenation).  stdin from /dev/null mirrors the
        # original `</dev/null` redirect so `mail` sends an empty body.
        with open(os.devnull, 'rb') as devnull:
            subprocess.call(['mail', '-s', sys.argv[1], admin], stdin=devnull)


def waterlevel(db_name):
    """Reduce alert frequency after initial alert, reset on all-clear.

    Reads the failure buffer from the whisper db at *db_name*:
      * more than two failures: rewrite the db with the FOLLOWUP schema
        (coarser retention -> fewer subsequent alerts) and mail admins;
      * zero failures while the db is on the FOLLOWUP schema: rewrite it
        back to the RETAINER schema (all-clear) and mail admins.

    Always returns 0.
    """
    # 315550800 is ~10 years in seconds, i.e. "fetch everything".
    (times, fail_buffer) = whisper.fetch(db_name, 315550800)

    if fail_buffer.count(1) > 2:
        _swap_whisper_db(db_name, FOLLOWUP, 1)
        _mail_admins()

    if fail_buffer.count(1) == 0:
        # Only reset when the first archive matches FOLLOWUP, i.e. an
        # alert was previously escalated.
        if whisper.info(
                db_name)['archives'][0]['secondsPerPoint'] == FOLLOWUP[0][0]:
            _swap_whisper_db(db_name, RETAINER, 0)
            _mail_admins()

    return 0
def benchmark_create_update_fetch():
    """Benchmark 100 rounds of whisper create/update/fetch and print ns/op."""
    path, archive_list, tear_down = set_up_create()
    # time.clock() was removed in Python 3.8; prefer perf_counter when
    # available and fall back to clock on Python 2.
    timer = getattr(time, 'perf_counter', None) or time.clock
    # start timer
    start_time = timer()
    for i in range(100):
        whisper.create(path, archive_list)

        seconds_ago = 3500
        current_value = 0.5
        increment = 0.2
        now = time.time()
        # file_update closes the file so we have to reopen every time.
        # FIX: use a distinct inner loop variable -- the original reused
        # `i`, clobbering the outer iteration counter.
        for step in range(seconds_ago):
            whisper.update(path, current_value, now - seconds_ago + step)
            current_value += increment

        from_time = now - seconds_ago
        until_time = from_time + 1000

        whisper.fetch(path, from_time, until_time)
        tear_down()

    # end timer
    end_time = timer()
    elapsed_time = end_time - start_time

    # Parenthesised print works under both Python 2 and Python 3.
    print("Executed 100 iterations in %ss (%i ns/op)" % (
        elapsed_time, (elapsed_time * 1000 * 1000 * 1000) / 100))
    def test_single_metric(self):
        """End-to-end: one whisper metric is imported with settings intact."""
        xfilesfactor = 0.5
        aggregation_method = "last"
        # These retentions are such that every other point is present in both
        # archives. Test validates that duplicate points get inserted only once.
        retentions = [(1, 10), (2, 10)]
        high_precision_duration = retentions[0][0] * retentions[0][1]
        low_precision_duration = retentions[1][0] * retentions[1][1]
        now = int(time.time())
        time_from, time_to = now - low_precision_duration, now
        points = [(float(t), float(now-t)) for t in xrange(time_from, time_to)]
        metric_name = "test_metric"
        metric_path = os_path.join(self.tempdir, metric_name + ".wsp")
        whisper.create(metric_path, retentions, xfilesfactor, aggregation_method)
        whisper.update_many(metric_path, points)

        self._call_main()

        metric = self.accessor.get_metric(metric_name)
        self.assertTrue(metric)
        # FIX: the original asserted `metric.name == metric.name`, which is
        # vacuously true because the name string had been overwritten by
        # the metric object; compare against the created name instead.
        self.assertEqual(metric.name, metric_name)
        self.assertEqual(metric.aggregator.carbon_name, aggregation_method)
        self.assertEqual(metric.carbon_xfilesfactor, xfilesfactor)
        self.assertEqual(metric.retention.as_string, "10*1s:10*2s")

        points_again = list(self.accessor.fetch_points(
            metric, time_from, time_to, metric.retention[0]))
        self.assertEqual(points[-high_precision_duration:], points_again)
Example #4
0
    def update(self, updates):
        """
        Apply a batch of updates to the whisper databases.

        *updates* is an iterable of ``(box_id, timestamp)`` pairs; each
        pair counts one hit for that box at that timestamp.
        """
        # Group updates per box_id.  Whisper timestamps have one-second
        # resolution, so sub-second hits are folded into a single count.
        sorted_updates = defaultdict(lambda: defaultdict(int))

        for box_id, timestamp in updates:
            sorted_updates[box_id][int(timestamp)] += 1

        for box_id, timestamps in sorted_updates.items():
            # whisper sorts points internally, so no need for
            # `sorted(timestamps.items())` here.
            timestamps = timestamps.items()

            db_path = os.path.join(self.dir_prefix,
                                   TimedDB.make_db_name(box_id))

            if not os.path.exists(db_path):
                # FIX: corrected the 'whsiper' typo in the log message.
                logger.info('create whisper db for box %s at path %s', box_id,
                            db_path)
                whisper.create(db_path,
                               WHISPER_ARCHIVES,
                               xFilesFactor=0.5,
                               aggregationMethod='sum',
                               sparse=False,
                               useFallocate=True)

            # Serialise writers on the same file.
            with get_lock(db_path):
                whisper.update_many(db_path, timestamps)
Example #5
0
    def test_fetch(self):
        """fetch info from database """

        # fetching from a nonexistent db must raise
        with self.assertRaises(Exception):
            whisper.fetch("this_db_does_not_exist", 0)

        # archives at second, minute, hour and day resolution
        schema = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.db, schema)

        # a from-time later than the until-time is an invalid range
        with self.assertRaises(whisper.InvalidTimeInterval):
            whisper.fetch(self.db, time.time(), time.time() - 6000)

        result = whisper.fetch(self.db, 0)
        time_info, values = result[0], result[1]
        coarsest_step, coarsest_points = schema[-1]

        # the returned range spans the coarsest archive exactly
        self.assertEqual(time_info[1] - time_info[0],
                         coarsest_step * coarsest_points)

        # one value slot per stored point in that archive
        self.assertEqual(len(values), coarsest_points)

        # and the step matches its resolution
        self.assertEqual(time_info[2], coarsest_step)

        self._removedb()
Example #6
0
    def test_file_fetch_edge_cases(self):
        """
        Test some of the edge cases in file_fetch() that should return
        None or raise an exception
        """
        # single archive: 1-second resolution, 60 points
        whisper.create(self.filename, [(1, 60)])

        with open(self.filename, 'rb') as fh:
            msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
            until_time = 0
            from_time = int(time.time()) + 100

            # fromTime after untilTime must raise InvalidTimeInterval
            with AssertRaisesException(
                    whisper.InvalidTimeInterval(msg.format(from_time, until_time))):
                whisper.file_fetch(fh, fromTime=from_time, untilTime=until_time)

            # fromTime > now aka metrics from the future
            self.assertIsNone(
                whisper.file_fetch(fh, fromTime=int(time.time()) + 100,
                                   untilTime=int(time.time()) + 200),
            )

            # untilTime > oldest time stored in the archive
            headers = whisper.info(self.filename)
            the_past = int(time.time()) - headers['maxRetention'] - 200
            self.assertIsNone(
                whisper.file_fetch(fh, fromTime=the_past - 1, untilTime=the_past),
            )

            # untilTime > now, change untilTime to now
            now = int(time.time())
            # passing now= pins the clock so the expected tuple is deterministic
            self.assertEqual(
                whisper.file_fetch(fh, fromTime=now, untilTime=now + 200, now=now),
                ((now + 1, now + 2, 1), [None]),
            )
    def test_render_view(self):
        """Render view: JSON and PNG responses, caching headers, datapoints."""
        url = reverse('graphite.render.views.renderView')

        # no stored data yet: JSON body is empty, caching headers present
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # without format=json the view renders a PNG image
        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        self.addCleanup(self.wipe_whisper)
        # 1-second resolution, 60 points
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.5, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        # last four datapoints: an empty slot then the three stored values
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(
            end, [[None, ts - 3], [0.5, ts - 2], [0.4, ts - 1], [0.6, ts]])
Example #8
0
    def test_fetch(self):
        """
        fetch info from database
        """
        # Don't use AssertRaisesException due to a super obscure bug in
        # python2.6 which returns an IOError in the 2nd argument of __exit__
        # in a context manager as a tuple. See this for a minimal reproducer:
        #    http://git.io/cKz30g
        with self.assertRaises(IOError):
            # a missing db must raise on fetch
            whisper.fetch("this_db_does_not_exist", 0)

        # archives: second, minute, hour and day resolution
        schema = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.filename, schema)

        # a from-time after the until-time must be rejected
        now = int(time.time())
        past = now - 6000

        msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
        with AssertRaisesException(whisper.InvalidTimeInterval(msg.format(now, past))):
            whisper.fetch(self.filename, now, past)

        result = whisper.fetch(self.filename, 0)
        time_info, values = result[0], result[1]
        step, points = schema[-1]

        # the range covered equals the span of the coarsest archive
        self.assertEqual(time_info[1] - time_info[0], step * points)

        # one slot per stored point
        self.assertEqual(len(values), points)

        # step size matches the coarsest resolution
        self.assertEqual(time_info[2], step)
Example #9
0
    def test_fetch(self):
        """fetch info from database """

        # a db that was never created raises on fetch
        with self.assertRaises(Exception):
            whisper.fetch("this_db_does_not_exist", 0)

        # retention schema: 1s, 1min, 1h and 1d archives
        archives = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.db, archives)

        # reversed time range (from after until) is invalid
        with self.assertRaises(whisper.InvalidTimeInterval):
            whisper.fetch(self.db, time.time(), time.time()-6000)

        fetched = whisper.fetch(self.db, 0)
        header = fetched[0]
        datapoints = fetched[1]
        day_step, day_points = archives[-1]

        # fetched window covers the whole day-resolution archive
        self.assertEqual(header[1] - header[0], day_step * day_points)

        # number of datapoints equals the archive's point count
        self.assertEqual(len(datapoints), day_points)

        # step equals the archive's seconds-per-point
        self.assertEqual(header[2], day_step)

        self._removedb()
Example #10
0
def createWhisperFile(metric, dbFilePath, dbFileExists):
  """Create the whisper db for *metric* at *dbFilePath* if it is missing.

  Returns False when whisper.create fails, True otherwise.  Raises when
  no storage schema matches the metric name.
  """
  if not dbFileExists:
    archiveConfig = None
    xFilesFactor, aggregationMethod = None, None

    # first storage schema whose pattern matches the metric wins
    for schema in schemas:
      if schema.matches(metric):
        log.creates('new metric %s matched schema %s' % (metric, schema.name))
        archiveConfig = [archive.getTuple() for archive in schema.archives]
        break

    # aggregation settings are matched independently
    for schema in agg_schemas:
      if schema.matches(metric):
        log.creates('new metric %s matched aggregation schema %s' % (metric, schema.name))
        xFilesFactor, aggregationMethod = schema.archives
        break

    if not archiveConfig:
      # FIX: this raise was over-indented relative to its block
      raise Exception("No storage schema matched the metric '%s', check your storage-schemas.conf file." % metric)

    dbDir = dirname(dbFilePath)
    try:
      os.makedirs(dbDir)
    except OSError as e:
      # an already-existing directory is fine; log anything else
      if e.errno != errno.EEXIST:
        log.err("%s" % e)
    log.creates("creating database file %s (archive=%s xff=%s agg=%s)" %
                (dbFilePath, archiveConfig, xFilesFactor, aggregationMethod))
    try:
      whisper.create(dbFilePath, archiveConfig, xFilesFactor, aggregationMethod, settings.WHISPER_SPARSE_CREATE, settings.WHISPER_FALLOCATE_CREATE)
      instrumentation.increment('creates')
    except Exception as e:
      # FIX: `except Exception, e` is Python-2-only; `as` works on 2.6+ and 3
      log.err("Error creating %s: %s" % (dbFilePath, e))
      return False
  # FIX: report success explicitly so callers can test the result
  # (mirrors the False returned on the failure path above).
  return True
Example #11
0
    def test_single_metric(self):
        """Verify a single whisper metric round-trips through the importer."""
        xfilesfactor = 0.5
        aggregation_method = "last"
        # These retentions are such that every other point is present in both
        # archives. Test validates that duplicate points get inserted only once.
        retentions = [(1, 10), (2, 10)]
        high_precision_duration = retentions[0][0] * retentions[0][1]
        low_precision_duration = retentions[1][0] * retentions[1][1]
        now = int(time.time())
        time_from, time_to = now - low_precision_duration, now
        points = [(float(t), float(now - t))
                  for t in xrange(time_from, time_to)]
        metric_name = "test_metric"
        metric_path = os_path.join(self.tempdir, metric_name + ".wsp")
        whisper.create(metric_path, retentions, xfilesfactor,
                       aggregation_method)
        whisper.update_many(metric_path, points)

        self._call_main()

        metric = self.accessor.get_metric(metric_name)
        self.assertTrue(metric)
        # FIX: originally compared metric.name against itself (always
        # true, since the name string had been reassigned to the metric
        # object); compare against the name that was created instead.
        self.assertEqual(metric.name, metric_name)
        self.assertEqual(metric.aggregator.carbon_name, aggregation_method)
        self.assertEqual(metric.carbon_xfilesfactor, xfilesfactor)
        self.assertEqual(metric.retention.as_string, "10*1s:10*2s")

        points_again = list(
            self.accessor.fetch_points(metric, time_from, time_to,
                                       metric.retention[0]))
        self.assertEqual(points[-high_precision_duration:], points_again)
Example #12
0
    def test_render_view(self):
        """Exercise the render view in JSON and PNG modes, then with data."""
        url = reverse('graphite.render.views.renderView')

        # empty db: JSON payload is an empty list, caching headers set
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # default format is a rendered PNG
        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        self.addCleanup(self.wipe_whisper)
        # one archive: 1-second precision, 60 points
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.5, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        # the tail of the series: one empty slot, then the three updates
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(
            end, [[None, ts - 3], [0.5, ts - 2], [0.4, ts - 1], [0.6, ts]])
  def create_db(self, metric):
    """Create the whisper database for *metric* from the storage schemas.

    NOTE(review): ``dbFilePath`` is referenced below but never defined in
    this method or its parameters -- as written this raises NameError.
    Presumably it should be derived from *metric*; confirm against the
    caller before relying on this method.
    """
    archiveConfig = None
    xFilesFactor, aggregationMethod = None, None

    # first matching storage schema decides the archive layout
    for schema in schemas:
      if schema.matches(metric):
        log.creates('new metric %s matched schema %s' % (metric, schema.name))
        archiveConfig = [archive.getTuple() for archive in schema.archives]
        break

    # aggregation settings come from a separate schema list
    for schema in agg_schemas:
      if schema.matches(metric):
        log.creates('new metric %s matched aggregation schema %s' % (metric, schema.name))
        xFilesFactor, aggregationMethod = schema.archives
        break

    if not archiveConfig:
      raise Exception("No storage schema matched the metric '%s', check your storage-schemas.conf file." % metric)

    dbDir = dirname(dbFilePath)
    # FIX: create the directory with os.makedirs instead of shelling out
    # (`os.system("mkdir -p -m 755 '%s'")` was open to shell injection
    # through the metric-derived path).
    if not os.path.isdir(dbDir):
      os.makedirs(dbDir, 0o755)

    log.creates("creating database file %s (archive=%s xff=%s agg=%s)" %
                (dbFilePath, archiveConfig, xFilesFactor, aggregationMethod))
    whisper.create(dbFilePath, archiveConfig, xFilesFactor, aggregationMethod, settings.WHISPER_SPARSE_CREATE)
    # FIX: 0o755 octal literal is valid on Python 2.6+ and 3 (was 0755)
    os.chmod(dbFilePath, 0o755)
Example #14
0
 def _populate_data(self):
     """Seed the test whisper db with one value per second, newest last."""
     self.db = os.path.join(settings.WHISPER_DIR, 'test.wsp')
     whisper.create(self.db, [(1, 60)])
     now = int(time.time())
     offset = 0
     # walk the test data newest-to-oldest, one second apart
     for value in reversed(self._test_data):
         whisper.update(self.db, value, now - offset)
         offset += 1
     self.ts = now
Example #15
0
    def test_update_many_excess(self):
        """update_many with more points than fit keeps only the newest."""
        wsp = "test_update_many_excess.wsp"
        self.addCleanup(self._remove, wsp)
        archive_len = 3
        archive_step = 1
        whisper.create(wsp, [(archive_step, archive_len)])

        # build exactly one more point than the archive can hold
        excess_len = 1
        num_input_points = archive_len + excess_len
        test_now = int(time.time())
        input_start = test_now - num_input_points + archive_step
        input_points = []
        for offset in range(num_input_points):
            input_points.append((input_start + offset, random.random() * 10))

        # write the oversized batch
        whisper.update_many(wsp, input_points, now=test_now)

        # only the trailing archive_len points should have been stored
        oldest_kept = input_points[-archive_len][0]
        newest = input_points[-1][0]
        actual_time_info = whisper.fetch(wsp, 0, now=test_now)[0]
        # untilInterval sits one step beyond the newest stored point
        expected = (oldest_kept, newest + archive_step, archive_step)
        self.assertEqual(actual_time_info, expected)
Example #16
0
    def test_normal(self):
        """info() must work with header caching switched on."""
        whisper.create(self.filename, [(1, 60), (60, 60)])

        whisper.CACHE_HEADERS = True
        # second call is served from the header cache
        whisper.info(self.filename)
        whisper.info(self.filename)
        whisper.CACHE_HEADERS = False
Example #17
0
    def test_setAggregation(self):
        """Create a db, change aggregation, xFilesFactor, then use info() to validate"""
        retention = [(1, 60), (60, 60)]

        # fresh database with a known-good configuration
        whisper.create(self.db, retention)

        # exercise every aggregation method against a spread of factors
        for method in whisper.aggregationMethods:
            for factor in (0.0, 0.2, 0.4, 0.7, 0.75, 1.0):
                before = whisper.info(self.db)

                # setting only the method must leave xFilesFactor alone
                whisper.setAggregationMethod(self.db, method)
                after_method = whisper.info(self.db)
                self.assertEqual(before['xFilesFactor'],
                                 after_method['xFilesFactor'])
                self.assertEqual(method, after_method['aggregationMethod'])

                # now pass the optional xFilesFactor as well
                whisper.setAggregationMethod(self.db, method, factor)
                after_both = whisper.info(self.db)
                # round-trip through a 32-bit float because that is how the
                # header stores it (0.2 becomes 0.20000000298023224 otherwise)
                expected_factor = struct.unpack(
                    "!f", struct.pack("!f", factor))[0]
                self.assertEqual(after_both['xFilesFactor'], expected_factor)

                # re-assert the method after the pack/seek round trip
                self.assertEqual(method, after_both['aggregationMethod'])

        self._removedb()
Example #18
0
    def test_setAggregation(self):
        """Create a db, change aggregation, xFilesFactor, then use info() to validate"""
        retention = [(1, 60), (60, 60)]

        # create a new db with a valid configuration
        whisper.create(self.db, retention)

        # try every aggregation method with a range of xFilesFactors
        for ag in whisper.aggregationMethods:
            for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
                # original xFilesFactor
                info0 = whisper.info(self.db)
                # optional xFilesFactor not passed
                whisper.setAggregationMethod(self.db, ag)

                # original value should not change
                info1 = whisper.info(self.db)
                self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])
                # the selected aggregation method should have applied
                self.assertEqual(ag, info1['aggregationMethod'])

                # optional xFilesFactor used
                whisper.setAggregationMethod(self.db, ag, xff)
                # new info should match what we just set it to
                info2 = whisper.info(self.db)
                # packing and unpacking because the header stores a 32-bit
                # float: AssertionError: 0.20000000298023224 != 0.2
                target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
                self.assertEqual(info2['xFilesFactor'], target_xff)

                # same aggregationMethod assertion again, but double-checking
                # since we are playing with packed values and seek()
                self.assertEqual(ag, info2['aggregationMethod'])

        self._removedb()
def benchmark_create_update_fetch():
    """Time 100 create/update/fetch cycles against whisper and print ns/op."""
    path, archive_list, tear_down = set_up_create()
    # time.clock() is gone in Python 3.8+; use perf_counter when present.
    timer = getattr(time, 'perf_counter', None) or time.clock
    # start timer
    start_time = timer()
    for i in range(100):
        whisper.create(path, archive_list)

        seconds_ago = 3500
        current_value = 0.5
        increment = 0.2
        now = time.time()
        # file_update closes the file so we have to reopen every time.
        # FIX: distinct inner loop variable -- the original reused `i`
        # and clobbered the outer iteration counter.
        for step in range(seconds_ago):
            whisper.update(path, current_value, now - seconds_ago + step)
            current_value += increment

        from_time = now - seconds_ago
        until_time = from_time + 1000

        whisper.fetch(path, from_time, until_time)
        tear_down()

    # end timer
    end_time = timer()
    elapsed_time = end_time - start_time

    # Parenthesised print is valid on Python 2 and 3 alike.
    print("Executed 100 iterations in %ss (%i ns/op)" % (
        elapsed_time, (elapsed_time * 1000 * 1000 * 1000) / 100))
Example #20
0
 def populate_data(self):
     """Fill the test whisper db with the fixture series, one point/second."""
     self.db = os.path.join(settings.WHISPER_DIR, 'test.wsp')
     whisper.create(self.db, [(1, 60)])
     ts = int(time.time())
     # newest fixture value lands at ts, older values step back in time
     for age, value in enumerate(reversed(self._test_data)):
         whisper.update(self.db, value, ts - age)
     self.ts = ts
Example #21
0
    def test_file_fetch_edge_cases(self):
        """
        Test some of the edge cases in file_fetch() that should return
        None or raise an exception
        """
        # one archive: 1s resolution, 60 points
        whisper.create(self.filename, [(1, 60)])

        with open(self.filename, 'rb') as fh:
            msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
            until_time = 0
            from_time = int(time.time()) + 100

            # a reversed range must raise InvalidTimeInterval
            with AssertRaisesException(
                    whisper.InvalidTimeInterval(msg.format(from_time, until_time))):
                whisper.file_fetch(fh, fromTime=from_time, untilTime=until_time)

            # fromTime > now aka metrics from the future
            self.assertIsNone(
                whisper.file_fetch(fh, fromTime=int(time.time()) + 100,
                                   untilTime=int(time.time()) + 200),
            )

            # untilTime > oldest time stored in the archive
            headers = whisper.info(self.filename)
            the_past = int(time.time()) - headers['maxRetention'] - 200
            self.assertIsNone(
                whisper.file_fetch(fh, fromTime=the_past - 1, untilTime=the_past),
            )

            # untilTime > now, change untilTime to now
            now = int(time.time())
            # now= is passed so the expected result tuple is deterministic
            self.assertEqual(
                whisper.file_fetch(fh, fromTime=now, untilTime=now + 200, now=now),
                ((now + 1, now + 2, 1), [None]),
            )
Example #22
0
    def test_fetch(self):
        """
        fetch info from database
        """
        # Don't use AssertRaisesException due to a super obscure bug in
        # python2.6 which returns an IOError in the 2nd argument of __exit__
        # in a context manager as a tuple. See this for a minimal reproducer:
        #    http://git.io/cKz30g
        with self.assertRaises(IOError):
            # check a db that doesnt exist
            whisper.fetch("this_db_does_not_exist", 0)

        # SECOND MINUTE HOUR DAY retention archives
        retention = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.filename, retention)

        # check a db with an invalid time range (from after until)
        now = int(time.time())
        past = now - 6000

        msg = "Invalid time interval: from time '{0}' is after until time '{1}'"
        with AssertRaisesException(whisper.InvalidTimeInterval(msg.format(now, past))):
            whisper.fetch(self.filename, now, past)

        fetch = whisper.fetch(self.filename, 0)

        # check time range: spans the full coarsest (day) archive
        self.assertEqual(fetch[0][1] - fetch[0][0],
                         retention[-1][0] * retention[-1][1])

        # check number of points matches the coarsest archive
        self.assertEqual(len(fetch[1]), retention[-1][1])

        # check step size equals the coarsest resolution
        self.assertEqual(fetch[0][2], retention[-1][0])
Example #23
0
    def test_normal(self):
        """info() should behave the same when header caching is enabled."""
        whisper.create(self.filename, [(1, 60), (60, 60)])

        whisper.CACHE_HEADERS = True
        # the repeated call exercises the cached-header code path
        whisper.info(self.filename)
        whisper.info(self.filename)
        whisper.CACHE_HEADERS = False
Example #24
0
    def test_update_many_excess(self):
        """Only the newest points survive when update_many overflows the db."""
        # given an empty db
        wsp = "test_update_many_excess.wsp"
        self.addCleanup(self._remove, wsp)
        archive_len = 3
        archive_step = 1
        whisper.create(wsp, [(archive_step, archive_len)])

        # given too many points than the db can hold
        excess_len = 1
        num_input_points = archive_len + excess_len
        test_now = int(time.time())
        input_start = test_now - num_input_points + archive_step
        input_points = [(input_start + i, random.random() * 10)
                        for i in range(num_input_points)]

        # when the db is updated with too many points
        whisper.update_many(wsp, input_points, now=test_now)

        # then only the most recent input points (those at the end) were written
        actual_time_info = whisper.fetch(wsp, 0, now=test_now)[0]
        self.assertEqual(actual_time_info,
                         (input_points[-archive_len][0],
                          input_points[-1][0] + archive_step,  # untilInterval = newest + step
                          archive_step))
Example #25
0
 def _create_dbs(self):
     """Create the fixture whisper databases used by the tests."""
     fixtures = (
         ('test', 'foo.wsp'),
         ('test', 'bar', 'baz.wsp'),
     )
     for parts in fixtures:
         path = os.path.join(WHISPER_DIR, *parts)
         # parent directories do not exist yet in the fresh tree
         os.makedirs(os.path.dirname(path))
         whisper.create(path, [(1, 60)])
Example #26
0
 def test_00_create_empty_whisper(self):
     """
     Create a whisper file with one archive of 5 points where each point
     covers 60 second, and default xFilesFactor and aggregationMethod.
     """
     # the file must not exist before creation and must exist after
     self.assertFalse(os.path.exists(FILENAME))
     whisper.create(FILENAME, [(SECONDS_PER_POINT, NUMBER_OF_POINTS)])
     self.assertTrue(os.path.exists(FILENAME))
Example #27
0
    def create_rrd_file(self, postfix, overwrite=False):
        """Create the whisper file for *postfix*.

        An existing file is left untouched unless *overwrite* is true.
        """
        path = self.get_rrd_file(postfix)

        # create when overwriting, or when the file is not there yet
        if overwrite or not os.path.exists(path):
            whisper.create(path, self.archiveList, self.xFilesFactor,
                           self.get_aggregationMethod(postfix))
Example #28
0
 def create(self, filename):
     """Create a whisper db at *filename* from this object's configured
     archives, xFilesFactor, aggregation method and sparse flag."""
     whisper.create(
         filename,
         self.archives,
         xFilesFactor=self.x_files_factor,
         aggregationMethod=self.aggregation_method,
         sparse=self.sparse,
     )
    def record_metering_data(self, data):
        """Store one metering sample into whisper and index it in sqlite.

        *data* is expected to be a dict providing at least: timestamp
        (a datetime), counter_volume, counter_name, counter_type,
        counter_unit, resource_id, user_id, project_id, source and
        resource_metadata (this is what the body reads).
        """
        record = copy.deepcopy(data)

        # truncate to whole minutes, then convert to a unix timestamp
        timestamp = record["timestamp"].replace(second=0, microsecond=0)
        timestamp = int((timestamp - datetime.datetime(1970, 1, 1)).total_seconds())
        value = float(record["counter_volume"])

        # one .wsp file per (resource, meter) pair; dots in the meter
        # name are flattened to underscores
        record_path = (
            env_variables["whisper_path"] + data["resource_id"] + "_" + data["counter_name"].replace(".", "_") + ".wsp"
        )
        # if not os.path.isdir(os.path.dirname(record_path)):
        #     os.makedirs(os.path.dirname(record_path))

        if not os.path.isfile(record_path):
            # NOTE(review): `archieve_list` (sic) is defined elsewhere in
            # this module; the misspelled name must match its definition.
            whisper.create(record_path, archieve_list)

        whisper.update(record_path, value, timestamp)

        # add resource & meter to sqlite db (insert-if-missing)
        # NOTE(review): the connection is not closed if an execute raises;
        # consider try/finally or contextlib.closing -- confirm intent.
        conn = sqlite3.connect(env_variables["sql_db_path"])
        c = conn.cursor()
        c.execute("select count(*) from resources where resource_id=?", (data["resource_id"],))
        r = c.fetchone()[0]
        if r == 0:
            c.execute(
                "insert into resources (resource_id, user_id, project_id, source_id, resource_metadata)"
                + "values (?,?,?,?,?)",
                (
                    data["resource_id"],
                    data["user_id"],
                    data["project_id"],
                    data["source"],
                    json.dumps(data["resource_metadata"]),
                ),
            )

        # meters are keyed on (name, resource_id)
        c.execute(
            "select count(*) from meters where name=? and resource_id=?", (data["counter_name"], data["resource_id"])
        )
        r = c.fetchone()[0]
        if r == 0:
            c.execute(
                "insert into meters (name, type, unit, resource_id, project_id, user_id, source)"
                + "values (?,?,?,?,?,?,?)",
                (
                    data["counter_name"],
                    data["counter_type"],
                    data["counter_unit"],
                    data["resource_id"],
                    data["project_id"],
                    data["user_id"],
                    data["source"],
                ),
            )

        conn.commit()
        conn.close()
Example #30
0
def store(file, data):
	print "store"
	import whisper
	try:
		whisper.create(file, [(1,60*60*24), (10, 60*60*24), (60, 60*60*24*30) ])
	except:
		pass
	print "update"
	whisper.update(file, data)
Example #31
0
    def test_setAggregation(self):
        """
        Create a db, change aggregation, xFilesFactor, then use info() to validate
        """
        original_lock = whisper.LOCK
        original_caching = whisper.CACHE_HEADERS
        original_autoflush = whisper.AUTOFLUSH

        whisper.LOCK = True
        whisper.AUTOFLUSH = True
        whisper.CACHE_HEADERS = True
        # Restore the module-level whisper flags even when an assertion
        # fails; previously they were only restored on the success path,
        # leaking LOCK/AUTOFLUSH/CACHE_HEADERS into subsequent tests.
        try:
            # create a new db with a valid configuration
            whisper.create(self.filename, self.retention)

            with AssertRaisesException(whisper.InvalidAggregationMethod('Unrecognized aggregation method: yummy beer')):
                whisper.setAggregationMethod(self.filename, 'yummy beer')

            # set every AggregationMethod available, at several xFilesFactors
            for ag in whisper.aggregationMethods:
                for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
                    # original xFilesFactor
                    info0 = whisper.info(self.filename)
                    # optional xFilesFactor not passed
                    old_ag = whisper.setAggregationMethod(self.filename, ag)

                    # should return old aggregationmethod
                    self.assertEqual(old_ag, info0['aggregationMethod'])

                    # original value should not change
                    info1 = whisper.info(self.filename)
                    self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])

                    # the selected aggregation method should have applied
                    self.assertEqual(ag, info1['aggregationMethod'])

                    # optional xFilesFactor used
                    old_ag = whisper.setAggregationMethod(self.filename, ag, xff)
                    # should return old aggregationmethod
                    self.assertEqual(old_ag, info1['aggregationMethod'])
                    # new info should match what we just set it to
                    info2 = whisper.info(self.filename)
                    # packing and unpacking because
                    # AssertionError: 0.20000000298023224 != 0.2
                    target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
                    self.assertEqual(info2['xFilesFactor'], target_xff)

                    # same aggregationMethod assertion again, but double-checking since
                    # we are playing with packed values and seek()
                    self.assertEqual(ag, info2['aggregationMethod'])

                    with SimulatedCorruptWhisperFile():
                        with AssertRaisesException(whisper.CorruptWhisperFile('Unable to read header', self.filename)):
                            whisper.setAggregationMethod(self.filename, ag)
        finally:
            whisper.LOCK = original_lock
            whisper.AUTOFLUSH = original_autoflush
            whisper.CACHE_HEADERS = original_caching
Example #32
0
 def _create(self):
     """Create the Whisper file on disk"""
     db_dir = settings.SALMON_WHISPER_DB_PATH
     if not os.path.exists(db_dir):
         os.makedirs(db_dir)
     # Parse each comma-separated retention definition into an archive spec.
     archives = []
     for retention_def in settings.ARCHIVES.split(","):
         archives.append(whisper.parseRetentionDef(retention_def))
     whisper.create(self.path, archives,
                    xFilesFactor=settings.XFILEFACTOR,
                    aggregationMethod=settings.AGGREGATION_METHOD)
Example #33
0
 def create_whisper(self, path, gz=False):
     """Create a 60-point, 1s-resolution whisper db under the test dir.

     When *gz* is true the plain file is replaced by a gzipped copy at
     ``<path>.gz``.
     """
     path = join(self.test_dir, path)
     parent = dirname(path)
     if not isdir(parent):
         os.makedirs(parent)
     whisper.create(path, [(1, 60)])
     if not gz:
         return
     with open(path, 'rb') as raw, gzip.open("%s.gz" % path, 'wb') as zipped:
         shutil.copyfileobj(raw, zipped)
     os.remove(path)
Example #34
0
 def _createdb(self, wsp, schema=None, data=None):
     """Create whisper db *wsp* with *schema* and seed it with *data*.

     The previous signature used a mutable default argument
     (``schema=[(1, 20)]``); a None sentinel gives the same default
     without sharing the list across calls. When *data* is None, 20
     random points covering the last 20 seconds are generated.
     Returns the list of (timestamp, value) points written.
     """
     if schema is None:
         schema = [(1, 20)]
     whisper.create(wsp, schema)
     if data is None:
         tn = time.time() - 20
         data = [(tn + 1 + i, random.random() * 10) for i in range(20)]
     whisper.update_many(wsp, data)
     return data
Example #35
0
 def _createdb(self, wsp, schema=None, data=None):
     """Create whisper db *wsp* with *schema* and seed it with *data*.

     The previous signature used a mutable default argument
     (``schema=[(1, 20)]``); a None sentinel gives the same default
     without sharing the list across calls. When *data* is None, 20
     random points covering the last 20 seconds are generated.
     Returns the list of (timestamp, value) points written.
     """
     if schema is None:
         schema = [(1, 20)]
     whisper.create(wsp, schema)
     if data is None:
         tn = time.time() - 20
         data = [(tn + 1 + i, random.random() * 10) for i in range(20)]
     whisper.update_many(wsp, data)
     return data
Example #36
0
    def _write_data(self, hostID, plugin, data_json):
        """Write one plugin's values for *hostID* into per-datasource wsp files.

        Returns False when *plugin* is unset or when no wsp path is
        registered for the host in redis; otherwise processes every value
        (creating directories/files on first sight) and returns True.
        """
        # For each plugin
        if plugin is None:  # was '== None'; identity check is the correct idiom
            return False

        data =  self.jsonToPython(data_json)
        data_timestamp = data.get('TimeStamp')

        # TODO dont write data if no infos

        # Get data_path registered for this host
        data_path = self._redis_connexion.redis_hget("WSP_PATH", hostID)
        if data_path is None:
            return False

        wspPath = '%s/%s' % (data_path, plugin)
        for ds_name, value in data.get("Values", {}).iteritems():

            ds_path = '%s/%s.wsp' % (wspPath, ds_name)
            # Create wsp file - config wsp here
            if not os.path.isfile(ds_path):
                self._logger.warning("writewsp host : %s Create wsp file : %s"
                                        % (hostID, ds_path))
                # Create directory
                if not os.path.exists(wspPath):
                    try:
                        os.makedirs(wspPath)
                        self._logger.info("writewsp host : %s make directory : %s"
                                            % (hostID, wspPath))
                    except OSError:
                        self._logger.error("writewsp host : %s can't make directory : %s"
                                            % (hostID, wspPath))
                        continue
                try:
                    whisper.create(ds_path,
                                   [(60, 1440),   # --- Daily (1 Minute Average)
                                   (300, 2016),   # --- Weekly (5 Minute Average)
                                   (600, 4608),   # --- Monthly (10 Min Average)
                                   (3600, 8784)]) # --- Yearly (1 Hour Average)
                except Exception as e:
                    self._logger.error("writewsp host : %s Create wsp Error %s"
                        % (hostID, str(e)))
                    continue
            # Update whisper
            # NOTE(review): whisper.update() expects numeric value/timestamp;
            # passing str() here relies on whisper's internal coercion -- confirm.
            try:
                self._logger.debug("writewsp host : %s Update wsp "
                                   "Timestamp %s For value %s in file %s"
                                    % (hostID, data_timestamp, value, ds_path))
                whisper.update(ds_path, str(value), str(data_timestamp) )
            except Exception as e:
                self._logger.error("writewsp host : %s Update Error %s - %s"
                                    % (hostID, ds_path, e))
                continue
        self._dataNumber += 1
        return True
Example #37
0
    def test_setAggregation(self):
        """
        Create a db, change aggregation, xFilesFactor, then use info() to validate
        """
        original_lock = whisper.LOCK
        original_caching = whisper.CACHE_HEADERS
        original_autoflush = whisper.AUTOFLUSH

        whisper.LOCK = True
        whisper.AUTOFLUSH = True
        whisper.CACHE_HEADERS = True
        # Restore the module-level whisper flags even when an assertion
        # fails; previously they were only restored on the success path,
        # leaking LOCK/AUTOFLUSH/CACHE_HEADERS into subsequent tests.
        try:
            # create a new db with a valid configuration
            whisper.create(self.filename, self.retention)

            with AssertRaisesException(
                    whisper.InvalidAggregationMethod(
                        'Unrecognized aggregation method: yummy beer')):
                whisper.setAggregationMethod(self.filename, 'yummy beer')

            # set every AggregationMethod available
            for ag in whisper.aggregationMethods:
                for xff in 0.0, 0.2, 0.4, 0.7, 0.75, 1.0:
                    # original xFilesFactor
                    info0 = whisper.info(self.filename)
                    # optional xFilesFactor not passed
                    whisper.setAggregationMethod(self.filename, ag)

                    # original value should not change
                    info1 = whisper.info(self.filename)
                    self.assertEqual(info0['xFilesFactor'], info1['xFilesFactor'])

                    # the selected aggregation method should have applied
                    self.assertEqual(ag, info1['aggregationMethod'])

                    # optional xFilesFactor used
                    whisper.setAggregationMethod(self.filename, ag, xff)
                    # new info should match what we just set it to
                    info2 = whisper.info(self.filename)
                    # packing and unpacking because
                    # AssertionError: 0.20000000298023224 != 0.2
                    target_xff = struct.unpack("!f", struct.pack("!f", xff))[0]
                    self.assertEqual(info2['xFilesFactor'], target_xff)

                    # same aggregationMethod assertion again, but double-checking since
                    # we are playing with packed values and seek()
                    self.assertEqual(ag, info2['aggregationMethod'])

                    with SimulatedCorruptWhisperFile():
                        with AssertRaisesException(
                                whisper.CorruptWhisperFile('Unable to read header',
                                                           self.filename)):
                            whisper.setAggregationMethod(self.filename, ag)
        finally:
            whisper.LOCK = original_lock
            whisper.AUTOFLUSH = original_autoflush
            whisper.CACHE_HEADERS = original_caching
Example #38
0
    def test_resize_with_aggregate(self):
        """resize whisper file with aggregate"""
        # 60s per point save two days
        retention = [(60, 60 * 24 * 2)]
        whisper.create(self.filename, retention)

        # insert data: one point per minute covering the whole two-day window
        now_timestamp = int(
            (datetime.now() - datetime(1970, 1, 1)).total_seconds())
        now_timestamp -= now_timestamp % 60  # format timestamp
        points = [(now_timestamp - i * 60, i) for i in range(0, 60 * 24 * 2)]
        whisper.update_many(self.filename, points)
        # Sanity-check a 10-minute window 25h in the past before resizing.
        data = whisper.fetch(self.filename,
                             fromTime=now_timestamp - 3600 * 25,
                             untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        self.assertEqual(len(data[1]), 10)
        self.assertEqual(data[0][2], 60)  # high retention == 60
        for d in data[1]:
            self.assertIsNotNone(d)
        # resize from high to low
        # NOTE(review): shells out to whisper-resize.py, which must be on PATH.
        os.system(
            'whisper-resize.py %s 60s:1d 300s:2d --aggregate --nobackup >/dev/null'
            % self.filename)  # noqa
        data_low = whisper.fetch(self.filename,
                                 fromTime=now_timestamp - 3600 * 25,
                                 untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        # The 25h-old window now falls in the 300s archive: 2 points.
        self.assertEqual(len(data_low[1]), 2)
        self.assertEqual(data_low[0][2], 300)  # low retention == 300
        for d in data_low[1]:
            self.assertIsNotNone(d)
        # Recent data is still served by the 60s archive.
        data_high = whisper.fetch(self.filename,
                                  fromTime=now_timestamp - 60 * 10,
                                  untilTime=now_timestamp)
        self.assertEqual(len(data_high[1]), 10)
        self.assertEqual(data_high[0][2], 60)  # high retention == 60
        # resize from low to high
        os.system(
            'whisper-resize.py %s 60s:2d --aggregate --nobackup >/dev/null' %
            self.filename)  # noqa
        data1 = whisper.fetch(self.filename,
                              fromTime=now_timestamp - 3600 * 25,
                              untilTime=now_timestamp - 3600 * 25 + 60 * 10)
        self.assertEqual(len(data1[1]), 10)
        # noqa data1 looks like ((1588836720, 1588837320, 60), [None, None, 1490.0, None, None, None, None, 1485.0, None, None])
        # data1[1] have two not none value
        self.assertEqual(len(list(filter(lambda x: x is not None, data1[1]))),
                         2)
        data2 = whisper.fetch(self.filename,
                              fromTime=now_timestamp - 60 * 15,
                              untilTime=now_timestamp - 60 * 5)
        # noqa data2 looks like ((1588925820, 1588926420, 60), [10.0, 11.0, 10.0, 9.0, 8.0, 5.0, 6.0, 5.0, 4.0, 3.0])
        self.assertEqual(len(list(filter(lambda x: x is not None, data2[1]))),
                         10)

        # clean up
        # NOTE(review): the runner also calls tearDown; presumably this
        # explicit call makes cleanup immediate -- confirm it is idempotent.
        self.tearDown()
Example #39
0
 def create_whisper_files(self):
     """Create one whisper db per metric; existing files are left untouched."""
     if self.args.dry_run:
         self.log.info(
             'Skipping creating whisper files because dry-run flag is set')
         return
     paths = (self.get_whisper_file(metric) for metric in METRICS)
     for path in paths:
         try:
             whisper.create(path, ARCHIVES)
         except whisper.InvalidConfiguration:
             # File already exists.
             pass
Example #40
0
 def test_get_structure(self):
     """get_structure() exposes wsp files as LeafNodes and prunes deep dirs."""
     root = settings.WHISPER_DIR
     whisper.create(os.path.join(root, 'test2.wsp'), [(1, 60)])
     # Create a bit of a structure
     os.makedirs(os.path.join(root, "level1", "level2", "level3"))
     level1 = os.path.join(root, "level1")
     # (removed unused local 'level2' -- the assertion below uses the
     # string key, not a filesystem path)
     whisper.create(os.path.join(level1, 'level1_test.wsp'), [(1, 60)])
     structure = query.get_structure()
     self.assertIn("test", structure)
     self.assertIsInstance(structure["test2"], LeafNode)
     self.assertNotIn("level2", structure["level1"])
Example #41
0
    def test_merge_bad_archive_config(self):
        """Merging two databases with different schemas must fail."""
        testdb = "test-%s" % self.filename

        # One db via the shared helper, one with an incompatible schema.
        self._update()
        whisper.create(testdb, [(100, 1)])

        expected = NotImplementedError(
            'db.wsp and test-db.wsp archive configurations are unalike. '
            'Resize the input before merging')
        with AssertRaisesException(expected):
            whisper.merge(self.filename, testdb)

        self._remove(testdb)
Example #42
0
        def create(self, metric, retentions, xfilesfactor, aggregation_method):
            """Create the whisper file backing *metric*, making parent dirs."""
            path = self.getFilesystemPath(metric)
            parent = dirname(path)
            if not exists(parent):
                try:
                    os.makedirs(parent)
                except OSError as e:
                    # Log and fall through; whisper.create will surface
                    # any real problem with the path.
                    log.err("%s" % e)

            whisper.create(path, retentions, xfilesfactor, aggregation_method,
                           self.sparse_create, self.fallocate_create)
    def test_merge_bad_archive_config(self):
        """merge() rejects databases whose archive layouts differ."""
        incompatible = "test-%s" % self.filename

        # Build one db through the shared helper and one with a
        # deliberately different schema.
        self._update()
        whisper.create(incompatible, [(100, 1)])

        with AssertRaisesException(NotImplementedError(
                'db.wsp and test-db.wsp archive configurations are unalike. '
                'Resize the input before merging')):
            whisper.merge(self.filename, incompatible)

        self._remove(incompatible)
Example #44
0
    def create(self, metric, retentions, xfilesfactor, aggregation_method):
      """Create the whisper file for *metric*, making its directory first."""
      path = self.getFilesystemPath(metric)
      target_dir = dirname(path)
      if not exists(target_dir):
        try:
          os.makedirs(target_dir)
        except OSError as e:
          # Log and continue; whisper.create will surface a bad path.
          log.err("%s" % e)

      whisper.create(path, retentions, xfilesfactor, aggregation_method,
                     self.sparse_create, self.fallocate_create)
Example #45
0
    def test_file_diff_invalid(self):
        """file_diff() refuses archives whose configurations differ."""
        testdb = "test-%s" % self.filename

        whisper.create(testdb, [(120, 10)])
        whisper.create(self.filename, self.retention)

        expected = NotImplementedError(
            'test-db.wsp and db.wsp archive configurations are unalike. '
            'Resize the input before diffing')
        # Diffing 2 archives with different retentions should fail
        with open(testdb, 'rb') as fh_1, open(self.filename, 'rb+') as fh_2:
            with AssertRaisesException(expected):
                whisper.file_diff(fh_1, fh_2)
        self._remove(testdb)
Example #46
0
 def write_series(self, series):
     """Materialize *series* as a whisper file, skipping None points.

     The metric's dotted pathExpression is mapped to a directory tree
     under WHISPER_DIR.
     """
     file_name = os.path.join(
         WHISPER_DIR,
         '{0}.wsp'.format(series.pathExpression.replace('.', os.sep)))
     # Guard makedirs: the unconditional call raised OSError when two
     # series shared a parent directory.
     dir_name = os.path.dirname(file_name)
     if not os.path.isdir(dir_name):
         os.makedirs(dir_name)
     whisper.create(file_name, [(1, 180)])
     data = [
         (series.start + index * series.step, value)
         for index, value in enumerate(series)
         if value is not None
     ]
     whisper.update_many(file_name, data)
Example #47
0
 def _create(self):
     """Create the Whisper file on disk"""
     # Make sure the db directory exists before writing into it.
     db_dir = settings.SALMON_WHISPER_DB_PATH
     if not os.path.exists(db_dir):
         os.makedirs(db_dir)
     archives = [whisper.parseRetentionDef(definition)
                 for definition in settings.ARCHIVES.split(",")]
     whisper.create(self.path, archives,
                    xFilesFactor=settings.XFILEFACTOR,
                    aggregationMethod=settings.AGGREGATION_METHOD)
    def test_file_diff_invalid(self):
        """Diffing two databases with unalike retentions must raise."""
        other_db = "test-%s" % self.filename

        # Two dbs with deliberately different retention schemas.
        whisper.create(other_db, [(120, 10)])
        whisper.create(self.filename, self.retention)

        with open(other_db, 'rb') as left:
            with open(self.filename, 'rb+') as right:
                with AssertRaisesException(NotImplementedError(
                        'test-db.wsp and db.wsp archive configurations are unalike. '
                        'Resize the input before diffing')):
                    whisper.file_diff(left, right)
        self._remove(other_db)
Example #49
0
def createDs(uuid):
    """Create a whisper data file for *uuid* under WHISPER_DATA.

    Exits the process with an error message when whisper refuses the
    configuration (e.g. the file already exists).
    """
    archives = [whisper.parseRetentionDef(retentionDef) for retentionDef in WHISPER_ARCHIVES]
    dataFile = os.path.join(WHISPER_DATA, str(uuid) + ".wsp")
    try:
        os.makedirs(WHISPER_DATA)
    except OSError as exception:
        # An already-existing directory is fine; anything else is fatal.
        if exception.errno != errno.EEXIST:
            raise
    try:
        whisper.create(dataFile, archives, xFilesFactor=0.5, aggregationMethod="average")
    # 'except X, e' is Python-2-only syntax; the 'as' form works on 2.6+ and 3.
    except whisper.WhisperException as exc:
        raise SystemExit('[ERROR] %s' % str(exc))
Example #50
0
    def test_info_bogus_file(self):
        """info() returns None for a missing file and raises on corruption."""
        self.assertIsNone(whisper.info('bogus-file'))

        # A real db whose header we then corrupt in-memory.
        whisper.create(self.filename, self.retention)
        expected_header = whisper.CorruptWhisperFile(
            'Unable to read header', self.filename)
        with SimulatedCorruptWhisperFile():
            with AssertRaisesException(expected_header):
                whisper.info(self.filename)

        # Corrupt archive metadata instead of the header.
        expected_archive = whisper.CorruptWhisperFile(
            'Unable to read archive0 metadata', self.filename)
        with SimulatedCorruptWhisperFile(corrupt_archive=True):
            with AssertRaisesException(expected_archive):
                whisper.info(self.filename)
    def test_info_bogus_file(self):
        """A missing file yields None; corrupted metadata raises."""
        self.assertIsNone(whisper.info('bogus-file'))

        # Build a valid db, then simulate corruption around info() calls.
        whisper.create(self.filename, self.retention)
        with SimulatedCorruptWhisperFile():
            with AssertRaisesException(whisper.CorruptWhisperFile(
                    'Unable to read header', self.filename)):
                whisper.info(self.filename)

        with SimulatedCorruptWhisperFile(corrupt_archive=True):
            with AssertRaisesException(whisper.CorruptWhisperFile(
                    'Unable to read archive0 metadata', self.filename)):
                whisper.info(self.filename)
Example #52
0
    def test_raw_data(self):
        """/render with rawData=1 returns a pipe-separated header and CSV data."""
        whisper.create(self.db, [(1, 60)])

        response = self.app.get('/render',
                                query_string={'rawData': '1',
                                              'target': 'test'})
        info, data = response.data.decode('utf-8').strip().split('|', 1)
        path, start, stop, step = info.split(',')
        self.assertEqual(path, 'test')
        self.assertEqual(int(step), 1)
        self.assertEqual(int(stop) - int(start), 60)
        # 60 empty 1-second slots -> 60 'None' datapoints.
        self.assertEqual(data.split(','), ['None'] * 60)
Example #53
0
    def update(self, metric, dataline):
        """Store one "value timestamp" sample for *metric*, creating the db lazily."""
        fields = dataline.split()
        wsp_path = os.path.join(self.root, metric) + '.wsp'

        if not os.path.exists(wsp_path):
            # First sample for this metric: create its database.
            mkdirs(os.path.dirname(wsp_path))
            whisper.create(wsp_path, [(METRIC_SEC_PER_POINT, METRIC_POINTS_TO_STORE)])

        whisper.update(wsp_path, float(fields[0]), float(fields[1]))
Example #54
0
    def test_merge_empty(self):
        """
        test merging from an empty database
        """
        testdb_a = "test-a-%s" % self.filename
        testdb_b = "test-b-%s" % self.filename

        # Two empty databases sharing the same retention schema, each
        # registered for cleanup before creation.
        for db in (testdb_a, testdb_b):
            self.addCleanup(self._remove, db)
            whisper.create(db, self.retention)

        whisper.merge(testdb_a, testdb_b)
Example #55
0
    def create_whisper_hosts(self, ts=None):
        """Create cpu.wsp dbs for two fake workers and seed one point each."""
        worker1 = self.hostcpu.replace('hostname', 'worker1')
        worker2 = self.hostcpu.replace('hostname', 'worker2')
        try:
            os.makedirs(worker1.replace('cpu.wsp', ''))
            os.makedirs(worker2.replace('cpu.wsp', ''))
        except OSError:
            # Directories already present from an earlier run.
            pass

        ts = ts or int(time.time())
        for value, worker in enumerate((worker1, worker2), start=1):
            whisper.create(worker, [(1, 60)])
            whisper.update(worker, value, ts)
Example #56
0
    def test_diff(self):
        """diff() reports mismatched points per archive."""
        testdb = "test-%s" % self.filename
        now = time.time()

        # Same schema, one diverging value at the same timestamp.
        for db, value in ((testdb, 1.0), (self.filename, 2.0)):
            whisper.create(db, self.retention)
            whisper.update(db, value, now)

        results = whisper.diff(testdb, self.filename)
        self._remove(testdb)

        # Archive 0 differs in one point; archive 1 has no differences.
        expected = [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)]
        self.assertEqual(results, expected)
Example #57
0
    class WhisperDatabase(TimeSeriesDatabase):
        """Carbon TimeSeriesDatabase plugin backed by whisper files."""
        plugin_name = 'whisper'

        def __init__(self, settings):
            self.data_dir = settings.LOCAL_DATA_DIR
            self.sparse_create = settings.WHISPER_SPARSE_CREATE
            self.fallocate_create = settings.WHISPER_FALLOCATE_CREATE
            if settings.WHISPER_AUTOFLUSH:
                log.msg("Enabling Whisper autoflush")
                whisper.AUTOFLUSH = True

            if settings.WHISPER_FALLOCATE_CREATE:
                if whisper.CAN_FALLOCATE:
                    log.msg("Enabling Whisper fallocate support")
                else:
                    log.err(
                        "WHISPER_FALLOCATE_CREATE is enabled but linking failed."
                    )

            if settings.WHISPER_LOCK_WRITES:
                if whisper.CAN_LOCK:
                    log.msg("Enabling Whisper file locking")
                    whisper.LOCK = True
                else:
                    log.err(
                        "WHISPER_LOCK_WRITES is enabled but import of fcntl module failed."
                    )

            if settings.WHISPER_FADVISE_RANDOM:
                # Older whisper versions lack CAN_FADVISE entirely.
                try:
                    if whisper.CAN_FADVISE:
                        log.msg("Enabling Whisper fadvise_random support")
                        whisper.FADVISE_RANDOM = True
                    else:
                        log.err(
                            "WHISPER_FADVISE_RANDOM is enabled but import of ftools module failed."
                        )
                except AttributeError:
                    log.err(
                        "WHISPER_FADVISE_RANDOM is enabled but skipped because it is not compatible with the version of Whisper."
                    )

        def write(self, metric, datapoints):
            """Append *datapoints* to the whisper file backing *metric*."""
            path = self.getFilesystemPath(metric)
            whisper.update_many(path, datapoints)

        def exists(self, metric):
            """Return True when a whisper file already backs *metric*."""
            return exists(self.getFilesystemPath(metric))

        def create(self, metric, retentions, xfilesfactor, aggregation_method):
            """Create the whisper file for *metric*, making parent dirs first."""
            path = self.getFilesystemPath(metric)
            directory = dirname(path)
            try:
                if not exists(directory):
                    os.makedirs(directory)
            # 'except OSError, e' is Python-2-only syntax; the 'as' form
            # works on 2.6+ and 3 and matches the other create() variants.
            except OSError as e:
                log.err("%s" % e)

            whisper.create(path, retentions, xfilesfactor, aggregation_method,
                           self.sparse_create, self.fallocate_create)
Example #58
0
 def handle(self):
     """Drain queued metric points from redis into whisper files."""
     points = 0
     for metric in self.redis.smembers(METRICS):
         values = self.redis.zrange(metric, 0, -1)
         points += len(values)
         wsp_file = target_to_path(self.path, metric)
         wsp_dir = os.path.dirname(wsp_file)
         # Remember directories we have already ensured exist.
         if wsp_dir not in self.dirs:
             if not os.path.isdir(wsp_dir):
                 os.makedirs(wsp_dir)
             self.dirs.add(wsp_dir)
         if not os.path.exists(wsp_file):
             whisper.create(wsp_file, [(10, 1000)])  # [FIXME] hardcoded values
         whisper.update_many(wsp_file, [struct.unpack('!ff', a) for a in values])
         if values:
             self.redis.zrem(metric, *values)
     self.metric(METRIC_POINTS, points)
Example #59
0
    def test_fetch_with_archive_to_select(self):
        """
        fetch info from database providing the archive to select
        """

        # SECOND MINUTE HOUR DAY
        retention = [(1, 60), (60, 60), (3600, 24), (86400, 365)]
        whisper.create(self.filename, retention)

        archives = ["1s", "1m", "1h", "1d"]

        for i in range(len(archives)):
            fetch = whisper.fetch(self.filename, 0, archiveToSelect=archives[i])
            self.assertEqual(fetch[0][2], retention[i][0])

            # check time range
            self.assertEqual(fetch[0][1] - fetch[0][0], retention[-1][0] * retention[-1][1])
        with AssertRaisesException(ValueError("Invalid granularity: 2")):
            fetch = whisper.fetch(self.filename, 0, archiveToSelect="2s")