def benchmark_create_update_fetch():
    """Benchmark 100 create/update/fetch cycles against a whisper file.

    Prints the total elapsed wall-clock time and the mean time per
    iteration in nanoseconds.
    """
    path, archive_list, tear_down = set_up_create()
    # time.clock() was removed in Python 3.8; time.time() gives
    # wall-clock seconds and works on every supported version.
    start_time = time.time()
    # "_" for the outer counter: the original reused "i" here and then
    # shadowed it in the inner loop below.
    for _ in range(100):
        whisper.create(path, archive_list)

        seconds_ago = 3500
        current_value = 0.5
        increment = 0.2
        now = time.time()
        # file_update closes the file so we have to reopen every time
        for i in range(seconds_ago):
            whisper.update(path, current_value, now - seconds_ago + i)
            current_value += increment

        from_time = now - seconds_ago
        until_time = from_time + 1000

        whisper.fetch(path, from_time, until_time)
        tear_down()

    elapsed_time = time.time() - start_time

    # print-as-function with a single argument is valid on Python 2 and 3
    print("Executed 100 iterations in %ss (%i ns/op)" % (
        elapsed_time, (elapsed_time * 1000 * 1000 * 1000) / 100))
Esempio n. 2
0
def waterlevel(db_name):
    """Reduce alert frequency after initial alert, reset on all-clear.

    After more than two failures the whisper DB is rolled over to the
    FOLLOWUP retention and every admin on the command line is mailed;
    once the failure buffer is clean it is rolled back to RETAINER
    (mailing the admins again).  Always returns 0.
    """
    import subprocess

    def _roll_over(retention, seed_value):
        # Build the replacement DB next to the original, seed it with
        # one datapoint, then atomically swap it into place.
        tmp_name = db_name + '.wsp2'
        whisper.create(tmp_name, retention, aggregationMethod='last')
        whisper.update(tmp_name, seed_value)
        os.rename(tmp_name, db_name)

    def _notify_admins():
        # subprocess with an argument list avoids the shell-injection
        # risk of the old os.system() string concatenation; stdin from
        # os.devnull replicates the original "</dev/null" redirect.
        for admin in sys.argv[2:]:
            with open(os.devnull) as devnull:
                subprocess.call(['mail', '-s', sys.argv[1], admin],
                                stdin=devnull)

    (times, fail_buffer) = whisper.fetch(db_name, 315550800)

    if fail_buffer.count(1) > 2:
        _roll_over(FOLLOWUP, 1)
        _notify_admins()

    if fail_buffer.count(1) == 0:
        # Only roll back if the DB is currently on the FOLLOWUP retention.
        if whisper.info(
                db_name)['archives'][0]['secondsPerPoint'] == FOLLOWUP[0][0]:
            _roll_over(RETAINER, 0)
            _notify_admins()

    return (0)
Esempio n. 3
0
 def populate_data(self):
     """Create a fresh 60-point, 1-second whisper test database and fill
     it with self._test_data, one point per second ending at "now"."""
     db_path = os.path.join(settings.WHISPER_DIR, 'test.wsp')
     whisper.create(db_path, [(1, 60)])
     now = int(time.time())
     offset = 0
     # oldest test value gets the oldest timestamp
     for value in reversed(self._test_data):
         whisper.update(db_path, value, now - offset)
         offset += 1
     self.db = db_path
     self.ts = now
Esempio n. 4
0
def benchmark_create_update_fetch():
	"""Benchmark 100 create/update/fetch cycles against a whisper file.

	Prints the total elapsed wall-clock time and the mean time per
	iteration in nanoseconds.
	"""
	path, archive_list, tear_down = set_up_create()
	# time.clock() was removed in Python 3.8; time.time() works everywhere.
	start_time = time.time()
	# "_" for the outer counter: the original reused "i" and shadowed it
	# in the inner loop below.
	for _ in range(100):
		whisper.create(path, archive_list)

		seconds_ago = 3500
		current_value = 0.5
		increment = 0.2
		now = time.time()
		# file_update closes the file so we have to reopen every time
		for i in range(seconds_ago):
			whisper.update(path, current_value, now - seconds_ago + i)
			current_value += increment

		from_time = now - seconds_ago
		until_time = from_time + 1000

		whisper.fetch(path, from_time, until_time)
		tear_down()

	elapsed_time = time.time() - start_time

	# print-as-function with one argument is valid on Python 2 and 3
	print("Executed 100 iterations in %ss (%i ns/op)" % (elapsed_time, (elapsed_time * 1000 * 1000 * 1000) / 100))
Esempio n. 5
0
 def _populate_data(self):
     """Create a fresh 60-point, 1-second whisper test database and fill
     it with self._test_data, one point per second ending at "now"."""
     db_path = os.path.join(settings.WHISPER_DIR, 'test.wsp')
     whisper.create(db_path, [(1, 60)])
     now = int(time.time())
     offset = 0
     # oldest test value gets the oldest timestamp
     for value in reversed(self._test_data):
         whisper.update(db_path, value, now - offset)
         offset += 1
     self.db = db_path
     self.ts = now
Esempio n. 6
0
 def save(self, value, timestamp, lenient=False):
     """Write one datapoint (value at timestamp) to this whisper file.

     When `lenient` is True and whisper rejects the timestamp, a single
     retry is attempted with timestamp-1, provided the drift from the
     local clock is below settings.drift_epsilon.  A second rejection
     (or any rejection with lenient=False) propagates to the caller.
     """
     logger.debug("Saving %s: %f" % (self.name, value))
     try:
         whisper.update(self.path, value, timestamp)
     except whisper.TimestampNotCovered as exc:
         # The timestamp we were given is either "in the future" (perhaps
         # because our own clock is delayed) or "before the time the
         # database remembers". If we're willing to fudge the timestamp,
         # check whether the difference is less than the configured
         # epsilon for clock drift. If it is, then try saving the value
         # again using a timestamp from one second earlier than reported.
         # If that's still not accepted, a new (unhandled) TimestampNot-
         # Covered exception will be raised for the caller to handle.
         statsd.incr('error.timestampnotcovered')
         if lenient:
             delta = timestamp - time.time() # in seconds
             statsd.timing('timestamp_drift', delta * 1000) # report in ms
             if abs(delta) < settings.drift_epsilon:
                 # Ensure lenient is set to False for the next attempt so
                 # that we don't end up in a loop
                 self.save(value, timestamp-1, lenient=False)
                 # Report only successful lenient saves
                 statsd.incr('lenient_save')
         else:
             raise
    def record_metering_data(self, data):
        """Store one metering sample: write the counter volume into a
        per-resource whisper file and register the resource/meter rows
        in the sqlite index database.

        `data` is a sample dict with at least: timestamp (datetime),
        counter_volume, counter_name/type/unit, resource_id, user_id,
        project_id, source and resource_metadata.
        """

        record = copy.deepcopy(data)

        # Truncate to whole minutes, then convert to an integer unix epoch.
        timestamp = record["timestamp"].replace(second=0, microsecond=0)
        timestamp = int((timestamp - datetime.datetime(1970, 1, 1)).total_seconds())
        value = float(record["counter_volume"])

        # One whisper file per (resource, counter), dots mapped to "_".
        record_path = (
            env_variables["whisper_path"] + data["resource_id"] + "_" + data["counter_name"].replace(".", "_") + ".wsp"
        )
        # if not os.path.isdir(os.path.dirname(record_path)):
        #     os.makedirs(os.path.dirname(record_path))

        # NOTE(review): `archieve_list` (sic) must be a retention schema
        # defined elsewhere in this module -- the misspelling has to match
        # its definition.
        if not os.path.isfile(record_path):
            whisper.create(record_path, archieve_list)

        whisper.update(record_path, value, timestamp)

        # add resource & meter to sqlite db
        conn = sqlite3.connect(env_variables["sql_db_path"])
        c = conn.cursor()
        # Insert the resource row only on first sight of this resource_id.
        c.execute("select count(*) from resources where resource_id=?", (data["resource_id"],))
        r = c.fetchone()[0]
        if r == 0:
            c.execute(
                "insert into resources (resource_id, user_id, project_id, source_id, resource_metadata)"
                + "values (?,?,?,?,?)",
                (
                    data["resource_id"],
                    data["user_id"],
                    data["project_id"],
                    data["source"],
                    json.dumps(data["resource_metadata"]),
                ),
            )

        # Likewise insert the meter row only once per (name, resource).
        c.execute(
            "select count(*) from meters where name=? and resource_id=?", (data["counter_name"], data["resource_id"])
        )
        r = c.fetchone()[0]
        if r == 0:
            c.execute(
                "insert into meters (name, type, unit, resource_id, project_id, user_id, source)"
                + "values (?,?,?,?,?,?,?)",
                (
                    data["counter_name"],
                    data["counter_type"],
                    data["counter_unit"],
                    data["resource_id"],
                    data["project_id"],
                    data["user_id"],
                    data["source"],
                ),
            )

        conn.commit()
        conn.close()
Esempio n. 8
0
def store(file, data):
	"""Write `data` into the whisper database at path `file`, creating the
	database (3 archives: 1s/1d, 10s/1d, 60s/30d) on first use.

	NOTE: the parameter name `file` shadows the builtin but is kept for
	backward compatibility with existing callers.
	"""
	print("store")
	import whisper
	import os
	# Only create the database when it does not exist yet.  The original
	# wrapped create() in a bare "except: pass", which also silently hid
	# real errors (bad permissions, invalid schema, ...).
	if not os.path.exists(file):
		whisper.create(file, [(1, 60*60*24), (10, 60*60*24), (60, 60*60*24*30)])
	print("update")
	whisper.update(file, data)
Esempio n. 9
0
    def _write_data(self, hostID, plugin, data_json):
        """Write one plugin's datapoints for `hostID` into per-datasource
        whisper files below the host's WSP_PATH (looked up in redis).

        Returns True when the payload was processed (individual
        datasource failures are logged and skipped), False when `plugin`
        is missing or the host has no WSP_PATH registered.
        """

        # For each plugin
        if plugin == None:
            return False

        data =  self.jsonToPython(data_json)
        data_timestamp = data.get('TimeStamp')

        # TODO dont write data if no infos

        # Get data_path
        data_path = self._redis_connexion.redis_hget("WSP_PATH", hostID)
        if data_path is None:
            return False

        wspPath = '%s/%s' % (data_path, plugin)
        for ds_name, value in data.get("Values", {}).iteritems():

            ds_path = '%s/%s.wsp' % (wspPath, ds_name)
            # Create wsp file - config wsp here
            if not os.path.isfile(ds_path):
                self._logger.warning("writewsp host : %s Create wsp file : %s"
                                        % (hostID, ds_path))
                # Create directory
                if not os.path.exists(wspPath):
                    try:
                        os.makedirs(wspPath)
                        self._logger.info("writewsp host : %s make directory : %s"
                                            % (hostID, wspPath))
                    except OSError:
                        self._logger.error("writewsp host : %s can't make directory : %s"
                                            % (hostID, wspPath))
                        continue
                try:
                    whisper.create(ds_path,
                                   [(60, 1440),   # --- Daily (1 Minute Average)
                                   (300, 2016),   # --- Weekly (5 Minute Average)
                                   (600, 4608),   # --- Monthly (10 Min Average)
                                   (3600, 8784)]) # --- Yearly (1 Hour Average)
                except Exception as e:
                    self._logger.error("writewsp host : %s Create wsp Error %s"
                        % (hostID, str(e)))
                    continue
            # Update whisper
            # NOTE(review): value/timestamp are passed as str() -- this
            # relies on whisper coercing them to float/int internally;
            # confirm against the whisper version in use.
            try:
                self._logger.debug("writewsp host : %s Update wsp "
                                   "Timestamp %s For value %s in file %s"
                                    % (hostID, data_timestamp, value, ds_path))
                whisper.update(ds_path, str(value), str(data_timestamp) )
            except Exception as e:
                self._logger.error("writewsp host : %s Update Error %s - %s"
                                    % (hostID, ds_path, e))
                continue
        self._dataNumber += 1
        return True
Esempio n. 10
0
 def test_01_add_point(self):
     """
     Add a point and check the created time range.
     """
     ts = int(time.time())
     whisper.update(FILENAME, 1234, timestamp=ts)
     header, points = whisper.fetch(FILENAME, 0, None)
     lower_bound, upper_bound, step = header
     # align to the archive's 60-second interval
     ts = ts - (ts % 60)
     # The upper bound is (roughly) 'now'.
     self.assertEqual(upper_bound, ts + SECONDS_PER_POINT)
     # The lower bound is (roughly) now minus the covered time.
     self.assertEqual(lower_bound, ts - (NUMBER_OF_POINTS - 1) * SECONDS_PER_POINT)
Esempio n. 11
0
    def _update(self, datapoints):
        """Persist `datapoints` into the current database.

            :datapoints: list of (epoch_timestamp, value) tuples, e.g.
                 [(1368977629, 10)]
        """
        if len(datapoints) != 1:
            # bulk write for anything but a single point
            whisper.update_many(self.path, datapoints)
        else:
            ts, val = datapoints[0]
            whisper.update(self.path, val, ts)
Esempio n. 12
0
    def _update(self, datapoints):
        """Persist `datapoints` into the current database.

            :datapoints: list of (epoch_timestamp, value) tuples, e.g.
                 [(1368977629, 10)]
        """
        if len(datapoints) == 1:
            # single point: use the scalar API
            ts, val = datapoints[0]
            whisper.update(self.path, val, ts)
            return
        whisper.update_many(self.path, datapoints)
Esempio n. 13
0
def update(path, datapoints):
    """Store `datapoints` (a list of (datetime, value) pairs) into the
    whisper database at `path`.

    Returns:
        True on success.
    Raises:
        Exception: when `datapoints` is empty.
    """
    # BUG FIX: the original read "len(datapoints)," -- the trailing comma
    # made nrOfPoints a 1-tuple, so "nrOfPoints == 1" could never be True
    # and the comparison "nrOfPoints > 1" compared a tuple with an int.
    nrOfPoints = len(datapoints)
    if nrOfPoints == 1:
        (timestamp, value) = datapoints[0]
        timestamp = timegm(timestamp.timetuple())
        whisper.update(path, value, timestamp)
    elif nrOfPoints > 1:
        # NOTE(review): this branch appends '.wsp' to the path while the
        # single-point branch does not -- confirm which form is intended.
        whisper.update_many(path + '.wsp', [
            (timegm(t.timetuple()), v) for (t, v) in datapoints])
    else:
        raise Exception("No Datapoint given")

    return True
Esempio n. 14
0
    def update(self, metric, dataline):
        """Parse "value timestamp" out of `dataline` and write it to the
        whisper file for `metric`, creating the file on first use."""
        wsp_path = os.path.join(self.root, metric) + '.wsp'

        if not os.path.exists(wsp_path):
            # first datapoint for this metric: create its database
            mkdirs(os.path.dirname(wsp_path))
            whisper.create(wsp_path, [(METRIC_SEC_PER_POINT, METRIC_POINTS_TO_STORE)])

        fields = dataline.split()
        whisper.update(wsp_path, float(fields[0]), float(fields[1]))
Esempio n. 15
0
    def create_whisper_hosts(self, ts=None):
        """Create worker1/worker2 one-minute whisper databases and seed
        each with one datapoint at `ts` (defaults to the current time)."""
        workers = [self.hostcpu.replace('hostname', name)
                   for name in ('worker1', 'worker2')]
        try:
            # directories may already exist from a previous run
            for wsp in workers:
                os.makedirs(wsp.replace('cpu.wsp', ''))
        except OSError:
            pass

        for wsp in workers:
            whisper.create(wsp, [(1, 60)])

        ts = ts or int(time.time())
        for value, wsp in enumerate(workers, start=1):
            whisper.update(wsp, value, ts)
Esempio n. 16
0
    def create_whisper_hosts(self, ts=None):
        """Create worker1/worker2 one-minute whisper databases and seed
        each with one datapoint at `ts` (defaults to the current time)."""
        workers = [self.hostcpu.replace('hostname', name)
                   for name in ('worker1', 'worker2')]
        try:
            # directories may already exist from a previous run
            for wsp in workers:
                os.makedirs(wsp.replace('cpu.wsp', ''))
        except OSError:
            pass

        for wsp in workers:
            whisper.create(wsp, [(1, 60)])

        ts = ts or int(time.time())
        for value, wsp in enumerate(workers, start=1):
            whisper.update(wsp, value, ts)
Esempio n. 17
0
    def test_diff(self):
        """diff() of two freshly-seeded databases reports the single
        differing point in archive 0 and nothing in archive 1."""
        other_db = "test-%s" % self.filename

        now = time.time()

        whisper.create(other_db, self.retention)
        whisper.create(self.filename, self.retention)
        whisper.update(other_db, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        diff_results = whisper.diff(other_db, self.filename)
        self._remove(other_db)

        self.assertEqual(
            diff_results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)])
Esempio n. 18
0
    def test_diff(self):
        """diff() of two freshly-seeded databases reports the single
        differing point in archive 0 and nothing in archive 1."""
        other_db = "test-%s" % self.filename

        now = time.time()

        whisper.create(other_db, self.retention)
        whisper.create(self.filename, self.retention)
        whisper.update(other_db, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        diff_results = whisper.diff(other_db, self.filename)
        self._remove(other_db)

        self.assertEqual(
            diff_results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)])
Esempio n. 19
0
    def test_file_diff(self):
        """file_diff() on two open file handles behaves like diff() on
        paths: one differing point in archive 0, nothing in archive 1."""
        other_db = "test-%s" % self.filename

        now = time.time()

        whisper.create(other_db, self.retention)
        whisper.create(self.filename, self.retention)
        whisper.update(other_db, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        # Merging 2 archives with different retentions should fail
        with open(other_db, 'rb') as fh_1, open(self.filename, 'rb+') as fh_2:
            diff_results = whisper.file_diff(fh_1, fh_2)
        self._remove(other_db)

        self.assertEqual(
            diff_results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)])
Esempio n. 20
0
    def test_file_diff(self):
        """file_diff() on two open file handles behaves like diff() on
        paths: one differing point in archive 0, nothing in archive 1."""
        other_db = "test-%s" % self.filename

        now = time.time()

        whisper.create(other_db, self.retention)
        whisper.create(self.filename, self.retention)
        whisper.update(other_db, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        # Merging 2 archives with different retentions should fail
        with open(other_db, 'rb') as fh_1, open(self.filename, 'rb+') as fh_2:
            diff_results = whisper.file_diff(fh_1, fh_2)
        self._remove(other_db)

        self.assertEqual(
            diff_results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)])
Esempio n. 21
0
    def _update(self, wsp=None, schema=None, sparse=False, useFallocate=False):
        """Create a whisper file and write 20 random datapoints into it:
        one via update() and the rest via update_many().

        Returns the generated (timestamp, value) list."""
        wsp = wsp or self.filename
        schema = schema or [(1, 20)]

        num_data_points = 20

        whisper.create(wsp, schema, sparse=sparse, useFallocate=useFallocate)
        base_ts = time.time() - num_data_points
        data = [(base_ts + 1 + offset, random.random() * 10)
                for offset in range(num_data_points)]

        # exercise the single-point API ...
        whisper.update(wsp, data[0][1], data[0][0])
        # ... and the bulk API
        whisper.update_many(wsp, data[1:])
        return data
Esempio n. 22
0
    def _update(self, wsp=None, schema=None):
        """Create a whisper file and write 20 random datapoints into it:
        one via update() and the rest via update_many().

        Returns the generated (timestamp, value) list."""
        wsp = wsp or self.db
        schema = schema or [(1, 20)]
        num_data_points = 20

        whisper.create(wsp, schema)

        base_ts = time.time() - num_data_points
        data = [(base_ts + 1 + offset, random.random() * 10)
                for offset in range(num_data_points)]

        # exercise the single-point API ...
        whisper.update(wsp, data[0][1], data[0][0])
        # ... and the bulk API
        whisper.update_many(wsp, data[1:])
        return data
Esempio n. 23
0
  def create_whisper_hosts(self):
    """Create worker1/worker2 whisper databases plus a non-whisper bogus
    file, seeding each worker with one datapoint at the current time."""
    worker1 = self.hostcpu.replace('hostname', 'worker1')
    worker2 = self.hostcpu.replace('hostname', 'worker2')
    bogus_file = os.path.join(settings.WHISPER_DIR, 'a/b/c/bogus_file.txt')

    try:
      # directories may already exist from a previous run
      os.makedirs(worker1.replace('cpu.wsp', ''))
      os.makedirs(worker2.replace('cpu.wsp', ''))
      os.makedirs(bogus_file.replace('bogus_file.txt', ''))
    except OSError:
      pass

    # the bogus file is deliberately NOT a whisper database
    open(bogus_file, 'a').close()
    for wsp in (worker1, worker2):
      whisper.create(wsp, [(1, 60)])

    now = int(time.time())
    whisper.update(worker1, 1, now)
    whisper.update(worker2, 2, now)
Esempio n. 24
0
    def create_whisper_hosts(self):
        """Create two seeded whisper databases (worker1/worker2), an empty
        non-whisper file (worker3) and a gzipped copy of worker1 (worker4)."""
        self.start_ts = int(time.time())
        try:
            # directories may already exist from a previous run
            for path, suffix in ((self.worker1, 'cpu.wsp'),
                                 (self.worker2, 'cpu.wsp'),
                                 (self.worker3, 'cpu.wsp'),
                                 (self.worker4, 'cpu.wsp.gz')):
                os.makedirs(path.replace(suffix, ''))
        except OSError:
            pass

        for wsp in (self.worker1, self.worker2):
            whisper.create(wsp, [(1, 60)])
        # worker3 is deliberately an empty, non-whisper file
        open(self.worker3, 'a').close()

        whisper.update(self.worker1, 1, self.start_ts)
        whisper.update(self.worker2, 2, self.start_ts)

        # worker4 is a gzip-compressed copy of worker1's database
        with open(self.worker1, 'rb') as f_in, gzip.open(self.worker4, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
Esempio n. 25
0
def waterlevel(db_name):
    """Reduce alert frequency after initial alert, reset on all-clear"""
    # Fetch the failure-marker series from a fixed epoch start (~1980).
    (times, fail_buffer) = whisper.fetch(db_name, 315550800)

    if fail_buffer.count(1) > 2:
        # Roll DB-over to 'FOLLOWUP_RETAINER'
        new_whisper_db_name = db_name + '.wsp2'
        whisper.create(new_whisper_db_name,
                       FOLLOWUP_RETAINER,
                       aggregationMethod='last')
        whisper.update(new_whisper_db_name, 1)
        os.rename(new_whisper_db_name, db_name)
        return (1)

    if fail_buffer.count(1) == 0:
        # NOTE(review): unlike the failure branch above, the new DB is
        # never renamed over db_name here, so this rollback leaves a
        # stray .wsp2 file and the original DB untouched -- confirm
        # whether an os.rename() call is missing.
        new_whisper_db_name = db_name + '.wsp2'
        whisper.create(new_whisper_db_name, RETAINER, aggregationMethod='last')
        whisper.update(new_whisper_db_name, 0)
        return (0)
    # NOTE(review): 1-2 failures fall through and implicitly return None.
Esempio n. 26
0
    def _update(self, wsp=None, schema=None):
        """Create a whisper file and write 20 random datapoints into it:
        one via update() and the rest via update_many().

        Returns the generated (timestamp, value) list."""
        wsp = wsp or self.filename
        schema = schema or [(1, 20)]

        num_data_points = 20

        whisper.create(wsp, schema)
        base_ts = time.time() - num_data_points
        data = [(base_ts + 1 + offset, random.random() * 10)
                for offset in range(num_data_points)]

        # exercise the single-point API ...
        whisper.update(wsp, data[0][1], data[0][0])
        # ... and the bulk API
        whisper.update_many(wsp, data[1:])
        return data
Esempio n. 27
0
    def create_whisper_hosts(self):
        """Create two seeded whisper databases (worker1/worker2), an empty
        non-whisper file (worker3) and a gzipped copy of worker1 (worker4)."""
        self.start_ts = int(time.time())
        try:
            # directories may already exist from a previous run
            for path, suffix in ((self.worker1, 'cpu.wsp'),
                                 (self.worker2, 'cpu.wsp'),
                                 (self.worker3, 'cpu.wsp'),
                                 (self.worker4, 'cpu.wsp.gz')):
                os.makedirs(path.replace(suffix, ''))
        except OSError:
            pass

        for wsp in (self.worker1, self.worker2):
            whisper.create(wsp, [(1, 60)])
        # worker3 is deliberately an empty, non-whisper file
        open(self.worker3, 'a').close()

        whisper.update(self.worker1, 1, self.start_ts)
        whisper.update(self.worker2, 2, self.start_ts)

        # worker4 is a gzip-compressed copy of worker1's database
        with open(self.worker1, 'rb') as f_in, gzip.open(self.worker4, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
Esempio n. 28
0
    def create_whisper_hosts(self):
        """Create worker1/worker2 whisper databases plus a non-whisper
        bogus file, seeding each worker with one datapoint at "now"."""
        worker1 = self.hostcpu.replace('hostname', 'worker1')
        worker2 = self.hostcpu.replace('hostname', 'worker2')
        bogus_file = os.path.join(settings.WHISPER_DIR, 'a/b/c/bogus_file.txt')

        try:
            # directories may already exist from a previous run
            os.makedirs(worker1.replace('cpu.wsp', ''))
            os.makedirs(worker2.replace('cpu.wsp', ''))
            os.makedirs(bogus_file.replace('bogus_file.txt', ''))
        except OSError:
            pass

        # the bogus file is deliberately NOT a whisper database
        open(bogus_file, 'a').close()
        for wsp in (worker1, worker2):
            whisper.create(wsp, [(1, 60)])

        now = int(time.time())
        whisper.update(worker1, 1, now)
        whisper.update(worker2, 2, now)
Esempio n. 29
0
    def test_render_view(self):
        """End-to-end test of the render view: JSON and PNG responses for
        a target with no data, then JSON datapoints once data exists."""
        url = reverse('graphite.render.views.renderView')

        # With no whisper data yet, a JSON render returns an empty list.
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Default format is a PNG image, with the same caching headers.
        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Create a 60-point 1-second database and store three datapoints.
        self.addCleanup(self.wipe_whisper)
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.5, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        # The JSON response now ends with the three stored points; the
        # point at ts-3 was never written, hence None.
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(
            end, [[None, ts - 3], [0.5, ts - 2], [0.4, ts - 1], [0.6, ts]])
Esempio n. 30
0
    def test_update_single_archive(self):
        """
        Update with a single leveled archive: stored values round-trip
        through fetch(), out-of-window timestamps raise, and a None
        timestamp defaults to "now" (with file locking enabled).
        """
        retention_schema = [(1, 20)]
        data = self._update(schema=retention_schema)
        # fetch the data
        fetch = whisper.fetch(self.filename, 0)   # all data
        fetch_data = fetch[1]

        for i, (timestamp, value) in enumerate(data):
            # is value in the fetched data?
            self.assertEqual(value, fetch_data[i])

        # check TimestampNotCovered
        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # in the future
            whisper.update(self.filename, 1.337, time.time() + 1)

        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # before the past (outside the 20-second retention window)
            whisper.update(self.filename, 1.337, time.time() - retention_schema[0][1] - 1)

        # When no timestamp is passed in, it should use the current time.
        # LOCK is forced on to also exercise the file-locking code path,
        # then restored so other tests are unaffected.
        original_lock = whisper.LOCK
        whisper.LOCK = True
        whisper.update(self.filename, 3.7337, None)
        fetched = whisper.fetch(self.filename, 0)[1]
        self.assertEqual(fetched[-1], 3.7337)

        whisper.LOCK = original_lock
Esempio n. 31
0
    def test_render_view(self):
        """End-to-end test of the render view: JSON and PNG responses for
        a target with no data, then JSON datapoints once data exists."""
        url = reverse('graphite.render.views.renderView')

        # With no whisper data yet, a JSON render returns an empty list.
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Default format is a PNG image, with the same caching headers.
        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Create a 60-point 1-second database and store three datapoints.
        self.addCleanup(self.wipe_whisper)
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.5, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        # The JSON response now ends with the three stored points; the
        # point at ts-3 was never written, hence None.
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(
            end, [[None, ts - 3], [0.5, ts - 2], [0.4, ts - 1], [0.6, ts]])
Esempio n. 32
0
    def test_update_single_archive(self):
        """
        Update with a single leveled archive: stored values round-trip
        through fetch(), out-of-window timestamps raise, and a None
        timestamp defaults to "now" (with file locking enabled).
        """
        retention_schema = [(1, 20)]
        data = self._update(schema=retention_schema)
        # fetch the data
        fetch = whisper.fetch(self.filename, 0)   # all data
        fetch_data = fetch[1]

        for i, (timestamp, value) in enumerate(data):
            # is value in the fetched data?
            self.assertEqual(value, fetch_data[i])

        # check TimestampNotCovered
        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # in the future
            whisper.update(self.filename, 1.337, time.time() + 1)

        with AssertRaisesException(
                whisper.TimestampNotCovered(
                    'Timestamp not covered by any archives in this database.')):
            # before the past (outside the 20-second retention window)
            whisper.update(self.filename, 1.337, time.time() - retention_schema[0][1] - 1)

        # When no timestamp is passed in, it should use the current time.
        # LOCK is forced on to also exercise the file-locking code path,
        # then restored so other tests are unaffected.
        original_lock = whisper.LOCK
        whisper.LOCK = True
        whisper.update(self.filename, 3.7337, None)
        fetched = whisper.fetch(self.filename, 0)[1]
        self.assertEqual(fetched[-1], 3.7337)

        whisper.LOCK = original_lock
Esempio n. 33
0
    def _update(self, wsp=None, schema=None, sparse=False, useFallocate=False):
        """Create a whisper file (removed again when the test finishes)
        and write 20 random datapoints: one via update(), the rest via
        update_many().  Returns the generated (timestamp, value) list."""
        wsp = wsp or self.filename
        schema = schema or [(1, 20)]

        num_data_points = 20

        # clean the file up after the test run
        self.addCleanup(self._remove, wsp)
        whisper.create(wsp, schema, sparse=sparse, useFallocate=useFallocate)

        base_ts = int(time.time()) - num_data_points
        data = [(base_ts + 1 + offset, random.random() * 10)
                for offset in range(num_data_points)]

        # exercise the single-point API ...
        whisper.update(wsp, data[0][1], data[0][0])
        # ... and the bulk API
        whisper.update_many(wsp, data[1:])

        return data
Esempio n. 34
0
    def test_update_single_archive(self):
        """Update with a single leveled archive"""
        retention_schema = [(1, 20)]
        data = self._update(schema=retention_schema)

        # every stored value must come back from a full fetch
        fetched_values = whisper.fetch(self.db, 0)[1]
        for index, (point_ts, point_value) in enumerate(data):
            self.assertEqual(point_value, fetched_values[index])

        # timestamps outside the archive window must be rejected
        window = retention_schema[0][1]
        with self.assertRaises(whisper.TimestampNotCovered):
            # in the future
            whisper.update(self.db, 1.337, time.time() + 1)
        with self.assertRaises(whisper.TimestampNotCovered):
            # before the past
            whisper.update(self.db, 1.337, time.time() - window - 1)

        self._removedb()
Esempio n. 35
0
    def test_update_single_archive(self):
        """Update with a single leveled archive"""
        retention_schema = [(1, 20)]
        data = self._update(schema=retention_schema)

        # every stored value must come back from a full fetch
        fetched_values = whisper.fetch(self.db, 0)[1]
        for index, (point_ts, point_value) in enumerate(data):
            self.assertEqual(point_value, fetched_values[index])

        # timestamps outside the archive window must be rejected
        window = retention_schema[0][1]
        with self.assertRaises(whisper.TimestampNotCovered):
            # in the future
            whisper.update(self.db, 1.337, time.time() + 1)
        with self.assertRaises(whisper.TimestampNotCovered):
            # before the past
            whisper.update(self.db, 1.337, time.time() - window - 1)

        self._removedb()
Esempio n. 36
0
def print_ipv6_event(cpu, data, size):
    """Perf-buffer callback for IPv6 events.

    Casts the raw buffer to a Data_ipv6 struct and records the event's
    span (span_us, microseconds) into the "DB/Latency" whisper database
    as milliseconds.  No explicit timestamp is passed to whisper.update,
    so the point is timestamped with the current time.
    `cpu` and `size` are supplied by the perf-buffer API and unused here.
    """
    event = ct.cast(data, ct.POINTER(Data_ipv6)).contents
    global start_ts
    # The original console-printing logic is retained below, disabled:
    # if args.time:
    #     if args.csv:
    #         print("%s," % strftime("%H:%M:%S"), end="")
    #     else:
    #         print("%-8s " % strftime("%H:%M:%S"), end="")
    # if args.timestamp:
    #     if start_ts == 0:
    #         start_ts = event.ts_us
    #     delta_s = (float(event.ts_us) - start_ts) / 1000000
    #     if args.csv:
    #         print("%.6f," % delta_s, end="")
    #     else:
    #         print("%-9.6f " % delta_s, end="")
    # print(format_string % (event.pid, event.task.decode(),
    # "6" if args.wide or args.csv else "",
    # "",
    # inet_ntop(AF_INET6, event.saddr), event.ports >> 32,
    # inet_ntop(AF_INET6, event.daddr), event.ports & 0xffffffff,
    # event.tx_b / 1024, event.rx_b / 1024, float(event.span_us) / 1000))
    # span_us is microseconds; store it as milliseconds.
    whisper.update("DB/Latency", float(event.span_us) / 1000)
Esempio n. 37
0
def pinger(hostname):
    """Get host ssh-connectivity, record in whisperDB"""

    # One whisper file per host, keyed by hostname.
    whisper_db_name = whisper_db_dir + hostname + '.wsp'

    if not os.path.exists(whisper_db_name):
        whisper.create(whisper_db_name, RETAINER, aggregationMethod='last')

    client = paramiko.SSHClient()
    client.load_system_host_keys()

    # Auto-accept unknown host keys so first contact does not fail.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    try:
        client.connect(hostname, timeout=10)
        # 0 == host reachable over SSH.
        whisper.update(whisper_db_name, 0)

        waterlevel(whisper_db_name)

    except Exception as e:
        # NOTE(review): matching the exception *text* is fragile --
        # presumably this targets socket.timeout; confirm and consider
        # an isinstance() check instead.
        if str(e) == "timed out":
            # 1 == host unreachable (connect timed out).
            whisper.update(whisper_db_name, 1)

            sys.exit(waterlevel(whisper_db_name))
Esempio n. 38
0
    def test_diff_with_empty(self):
        """whisper.diff(): NaN points are stripped only with ignore_empty."""
        testdb = "test-%s" % self.filename
        now = time.time()

        self.addCleanup(self._remove, testdb)
        whisper.create(testdb, self.retention)
        whisper.create(self.filename, self.retention)

        whisper.update(testdb, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        # A NaN datapoint one step back acts as an "empty" value.
        previous = now - self.retention[0][0]
        whisper.update(testdb, float('NaN'), previous)

        # ignore_empty=True drops the NaN slot from the diff entirely.
        results = whisper.diff(testdb, self.filename, ignore_empty=True)
        self.assertEqual(
            results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)],
        )

        # ignore_empty=False reports the NaN slot as well.
        results_empties = whisper.diff(
            testdb, self.filename, ignore_empty=False)
        expected = [
            (0, [(int(previous), float('NaN'), None),
                 (int(now), 1.0, 2.0)], 2),
            (1, [], 0),
        ]

        # float('NaN') != float('NaN'), so compare via repr() ...
        # (see https://mail.python.org/pipermail/python-ideas/2010-March/006945.html)
        self.assertEqual(repr(results_empties), repr(expected))
        # ... and verify the NaN is genuinely present.
        self.assertTrue(math.isnan(results_empties[0][1][0][1]))
Esempio n. 39
0
    def test_diff_with_empty(self):
        """whisper.diff(): NaN points are stripped only with ignore_empty."""
        # NOTE(review): near-duplicate of the identically named test
        # earlier in this file; only the line wrapping differs.
        testdb = "test-%s" % self.filename

        now = time.time()

        self.addCleanup(self._remove, testdb)
        whisper.create(testdb, self.retention)

        whisper.create(self.filename, self.retention)

        whisper.update(testdb, 1.0, now)
        whisper.update(self.filename, 2.0, now)

        # Purposefully insert nulls to strip out
        previous = now - self.retention[0][0]
        whisper.update(testdb, float('NaN'), previous)

        # With ignore_empty=True the NaN slot is omitted from the diff.
        results = whisper.diff(testdb, self.filename, ignore_empty=True)
        self.assertEqual(
            results,
            [(0, [(int(now), 1.0, 2.0)], 1), (1, [], 0)],
        )

        # With ignore_empty=False the NaN slot is reported too.
        results_empties = whisper.diff(testdb, self.filename, ignore_empty=False)
        expected = [(0, [(int(previous), float('NaN'), None), (int(now), 1.0, 2.0)], 2), (1, [], 0)]

        # Stupidly, float('NaN') != float('NaN'), so assert that the
        # repr() results are the same :/
        #
        # See this thread:
        #    https://mail.python.org/pipermail/python-ideas/2010-March/006945.html
        self.assertEqual(
            repr(results_empties),
            repr(expected),
        )
        # Since the above test is somewhat of a sham, ensure that there
        # is a nan where there should be.
        self.assertTrue(
            math.isnan(results_empties[0][1][0][1])
        )
Esempio n. 40
0
    def test_render_view(self):
        """Render endpoint: empty target, PNG default, JSON datapoints."""
        url = '/render'

        # With no data stored yet, JSON output is an empty list.
        response = self.app.get(url, query_string={'target': 'test',
                                                   'format': 'json'})
        self.assertEqual(json.loads(response.data.decode('utf-8')), [])

        # The default render format is a PNG image.
        response = self.app.get(url, query_string={'target': 'test'})
        self.assertEqual(response.headers['Content-Type'], 'image/png')

        # 1-second resolution, 60 points.
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.5, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        # The three stored points come back at the tail of the series.
        response = self.app.get(url, query_string={'target': 'test',
                                                   'format': 'json'})
        data = json.loads(response.data.decode('utf-8'))
        end = data[0]['datapoints'][-4:]
        self.assertEqual(
            end, [[None, ts - 3], [0.5, ts - 2], [0.4, ts - 1], [0.6, ts]])

        # maxDataPoints consolidates the series down.
        response = self.app.get(url, query_string={'target': 'test',
                                                   'maxDataPoints': 2,
                                                   'format': 'json'})
        data = json.loads(response.data.decode('utf-8'))
        # 1 is a time race cond
        self.assertTrue(len(data[0]['datapoints']) in [1, 2])

        # maxDataPoints larger than the series leaves it untouched.
        response = self.app.get(url, query_string={'target': 'test',
                                                   'maxDataPoints': 200,
                                                   'format': 'json'})
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(len(data[0]['datapoints']), 60)
Esempio n. 41
0
            new_whisper_db_name = db_name + '.wsp2'
            whisper.create(new_whisper_db_name,
                           RETAINER,
                           aggregationMethod='last')
            whisper.update(new_whisper_db_name, 0)
            os.rename(new_whisper_db_name, db_name)

            for admin in sys.argv[2:]:
                os.system('mail -s "' + sys.argv[1] + '" ' + admin +
                          '</dev/null')

    return (0)


# Whisper files live under a fixed scratch directory; the metric name
# (argv[1]) is base64-encoded so arbitrary strings map to safe filenames.
whisper_db_dir = '/var/tmp/whisperDB/'
whisper_db_name = str(whisper_db_dir + base64.b64encode(sys.argv[1]) + '.wsp')

if not os.path.exists(whisper_db_dir):
    # BUG FIX: os.path has no mkdir() attribute (the original raised
    # AttributeError on first run); create the directory with os.makedirs.
    os.makedirs(whisper_db_dir)

if not os.path.exists(whisper_db_name):
    whisper.create(whisper_db_name, RETAINER, aggregationMethod='last')

# Success/failure is selected by the name the script was invoked under
# (symlink dispatch): tinear.ok records 0, tinear.nok records 1.
if os.path.basename(sys.argv[0]) == "tinear.ok":
    whisper.update(whisper_db_name, 0)

if os.path.basename(sys.argv[0]) == "tinear.nok":
    whisper.update(whisper_db_name, 1)

waterlevel(whisper_db_name)
Esempio n. 42
0
 def update(self, filename, time, value):
     """Write a single datapoint, silently ignoring missing values."""
     if value is not None:
         whisper.update(filename, value, time)
Esempio n. 43
0
    def flush_data(self, schema):
        """Flush every completed time window for *schema* to whisper.

        Repeatedly takes the oldest window whose data can no longer
        change, writes the median of each metric's samples for that
        window, and returns the offset that is safe to commit (None on
        dry runs or when nothing was processed).
        """
        self.log.debug('Flushing data for schema {}'.format(schema))

        offset_to_return = None

        while True:
            #
            # Start with the oldest minute_boundary value, since it's possible
            # that we're catching up
            #
            sorted_boundaries = sorted(self.events[schema].keys())

            if len(sorted_boundaries) == 0:
                self.log.info('No data to process')
                return offset_to_return

            oldest_boundary = sorted_boundaries[0]
            newest_boundary = sorted_boundaries[-1]

            #
            # We don't want to flush the data that we've accumulated if there's a
            # chance that we might still get data in one of the relevant windows.
            #
            # We're (relatively) naively assuming that UPDATE_INTERVAL is enough
            # time to wait for any lagged messages, so we want to be sure that
            # it's been at least UPDATE_INTERVAL + WINDOW_SPAN since the oldest
            # boundary.
            #
            if (oldest_boundary + WINDOW_SPAN + UPDATE_INTERVAL) > newest_boundary or \
                    (oldest_boundary + WINDOW_SPAN + UPDATE_INTERVAL) > time.time():
                self.log.debug(
                    'All windows with sufficient data have been processed')
                self.log.debug(
                    'Returning last offset {}'.format(offset_to_return))
                return offset_to_return

            #
            # If we get here, we know that we have at least one window that can be
            # processed.
            #
            # Start by creating a list of the timestamps that are within the current
            # window.
            #
            boundaries_to_consider = [
                boundary for boundary in sorted_boundaries
                if boundary < (oldest_boundary + WINDOW_SPAN)
            ]
            self.log.info('[{}] Processing events in boundaries [{}]'.format(
                schema, boundaries_to_consider))

            # Make a dict of the metrics that have samples within this window, and
            # put all of the collected samples into a list.  Don't assume that every
            # metric present in the window is in the first boundary.
            metrics_with_samples = {}
            for boundary in boundaries_to_consider:
                for metric in self.events[schema][boundary]:
                    if metric not in metrics_with_samples:
                        metrics_with_samples[metric] = []
                    metrics_with_samples[metric].extend(
                        self.events[schema][boundary][metric])

            # Get the median for each metric and write:
            for metric, values in metrics_with_samples.items():
                median_value = self.median(values)
                if self.args.dry_run:
                    self.log.info('[{}] [{}] {}'.format(
                        metric, oldest_boundary + WINDOW_SPAN, median_value))
                    # If this is a dry run, we don't want to actually commit, but
                    self.log.debug(
                        '[{}] Dry run, so not actually committing to offset {}'
                        .format(schema, self.offsets[schema][oldest_boundary]))
                    offset_to_return = None
                else:
                    whisper.update(self.get_whisper_file(metric),
                                   median_value,
                                   timestamp=oldest_boundary + WINDOW_SPAN)

            #
            # Return the highest offset from the oldest boundary, and then
            # delete the oldest boundary from the events and offsets dicts
            #
            offset_to_return = None if self.args.dry_run else self.offsets[
                schema][oldest_boundary]
            del self.events[schema][oldest_boundary]
            del self.offsets[schema][oldest_boundary]

            # Set the oldest_boundary value to the next oldest boundary
            # (safe: the guard above guarantees at least two boundaries here).
            self.oldest_boundary[schema] = sorted_boundaries[1]

        # NOTE(review): unreachable -- the while-loop above only exits
        # via its return statements.
        return offset_to_return
(options, args) = option_parser.parse_args()

if len(args) != 2:
    option_parser.print_usage()
    sys.exit(1)

path1 = args[0]
path2 = args[1]
bakfile = path2 + '.bak'
shutil.copy2(path2, bakfile)

print "created backup file %s" % (bakfile)

from_time = int(options._from)
until_time = int(options.until)

(timeInfo, values) = whisper.fetch(path1, from_time, until_time)
(start, end, step) = timeInfo

t = start
for value in values:
    timestr = str(t)
    if value is None:
        next
    else:
        valuestr = "%f" % value
        datapoints = [timestr, valuestr]
        whisper.update(path2, valuestr, timestr)
    t += step
Esempio n. 45
0
    def test_render_view(self):
        """Exercise /render formats, including inf/-inf/nan handling."""
        url = reverse('graphite.render.views.renderView')

        # Unknown target: every format returns its empty representation,
        # with the standard caching headers set.
        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        self.assertEqual(response.content, "")
        self.assertEqual(response['Content-Type'], 'text/plain')

        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {
            'target': 'test',
            'format': 'dygraph'
        })
        self.assertEqual(json.loads(response.content), {})
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {
            'target': 'test',
            'format': 'rickshaw'
        })
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Seed a 1-second/60-point archive with edge-case float values.
        self.addCleanup(self.wipe_whisper)
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.1234567890123456789012, ts - 5)
        whisper.update(self.db, 0.4, ts - 4)
        whisper.update(self.db, 0.6, ts - 3)
        whisper.update(self.db, float('inf'), ts - 2)
        whisper.update(self.db, float('-inf'), ts - 1)
        whisper.update(self.db, float('nan'), ts)

        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        raw_data = ("None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,"
                    "0.12345678901234568,0.4,0.6,inf,-inf,nan")
        raw_response = "test,%d,%d,1|%s\n" % (ts - 59, ts + 1, raw_data)
        self.assertEqual(response.content, raw_response)

        # JSON serializes inf as 1e9999 (and nan as null).
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertIn('[1e9999, ' + str(ts - 2) + ']', response.content)
        self.assertIn('[-1e9999, ' + str(ts - 1) + ']', response.content)
        data = json.loads(response.content)
        end = data[0]['datapoints'][-7:]
        self.assertEqual(end,
                         [[None, ts - 6], [0.12345678901234568, ts - 5],
                          [0.4, ts - 4], [0.6, ts - 3], [float('inf'), ts - 2],
                          [float('-inf'), ts - 1], [None, ts]])

        # Dygraph uses millisecond timestamps and literal Infinity.
        response = self.client.get(url, {
            'target': 'test',
            'format': 'dygraph'
        })
        self.assertIn('[' + str((ts - 2) * 1000) + ', Infinity]',
                      response.content)
        self.assertIn('[' + str((ts - 1) * 1000) + ', -Infinity]',
                      response.content)
        data = json.loads(response.content)
        end = data['data'][-7:]
        self.assertEqual(end, [[
            (ts - 6) * 1000, None
        ], [(ts - 5) * 1000, 0.123456789012], [
            (ts - 4) * 1000, 0.4
        ], [(ts - 3) * 1000, 0.6], [
            (ts - 2) * 1000, float('inf')
        ], [(ts - 1) * 1000, float('-inf')], [ts * 1000, None]])

        response = self.client.get(url, {
            'target': 'test',
            'format': 'rickshaw'
        })
        data = json.loads(response.content)
        end = data[0]['datapoints'][-7:-1]
        self.assertEqual(end, [{
            'x': ts - 6,
            'y': None
        }, {
            'x': ts - 5,
            'y': 0.12345678901234568
        }, {
            'x': ts - 4,
            'y': 0.4
        }, {
            'x': ts - 3,
            'y': 0.6
        }, {
            'x': ts - 2,
            'y': float('inf')
        }, {
            'x': ts - 1,
            'y': float('-inf')
        }])

        # The final rickshaw point carries the NaN through unchanged.
        last = data[0]['datapoints'][-1]
        self.assertEqual(last['x'], ts)
        self.assertTrue(math.isnan(last['y']))
# Migration loop: replay MongoDB "meter" documents into per-resource
# whisper files and index resources/meters in a SQLite database.
skip_count = 0
stime = time.time()

for doc in db.meter.find().limit(max_doc_count).skip(skip_count):

    # NOTE(review): `i` and `count` come from earlier in the script;
    # `i` does not appear to be incremented inside this loop, so the
    # progress figure may be static -- confirm upstream.
    rate = (i * 100) / count

    # One whisper file per (resource, counter); dots in the counter name
    # become underscores so they don't look like path components.
    record_path = "New_Migrated_data_from_MongoDB/" + doc['resource_id'] + "_" + doc['counter_name'].replace('.',
                                                                                                             '_') + ".wsp"
    if not os.path.isdir(os.path.dirname(record_path)):
        os.makedirs(os.path.dirname(record_path))

    if not os.path.isfile(record_path):
        whisper.create(record_path, archive_list)

    # strftime("%s") converts the document timestamp to epoch seconds.
    whisper.update(record_path, doc['counter_volume'], int(doc['timestamp'].strftime("%s")))

    # Index each resource the first time it is seen.
    if not doc['resource_id'] in resource_list:
        resource_list.append(doc['resource_id'])
        rs = c.execute(
            "insert into resources(resource_id, user_id, project_id, source_id, resource_metadata) values(?,?,?,?,?)",
            (
            doc['resource_id'], doc['user_id'], doc['project_id'], doc['source'], json.dumps(doc['resource_metadata'])))

    # Index each meter (counter) the first time it is seen.
    if not doc['counter_name'] in meter_list:
        meter_list.append(doc['counter_name'])
        rs = c.execute(
            "insert into meters (name, type, unit) values (?,?,?)",
            (doc['counter_name'], doc['counter_type'], doc['counter_unit']))

    sys.stdout.write("\r%f%% metric value has been migrated." % rate)
Esempio n. 47
0
def insertReading(uuid, value, timestamp=None):
    """Append one reading to the sensor's whisper file.

    `uuid` names the .wsp file under WHISPER_DATA.  timestamp=None
    presumably lets whisper default to the current time -- confirm
    against whisper.update().  Exits the process on whisper errors.
    """
    dataFile = os.path.join(WHISPER_DATA, str(uuid) + ".wsp")
    try:
        whisper.update(dataFile, value, timestamp)
    except whisper.WhisperException as exc:
        # "as" syntax works on Python 2.6+ and 3.x (the original used
        # the Python-2-only comma form).
        raise SystemExit('[ERROR] %s' % str(exc))
Esempio n. 48
0
 def _addvalue(self, worker, stat, time, value, aggmethod='average'):
     """Record one stat sample, creating the whisper file on first use."""
     db_file = os.path.join(self.path, "%s.%s.db" % (worker, stat))
     if not os.path.exists(db_file):
         whisper.create(db_file, RETENTION, aggregationMethod=aggmethod)
     whisper.update(db_file, value, time)
# Restore default SIGPIPE handling so piping output does not raise.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)

now = int(time.time())

option_parser = optparse.OptionParser(
    usage='''%prog [options] path timestamp:value [timestamp:value]*''')

(options, args) = option_parser.parse_args()

if len(args) < 2:
    option_parser.print_help()
    sys.exit(1)

path = args[0]
# "N:" is shorthand for "now:" -- expand it before splitting.
raw_points = [point.replace('N:', '%d:' % now) for point in args[1:]]
datapoints = [tuple(point.split(':')) for point in raw_points]

try:
    # Single point uses the cheap path; multiple points go in one batch.
    if len(datapoints) == 1:
        timestamp, value = datapoints[0]
        whisper.update(path, value, timestamp)
    else:
        whisper.update_many(path, datapoints)
except whisper.WhisperException as exc:
    raise SystemExit('[ERROR] %s' % str(exc))
Esempio n. 50
0
    def _write_data(self, hostID, plugin, data_json):
        """Persist one plugin's sample values for a host into whisper files.

        Creates the per-datasource .wsp file (and its directory) on first
        sight.  Returns True when the payload was processed, False when
        `plugin` is missing or no whisper path is registered for the host.
        """
        # For each plugin -- use an identity test, not "== None".
        if plugin is None:
            return False

        data = self.jsonToPython(data_json)
        data_timestamp = data.get('TimeStamp')

        # TODO dont write data if no infos

        # Per-host base path is kept in redis under the WSP_PATH hash.
        data_path = self._redis_connexion.redis_hget("WSP_PATH", hostID)
        if data_path is None:
            return False

        wspPath = '%s/%s' % (data_path, plugin)
        for ds_name, value in data.get("Values", {}).iteritems():

            ds_path = '%s/%s.wsp' % (wspPath, ds_name)
            # Create wsp file - config wsp here
            if not os.path.isfile(ds_path):
                self._logger.warning(
                    "writewsp host : %s Create wsp file : %s" %
                    (hostID, ds_path))
                # Create directory
                if not os.path.exists(wspPath):
                    try:
                        os.makedirs(wspPath)
                        self._logger.info(
                            "writewsp host : %s make directory : %s" %
                            (hostID, wspPath))
                    except OSError:
                        self._logger.error(
                            "writewsp host : %s can't make directory : %s" %
                            (hostID, wspPath))
                        continue
                try:
                    whisper.create(
                        ds_path,
                        [
                            (60, 1440),  # --- Daily (1 Minute Average)
                            (300, 2016),  # --- Weekly (5 Minute Average)
                            (600, 4608),  # --- Monthly (10 Min Average)
                            (3600, 8784)
                        ])  # --- Yearly (1 Hour Average)
                except Exception as e:
                    self._logger.error(
                        "writewsp host : %s Create wsp Error %s" %
                        (hostID, str(e)))
                    continue
            # Update whisper
            try:
                self._logger.debug("writewsp host : %s Update wsp "
                                   "Timestamp %s For value %s in file %s" %
                                   (hostID, data_timestamp, value, ds_path))
                whisper.update(ds_path, str(value), str(data_timestamp))
            except Exception as e:
                self._logger.error("writewsp host : %s Update Error %s - %s" %
                                   (hostID, ds_path, e))
                continue
        self._dataNumber += 1
        return True
Esempio n. 51
0
# Script mode: fetch the selected range and rewrite each point through a
# user-supplied update_value(t, value) transform defined earlier.
path = args[0]
from_time = int(options._from)
until_time = int(options.until)

try:
    data = whisper.fetch(path, from_time, until_time)
    if not data:
        raise SystemExit('No data in selected timerange')
    (timeInfo, values_old) = data
except whisper.WhisperException as exc:
    raise SystemExit('[ERROR] %s' % str(exc))

(start, end, step) = timeInfo
t = start
for value_old in values_old:
    value_str_old = str(value_old)
    value_new = update_value(t, value_old)
    value_str_new = str(value_new)
    # --pretty renders human-readable timestamps in the change log.
    if options.pretty:
        timestr = time.ctime(t)
    else:
        timestr = str(t)

    print("%s\t%s -> %s" % (timestr, value_str_old, value_str_new))
    try:
        # Only write back real values; None leaves the slot untouched.
        if value_new is not None:
            whisper.update(path, value_new, t)
        t += step
    except whisper.WhisperException as exc:
        raise SystemExit('[ERROR] %s' % str(exc))
Esempio n. 52
0
 def update(self, filename, time, value):
     """Write a single (time, value) datapoint to the whisper file."""
     whisper.update(filename, value, time)
Esempio n. 53
0
def run(args):
  """CLI entry point: write args.value to args.filename.

  NOTE(review): timestamp=None presumably makes whisper default to the
  current time -- confirm against whisper.update()'s signature.
  """
  whisper.update(args.filename, args.value, timestamp=None)
    def test_heal(self):
        """_heal(): fills local gaps from remote; --overwrite also clobbers."""
        staging_dir = tempfile.mkdtemp(prefix='staging')
        storage_dir = tempfile.mkdtemp(prefix='storage')
        carbonate_sync.STORAGE_DIR = storage_dir
        remote = os.path.join(staging_dir, 'foo.wsp')
        local = os.path.join(storage_dir, 'foo.wsp')
        resolution = [(1, 10)]

        now = int(time.time())

        whisper.create(local, resolution)
        whisper.create(remote, resolution)

        # N, N, N, 6, 5, 4, 3, N, N, N
        whisper.update(local, 6.0, now - 6)
        whisper.update(local, 5.0, now - 5)
        whisper.update(local, 4.0, now - 4)
        whisper.update(local, 3.0, now - 3)

        # N, N, N, 6, 6, N, 3, 2, 1, N
        whisper.update(remote, 6.0, now - 6)
        whisper.update(remote, 6.0, now - 5)
        whisper.update(remote, 3.0, now - 3)
        whisper.update(remote, 2.0, now - 2)
        whisper.update(remote, 1.0, now - 1)

        # Baseline: the two archives disagree on four slots.
        results = whisper.diff(local, remote)
        expected = [
            (0, [
                (now - 5, 5.0, 6.0),
                (now - 4, 4.0, None),
                (now - 2, None, 2.0),
                (now - 1, None, 1.0),
            ], 6)
        ]
        self.assertEqual(results, expected)

        metrics_fs = ['foo.wsp']
        start_time = now - 10
        end_time = now

        attr = {
            'staging_dir': staging_dir,
            'metrics_fs': metrics_fs,
            'start_time': start_time,
            'end_time': end_time,
            'remote_user': '******',
            'remote_node': 'foo',
            'rsync_options': [],
            'ssh_options': [],
            'overwrite': False,
        }
        batch = carbonate_sync._Batch(**attr)

        carbonate_sync._heal(batch)

        # Check that we add missing points (local gaps filled from remote,
        # but conflicting local values are kept).
        expected = [(0, [(now - 5, 5.0, 6.0), (now - 4, 4.0, None)], 6)]
        results = whisper.diff(local, remote)
        self.assertEqual(results, expected)

        # With overwrite=True the remote value wins on conflicts too.
        attr['overwrite'] = True
        batch = carbonate_sync._Batch(**attr)
        carbonate_sync._heal(batch)

        expected = [(0, [(now - 4, 4.0, None)], 6)]
        results = whisper.diff(local, remote)
        self.assertEqual(results, expected)

        shutil.rmtree(staging_dir)
        shutil.rmtree(storage_dir)
Esempio n. 55
0
    def test_render_view(self):
        """Exercise /render formats, including inf/-inf/nan handling."""
        url = reverse('graphite.render.views.renderView')

        # Unknown target: every format returns its empty representation,
        # with the standard caching headers set.
        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        self.assertEqual(response.content, "")
        self.assertEqual(response['Content-Type'], 'text/plain')

        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {'target': 'test', 'format': 'dygraph'})
        self.assertEqual(json.loads(response.content), {})
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {'target': 'test', 'format': 'rickshaw'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Seed a 1-second/60-point archive with edge-case float values.
        self.addCleanup(self.wipe_whisper)
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.1234567890123456789012, ts - 5)
        whisper.update(self.db, 0.4, ts - 4)
        whisper.update(self.db, 0.6, ts - 3)
        whisper.update(self.db, float('inf'), ts - 2)
        whisper.update(self.db, float('-inf'), ts - 1)
        whisper.update(self.db, float('nan'), ts)

        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        raw_data = ("None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,"
                    "0.12345678901234568,0.4,0.6,inf,-inf,nan")
        raw_response = "test,%d,%d,1|%s\n" % (ts-59, ts+1, raw_data)
        self.assertEqual(response.content, raw_response)

        # JSON serializes inf as 1e9999 and nan as null.
        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertIn('[1e9999, ' + str(ts - 2) + ']', response.content)
        self.assertIn('[-1e9999, ' + str(ts - 1) + ']', response.content)
        self.assertIn('[null, ' + str(ts) + ']', response.content)
        data = json.loads(response.content)
        end = data[0]['datapoints'][-7:]
        self.assertEqual(
            end, [[None, ts - 6],
                  [0.12345678901234568, ts - 5],
                  [0.4, ts - 4],
                  [0.6, ts - 3],
                  [float('inf'), ts - 2],
                  [float('-inf'), ts - 1],
                  [None, ts]])

        # Dygraph uses millisecond timestamps and literal Infinity.
        response = self.client.get(url, {'target': 'test', 'format': 'dygraph'})
        self.assertIn('[' + str((ts - 2) * 1000) + ', Infinity]', response.content)
        self.assertIn('[' + str((ts - 1) * 1000) + ', -Infinity]', response.content)
        data = json.loads(response.content)
        end = data['data'][-7:]
        self.assertEqual(end,
            [[(ts - 6) * 1000, None],
            [(ts - 5) * 1000, 0.123456789012],
            [(ts - 4) * 1000, 0.4],
            [(ts - 3) * 1000, 0.6],
            [(ts - 2) * 1000, float('inf')],
            [(ts - 1) * 1000, float('-inf')],
            [ts * 1000, None]])

        response = self.client.get(url, {'target': 'test', 'format': 'rickshaw'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-7:-1]
        self.assertEqual(end,
            [{'x': ts - 6, 'y': None},
            {'x': ts - 5, 'y': 0.12345678901234568},
            {'x': ts - 4, 'y': 0.4},
            {'x': ts - 3, 'y': 0.6},
            {'x': ts - 2, 'y': float('inf')},
            {'x': ts - 1, 'y': float('-inf')}])

        # The final rickshaw point carries the NaN through unchanged.
        last = data[0]['datapoints'][-1]
        self.assertEqual(last['x'], ts)
        self.assertTrue(math.isnan(last['y']))
Esempio n. 56
0
    def test_render_view(self):
        """Exercise /render output formats with plain float datapoints."""
        url = reverse('graphite.render.views.renderView')

        # Unknown target: every format returns its empty representation,
        # with the standard caching headers set.
        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        self.assertEqual(response.content, "")
        self.assertEqual(response['Content-Type'], 'text/plain')

        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {'target': 'test'})
        self.assertEqual(response['Content-Type'], 'image/png')
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {
            'target': 'test',
            'format': 'dygraph'
        })
        self.assertEqual(json.loads(response.content), {})
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        response = self.client.get(url, {
            'target': 'test',
            'format': 'rickshaw'
        })
        self.assertEqual(json.loads(response.content), [])
        self.assertTrue(response.has_header('Expires'))
        self.assertTrue(response.has_header('Last-Modified'))
        self.assertTrue(response.has_header('Cache-Control'))

        # Seed a 1-second/60-point archive with three values.
        self.addCleanup(self.wipe_whisper)
        whisper.create(self.db, [(1, 60)])

        ts = int(time.time())
        whisper.update(self.db, 0.1234567890123456789012, ts - 2)
        whisper.update(self.db, 0.4, ts - 1)
        whisper.update(self.db, 0.6, ts)

        response = self.client.get(url, {'target': 'test', 'format': 'raw'})
        raw_data = ("None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,None,None,None,None,None,None,None,None,None,"
                    "None,None,0.12345678901234568,0.4,0.6")
        raw_response = "test,%d,%d,1|%s\n" % (ts - 59, ts + 1, raw_data)
        self.assertEqual(response.content, raw_response)

        response = self.client.get(url, {'target': 'test', 'format': 'json'})
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(end, [[None, ts - 3], [0.12345678901234568, ts - 2],
                               [0.4, ts - 1], [0.6, ts]])

        # Dygraph uses millisecond timestamps.
        response = self.client.get(url, {
            'target': 'test',
            'format': 'dygraph'
        })
        data = json.loads(response.content)
        end = data['data'][-4:]
        self.assertEqual(
            end, [[(ts - 3) * 1000, None], [(ts - 2) * 1000, 0.123456789012],
                  [(ts - 1) * 1000, 0.4], [ts * 1000, 0.6]])

        response = self.client.get(url, {
            'target': 'test',
            'format': 'rickshaw'
        })
        data = json.loads(response.content)
        end = data[0]['datapoints'][-4:]
        self.assertEqual(end, [{
            'x': ts - 3,
            'y': None
        }, {
            'x': ts - 2,
            'y': 0.12345678901234568
        }, {
            'x': ts - 1,
            'y': 0.4
        }, {
            'x': ts,
            'y': 0.6
        }])
Esempio n. 57
0
    def update_rrd(self, postfix, timestamp, value):
        """Store one value, skipping None; timestamp/1000 (presumably ms -> s)."""
        if value is None:
            return
        whisper.update(self.get_rrd_file(postfix), value, timestamp / 1000)
Esempio n. 58
0
 def save(self, value, timestamp):
     """Persist one sample for this metric, logging it at debug level."""
     logger.debug("Saving %s: %f" % (self.name, value))
     whisper.update(self.path, value, timestamp)
Esempio n. 59
0
#!/usr/bin/env python
# Minimal whisper-update CLI: <script> path ts:value [ts:value ...]
# A timestamp of "N" is shorthand for "now".  (Python 2 syntax.)

import sys, time
import whisper
from optparse import OptionParser

now = int( time.time() )

option_parser = OptionParser(
    usage='''%prog [options] path timestamp:value [timestamp:value]*''')

(options, args) = option_parser.parse_args()

if len(args) < 2:
  option_parser.print_usage()
  sys.exit(1)

path = args[0]
datapoint_strings = args[1:]
# Expand the "N:" shorthand to the current epoch time.
datapoint_strings = [point.replace('N:', '%d:' % now)
                     for point in datapoint_strings]
datapoints = [tuple(point.split(':')) for point in datapoint_strings]

if len(datapoints) == 1:
  timestamp,value = datapoints[0]
  whisper.update(path, value, timestamp)
else:
  # Python 2 print statement: echoes the parsed points before batching.
  print datapoints
  whisper.update_many(path, datapoints)
Esempio n. 60
0
# Script mode: fetch the selected range from the archive and rewrite
# each point through a user-supplied update_value(t, value) transform.
from_time = int( options._from )
until_time = int( options.until )

try:
  data = whisper.fetch(path, from_time, until_time)
  if not data:
    raise SystemExit('No data in selected timerange')
  (timeInfo, values_old) = data
except whisper.WhisperException as exc:
  raise SystemExit('[ERROR] %s' % str(exc))

(start,end,step) = timeInfo
t = start
for value_old in values_old:
  value_str_old = str(value_old)
  value_new = update_value(t, value_old)
  value_str_new = str(value_new)
  # --pretty renders human-readable timestamps in the change log.
  if options.pretty:
    timestr = time.ctime(t)
  else:
    timestr = str(t)

  print("%s\t%s -> %s" % (timestr,value_str_old, value_str_new))
  try:
    # Only write back real values; None leaves the slot untouched.
    if value_new is not None:
      whisper.update(path, value_new, t)
    t += step
  except whisper.WhisperException as exc:
    raise SystemExit('[ERROR] %s' % str(exc))