Example #1
    def _archive_fetch(self, fh, header, archive, from_time, until_time):
        from_time = roundup(from_time, archive['sec_per_point'])
        until_time = roundup(until_time, archive['sec_per_point'])
        tag_cnt = len(header['tag_list'])
        null_point = (None, ) * tag_cnt

        base_point = self._read_base_point(fh, archive, header)
        base_ts = base_point[0]

        if base_ts == 0:
            step = archive['sec_per_point']
            cnt = (until_time - from_time) / step
            time_info = (from_time, until_time, step)
            val_list = [null_point] * cnt
            return (header, time_info, val_list)

        from_offset = self._timestamp2offset(from_time, base_ts, header,
                                             archive)
        until_offset = self._timestamp2offset(until_time, base_ts, header,
                                              archive)

        fh.seek(from_offset)
        if from_offset < until_offset:
            series_str = fh.read(until_offset - from_offset)
        else:
            archive_end = archive['offset'] + archive['size']
            series_str = fh.read(archive_end - from_offset)
            fh.seek(archive['offset'])
            series_str += fh.read(until_offset - archive['offset'])

        ## unpack series string
        point_format = header['point_format']
        byte_order, point_type = point_format[0], point_format[1:]
        cnt = len(series_str) / header['point_size']
        series_format = byte_order + point_type * cnt
        unpacked_series = struct.unpack(series_format, series_str)

        ## construct value list
        # pre-allocate entire list for speed
        val_list = [null_point] * cnt
        step = tag_cnt + 1
        sec_per_point = archive['sec_per_point']
        for i in xrange(0, len(unpacked_series), step):
            point_ts = unpacked_series[i]
            if from_time <= point_ts < until_time:
                val = unpacked_series[i + 1:i + step]
                idx = (point_ts - from_time) / sec_per_point
                val_list[idx] = self._conver_null_value(val)

        time_info = (from_time, until_time, sec_per_point)
        return header, time_info, val_list
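
The fetch above and the tests below lean on a roundup helper that is not shown on this page. Judging from the expected results, it rounds a timestamp up to the next multiple of the archive step (e.g. roundup(1411628779, 3) == 1411628781). A minimal sketch under that assumption; the project's actual implementation may differ:

    def roundup(ts, step):
        # Assumed behaviour: round `ts` up to the next multiple of `step`,
        # leaving already-aligned timestamps unchanged.
        remainder = ts % step
        if remainder == 0:
            return ts
        return ts + step - remainder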
Example #2
    def test_update_propagate(self):
        now_ts = 1411628779
        num_points = 6
        points = [(now_ts - i, self._gen_val(i)) for i in range(1, num_points+1)]
        self.storage.update(self.path, points, now_ts)

        from_ts = now_ts - num_points - 1
        series = self.storage.fetch(self.path, from_ts, now=now_ts)
        time_info = (from_ts, roundup(now_ts, 3), 3)
        expected = time_info, [(5.0, 15.0), (2.0, 12.0), self.null_point]
        self.assertEqual(series[1:], expected)
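
The test fixtures use a _gen_val helper that does not appear on this page. From the expected tuples (seed 5 becomes (5.0, 15.0), seed 26 with num=3 becomes (26.0, 36.0, 46.0)), it presumably produces one float per tag, offset by 10 per tag. A hypothetical reconstruction, intended only to make the assertions readable:

    def _gen_val(self, seed, num=2):
        # Hypothetical reconstruction of the fixture helper: one value per
        # tag, stepping by 10, e.g. _gen_val(5) -> (5.0, 15.0).
        return tuple(float(seed + 10 * k) for k in range(num))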
Example #3
    def test_null_point(self):
        now_ts = 1411628779
        num_points = 6
        points = [(now_ts - i, self._gen_val(i)) for i in range(1, num_points+1)]
        # change the last two points to null value
        points[4] = (now_ts - 5, (NULL_VALUE, NULL_VALUE))
        points[5] = (now_ts - 6, (NULL_VALUE, NULL_VALUE))

        self.storage.update(self.path, points, now_ts)

        from_ts = now_ts - num_points - 1
        series = self.storage.fetch(self.path, from_ts, now=now_ts)
        time_info = (from_ts, roundup(now_ts, 3), 3)
        expected = time_info, [self.null_point, (2.0, 12.0), self.null_point]
        self.assertEqual(series[1:], expected)
Example #4
    def test_time_range(self):
        now_ts = 1411628779
        # downsample time of archive2: 1411628760 = floor(1411628779 / (6*5)) * (6*5)
        point_seeds_list = [range(19, 30), range(5, 2)]
        mtime = None
        for i, point_seeds in enumerate(point_seeds_list):
            if i != 0:
                mtime = now_ts - max(point_seeds_list[i - 1])
            points = [(now_ts - i, self._gen_val(i, num=3)) for i in point_seeds]
            self.storage.update(self.path, points, now_ts, mtime)
        from_ts = 1411628760 - 2 * 6
        until_ts = 1411628760
        series = self.storage.fetch(self.path, from_ts, until_ts,
                                    now=from_ts + 180 + 1)
        time_info = (from_ts, roundup(until_ts, 6), 6)
        values = [(26.0, 36.0, 46.0), (20.0, 30.0, 40.0)]
        expected = (time_info, values)
        self.assertEqual(series[1:], expected)
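
A quick check of the window asserted above: from_ts = 1411628760 - 2*6 = 1411628748 and roundup(until_ts, 6) = 1411628760, so a 6-second step covers (1411628760 - 1411628748) / 6 = 2 slots, which is why values holds exactly two tuples.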
Example #5
    def test_update_propagate_with_special_start_time(self):
        now_ts = 1411628779
        # start time is 1411628760
        point_seeds_list = [range(10, 20), range(1, 7)]
        mtime = None
        for i, point_seeds in enumerate(point_seeds_list):
            if i != 0:
                mtime = now_ts - max(point_seeds_list[i - 1])
            points = [(now_ts - i, self._gen_val(i)) for i in point_seeds]
            self.storage.update(self.path, points, now_ts, mtime)
        from_ts = 1411628760
        until_ts = from_ts + 15
        series = self.storage.fetch(self.path, from_ts, until_ts,
                                    now=from_ts + 60 + 1)
        time_info = (from_ts, roundup(until_ts, 3), 3)
        values = [(17.0, 27.0), (14.0, 24.0), (11.0, 21.0), (10.0, 20.0), (5.0, 15.0)]
        expected = (time_info, values)
        self.assertEqual(series[1:], expected)
Example #6
    def test_update_propagate(self):
        now_ts = 1411628779
        point_seeds_list = [range(30, 45), range(15)]
        mtime = None
        for i, point_seeds in enumerate(point_seeds_list):
            if i != 0:
                mtime = now_ts - max(point_seeds_list[i - 1])
            points = [(now_ts - i, self._gen_val(i)) for i in point_seeds]
            self.storage.update(self.path, points, now_ts, mtime)

        from_ts = now_ts - 60 - 1
        series = self.storage.fetch(self.path, from_ts, now=now_ts)
        time_info = (from_ts, roundup(now_ts, 3), 3)
        null = self.null_point
        values = [null, null, null, null, null, (44.0, 54.0), (41.0, 51.0),
                  (38.0, 48.0), (35.0, 45.0), (32.0, 42.0), (30.0, 40.0),
                  null, null, null, null, (14.0, 24.0), (11.0, 21.0), (8.0, 18.0),
                  (5.0, 15.0), null, null]
        expected = time_info, values
        self.assertEqual(series[1:], expected)
Example #7
    def _propagate(self, fh, header, higher, lower, timestamp_range,
                   lower_idx):
        """
        propagte update to low precision archives.
        """
        from_time, until_time = timestamp_range
        timeunit = Storage.get_propagate_timeunit(lower['sec_per_point'],
                                                  higher['sec_per_point'],
                                                  header['x_files_factor'])
        from_time_boundary = from_time / timeunit
        until_time_boundary = until_time / timeunit
        if (from_time_boundary
                == until_time_boundary) and (from_time % timeunit) != 0:
            return False

        if lower['sec_per_point'] <= timeunit:
            lower_interval_end = until_time_boundary * timeunit
            lower_interval_start = min(lower_interval_end - timeunit,
                                       from_time_boundary * timeunit)
        else:
            lower_interval_end = roundup(until_time, lower['sec_per_point'])
            lower_interval_start = from_time - from_time % lower[
                'sec_per_point']

        fh.seek(higher['offset'])
        packed_base_interval = fh.read(LONG_SIZE)
        higher_base_interval = struct.unpack(LONG_FORMAT,
                                             packed_base_interval)[0]

        if higher_base_interval == 0:
            higher_first_offset = higher['offset']
        else:
            higher_first_offset = self._timestamp2offset(
                lower_interval_start, higher_base_interval, header, higher)

        higher_point_num = (lower_interval_end -
                            lower_interval_start) / higher['sec_per_point']
        higher_size = higher_point_num * header['point_size']
        relative_first_offset = higher_first_offset - higher['offset']
        relative_last_offset = (relative_first_offset +
                                higher_size) % higher['size']
        higher_last_offset = relative_last_offset + higher['offset']

        # get unpacked series str
        # TODO: abstract this to a function
        fh.seek(higher_first_offset)
        if higher_first_offset < higher_last_offset:
            series_str = fh.read(higher_last_offset - higher_first_offset)
        else:
            higher_end = higher['offset'] + higher['size']
            series_str = fh.read(higher_end - higher_first_offset)
            fh.seek(higher['offset'])
            series_str += fh.read(higher_last_offset - higher['offset'])

        # now we unpack the series data we just read
        point_format = header['point_format']
        byte_order, point_type = point_format[0], point_format[1:]
        point_num = len(series_str) / header['point_size']
        # assert point_num == higher_point_num
        series_format = byte_order + (point_type * point_num)
        unpacked_series = struct.unpack(series_format, series_str)

        # and finally we construct a list of values
        point_cnt = (lower_interval_end -
                     lower_interval_start) / lower['sec_per_point']
        tag_cnt = len(header['tag_list'])
        agg_cnt = lower['sec_per_point'] / higher['sec_per_point']
        step = (tag_cnt + 1) * agg_cnt
        lower_points = [None] * point_cnt

        unpacked_series = unpacked_series[::-1]
        ts = lower_interval_end
        for i in xrange(0, len(unpacked_series), step):
            higher_points = unpacked_series[i:i + step]
            ts -= higher['sec_per_point'] * agg_cnt
            agg_value = self._get_agg_value(higher_points, tag_cnt,
                                            header['agg_id'],
                                            lower_interval_start,
                                            lower_interval_end)
            lower_points[i / step] = (ts, agg_value)

        lower_points = [x for x in lower_points
                        if x and x[0]]  # drop unset slots and zero timestamps
        timestamp_range = (lower_interval_start,
                           max(lower_interval_end, until_time))
        self._update_archive(fh, header, lower, lower_points, lower_idx,
                             timestamp_range)
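
The wrap-around read marked with the TODO above is duplicated almost verbatim in _archive_fetch (Example #1). One way it could be factored out, keeping the same archive dict layout ('offset', 'size') used above; the helper name is hypothetical, not part of the project:

    def _read_wrapped(self, fh, archive, first_offset, last_offset):
        # Read the packed points between two offsets, wrapping past the end
        # of the circular archive when the range crosses its boundary.
        fh.seek(first_offset)
        if first_offset < last_offset:
            return fh.read(last_offset - first_offset)
        archive_end = archive['offset'] + archive['size']
        series_str = fh.read(archive_end - first_offset)
        fh.seek(archive['offset'])
        return series_str + fh.read(last_offset - archive['offset'])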