Example #1
 def test_combine_self_empty(self):
     d = _init([[10, 1, 0, 1, 1], [10, 2, 0, 2, 2], [10, 3, 0, 3, 3], [10, 4, 0, 4, 4], [10, 0, 0, 0, 0], [10, 1, 0, 1, 1]])
     s1 = Statistics()
     s2 = Statistics(stats=d)
     s1.combine(s2)
     self.assertEqual(10, len(s1))
     assert_stat_close(d, s1.value)
Example #2
 def test_combine_self_empty(self):
     d = np.array([[1, 0, 1, 1], [2, 0, 2, 2], [3, 0, 3, 3]], dtype=np.float32)
     s1 = Statistics()
     s2 = Statistics(length=10, stats=d)
     s1.combine(s2)
     self.assertEqual(10, len(s1))
     np.testing.assert_allclose(d, s1.value)
Example #3
 def test_combine_other_empty(self):
     s1 = Statistics()
     s2 = Statistics()
     s1.add(1.0)
     s3 = s1.combine(s2)
     self.assertEqual(1, len(s3))
     self.assertEqual(1.0, s3.mean)
Example #4
    def statistics_get(self, t1, t2):
        """Get the statistics for the collected sample data over a time range.

        :param t1: The starting time in seconds relative to the streaming start time.
        :param t2: The ending time in seconds.
        :return: The statistics data structure.  See :meth:`joulescope.driver.Driver.statistics_get`
            for details.
        """
        log.debug('statistics_get(%s, %s)', t1, t2)
        s1 = self.time_to_sample_id(t1)
        s2 = self.time_to_sample_id(t2)
        if s1 is None or s2 is None:
            return None

        (k1, k2), s = self._get_reduction_stats(s1, s2)
        if s1 < k1:
            length = k1 - s1
            s_start = self.get(s1, k1, increment=length)
            s.combine(Statistics(length=length, stats=s_start[0, :, :]))
        if s2 > k2:
            length = s2 - k2
            s_stop = self.get(k2, s2, increment=length)
            s.combine(Statistics(length=length, stats=s_stop[0, :, :]))

        t_start = s1 / self.sampling_frequency
        t_stop = s2 / self.sampling_frequency
        return stats_to_api(s.value, t_start, t_stop)
Example #5
 def test_combine(self):
     d1 = _init([[1, 1, 0, 1, 1], [1, 2, 0, 2, 2], [1, 3, 0, 3, 3], [1, 4, 0, 4, 4], [1, 0, 0, 0, 0], [1, 1, 0, 1, 1]])
     d2 = _init([[1, 3, 0, 3, 3], [1, 4, 0, 4, 4], [1, 5, 0, 5, 5], [1, 6, 0, 6, 6], [1, 1, 0, 1, 1], [1, 0, 0, 0, 0]])
     e = _init([[2, 2, 1, 1, 3], [2, 3, 1, 2, 4], [2, 4, 1, 3, 5], [2, 5, 1, 4, 6], [2, 0.5, 0.25, 0, 1], [2, 0.5, 0.25, 0, 1]])
     s1 = Statistics(stats=d1)
     s2 = Statistics(stats=d2)
     s1.combine(s2)
     self.assertEqual(2, len(s1))
     assert_stat_close(e, s1.value)
Example #6
 def test_combine(self):
     d1 = np.array([[1, 0, 1, 1], [2, 0, 2, 2], [3, 0, 3, 3]], dtype=np.float32)
     d2 = np.array([[3, 0, 3, 3], [4, 0, 4, 4], [5, 0, 5, 5]], dtype=np.float32)
     e = np.array([[2, 1, 1, 3], [3, 1, 2, 4], [4, 1, 3, 5]], dtype=np.float32)
     s1 = Statistics(length=1, stats=d1)
     s2 = Statistics(length=1, stats=d2)
     s1.combine(s2)
     self.assertEqual(2, len(s1))
     np.testing.assert_allclose(e, s1.value)
Example #7
    def _get_reduction_stats(self, start, stop):
        """Get statistics over the reduction

        :param start: The starting sample identifier (inclusive).
        :param stop: The ending sample identifier (exclusive).
        :return: The tuple of ((sample_start, sample_stop), :class:`Statistics`).
        """
        log.debug('_get_reduction_stats(%s, %s)', start, stop)
        s = Statistics()
        sz = self.config['samples_per_reduction']
        incr = self.config['samples_per_block'] // sz
        r_start = start // sz
        if (r_start * sz) < start:
            r_start += 1
        r_stop = stop // sz
        if r_start >= r_stop:  # cannot use the reductions
            s_start = r_start * sz
            return (s_start, s_start), s
        r_idx = 0

        self._fh.seek(self._data_start_position)
        if self._f.advance() != datafile.TAG_COLLECTION_START:
            raise ValueError('data section must be single collection')
        while True:
            tag, _ = self._f.peek_tag_length()
            if tag is None or tag == datafile.TAG_COLLECTION_END:
                break
            elif tag != datafile.TAG_COLLECTION_START:
                raise ValueError('invalid file format: not collection start')
            r_idx_next = r_idx + incr
            if r_start >= r_idx_next:
                self._f.skip()
                r_idx = r_idx_next
                continue
            self._f.collection_goto_end()
            tag, value = next(self._f)
            if tag != datafile.TAG_COLLECTION_END:
                raise ValueError('invalid file format: not collection end')
            data = np.frombuffer(value, dtype=np.float32).reshape((-1, 3, 4))
            r_idx_start = 0
            r_idx_stop = incr
            if r_idx < r_start:
                r_idx_start = r_start - r_idx
            if r_idx_next > r_stop:
                r_idx_stop = r_stop - r_idx
            if r_idx_stop > len(data):
                r_idx_stop = len(data)
            length = r_idx_stop - r_idx_start
            r = reduction_downsample(data, r_idx_start, r_idx_stop, length)
            s.combine(Statistics(length=length * sz, stats=r[0, :, :]))
            r_idx = r_idx_next
            if r_idx_next >= r_stop:
                break
        return (r_start * sz, r_stop * sz), s
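
Note on the r_start / r_stop arithmetic above: it maps a raw sample range onto whole reductions, rounding the start up so a partial leading reduction is excluded and rounding the stop down so a partial trailing reduction is excluded; the caller then covers the excluded edges from raw samples. A standalone illustration (the samples_per_reduction value of 100 is assumed for the example only):

sz = 100                       # assumed samples_per_reduction
start, stop = 150, 980         # requested sample range [start, stop)

r_start = start // sz          # 1
if (r_start * sz) < start:     # round up past the partial leading reduction
    r_start += 1               # -> 2
r_stop = stop // sz            # round down: partial trailing reduction excluded -> 9

# Reductions 2..8 cover samples [200, 900); the caller must handle
# samples [150, 200) and [900, 980) separately from raw data.
print((r_start * sz, r_stop * sz))   # (200, 900)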
Example #8
 def test_combine(self):
     s1 = Statistics()
     for x in np.arange(0, 11):
         s1.add(x)
     s2 = Statistics()
     for x in np.arange(11, 21):
         s2.add(x)
     s3 = Statistics()
     for x in np.arange(0, 21):
         s3.add(x)
     sc = s1.combine(s2)
     self.assertEqual(len(s3), len(sc))
     self.assertEqual(s3.mean, sc.mean)
     self.assertEqual(s3.var, sc.var)
     self.assertEqual(s3.std, sc.std)
     self.assertEqual(s3.min, sc.min)
     self.assertEqual(s3.max, sc.max)
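
The test above exercises the standard parallel combination of running statistics: two summaries (count, mean, variance) merge into one without revisiting the samples. A minimal numpy sketch of that identity follows; it is a worked illustration, not the Statistics.combine implementation, and it uses population variances (np.var with the default ddof=0):

import numpy as np

def combine_stats(n1, m1, v1, n2, m2, v2):
    # Parallel combination of (count, mean, population variance) summaries.
    n = n1 + n2
    if n == 0:
        return 0, 0.0, 0.0
    mean = (n1 * m1 + n2 * m2) / n
    delta = m2 - m1
    var = (n1 * v1 + n2 * v2) / n + (n1 * n2) * delta * delta / (n * n)
    return n, mean, var

# Mirrors test_combine: [0, 10] and [11, 20] combined vs. [0, 20] directly.
a, b, c = np.arange(0, 11), np.arange(11, 21), np.arange(0, 21)
n, m, v = combine_stats(len(a), a.mean(), a.var(), len(b), b.mean(), b.var())
assert n == len(c)
assert np.isclose(m, c.mean()) and np.isclose(v, c.var())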
Example #9
 def test_add_zero_once(self):
     s = Statistics()
     s.add(0.0)
     self.assertEqual(1, len(s))
     self.assertEqual(0.0, s.mean)
     self.assertEqual(0.0, s.var)
     self.assertEqual(0.0, s.std)
     self.assertEqual(0.0, s.min)
     self.assertEqual(0.0, s.max)
Example #10
 def test_initialize_empty(self):
     s = Statistics()
     self.assertEqual(0, len(s))
     fields = ('length', 'mean', 'variance', 'min', 'max')
     self.assertEqual(fields, s.value.dtype.names)
     _print_offsets(s.value.dtype)
     np.testing.assert_allclose(0, s.value[0]['length'])
     np.testing.assert_allclose(0, s.value[0]['mean'])
     np.testing.assert_allclose(0, s.value[0]['variance'])
Example #11
 def test_add_multiple(self):
     s = Statistics()
     s.add(0.0)
     s.add(1.0)
     s.add(2.0)
     self.assertEqual(3, len(s))
     self.assertEqual(1.0, s.mean)
     # np.var computes the population variance (ddof=0); multiply by
     # n / (n - 1) = 3/2 to get the sample variance.
     sample_variance = 3.0 / 2.0 * np.var([0.0, 1.0, 2.0])
     self.assertEqual(1.0, sample_variance)
     self.assertEqual(sample_variance, s.var)
     self.assertEqual(np.sqrt(sample_variance), s.std)
     self.assertEqual(0.0, s.min)
     self.assertEqual(2.0, s.max)
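
For reference, the 3/2 factor above is the Bessel correction n / (n - 1): np.var computes the population variance (ddof=0), and scaling by n / (n - 1), or passing ddof=1, yields the sample variance. A quick check:

import numpy as np

x = [0.0, 1.0, 2.0]
n = len(x)
sample_var = np.var(x, ddof=1)     # Bessel-corrected sample variance
assert sample_var == 1.0
assert np.isclose(np.var(x) * n / (n - 1), sample_var)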
Example #12
    def _get_reduction_stats(self, start, stop):
        """Get statistics over the reduction

        :param start: The starting sample identifier (inclusive).
        :param stop: The ending sample identifier (exclusive).
        :return: The tuple of ((sample_start, sample_stop), :class:`Statistics`).
        """
        # log.debug('_get_reduction_stats(%s, %s)', start, stop)
        s = Statistics()
        sz = self.config['samples_per_reduction']
        incr = self.config['samples_per_block'] // sz
        r_start = start // sz
        if (r_start * sz) < start:
            r_start += 1
        r_stop = stop // sz
        if r_start >= r_stop:  # cannot use the reductions
            s_start = r_start * sz
            return (s_start, s_start), s
        r_idx = r_start

        while r_idx < r_stop:
            reduction_cache = self._reduction_tlv(r_idx)
            if reduction_cache is None:
                break
            data = reduction_cache['buffer']
            b_start = r_idx - reduction_cache['r_start']
            length = (reduction_cache['r_stop']
                      - reduction_cache['r_start'] - b_start)
            out_remaining = r_stop - r_idx
            length = min(length, out_remaining)
            if length <= 0:
                break
            r = reduction_downsample(data, b_start, b_start + length, length)
            s.combine(Statistics(length=length * sz, stats=r[0, :, :]))
            r_idx += length
        return (r_start * sz, r_stop * sz), s
Example #13
 def _stats_get(self, start, stop):
     s1, s2 = start, stop
     (k1, k2), s = self._get_reduction_stats(s1, s2)
     if k1 >= k2:
         # compute directly over samples
         stats = np.empty((STATS_FIELDS, STATS_VALUES), dtype=np.float32)
         _, d_bits, z = self.raw(s1, s2)
         i, v = z[:, 0], z[:, 1]
         p = i * v
         zi = np.isfinite(i)
         i_view = i[zi]
         if len(i_view):
             i_range = np.bitwise_and(d_bits, 0x0f)
             i_lsb = np.right_shift(np.bitwise_and(d_bits, 0x10), 4)
             v_lsb = np.right_shift(np.bitwise_and(d_bits, 0x20), 5)
             for idx, field in enumerate(
                 [i_view, v[zi], p[zi], i_range, i_lsb[zi], v_lsb[zi]]):
                 stats[idx, 0] = np.mean(field, axis=0)
                 stats[idx, 1] = np.var(field, axis=0)
                 stats[idx, 2] = np.amin(field, axis=0)
                 stats[idx, 3] = np.amax(field, axis=0)
         else:
             stats[:, :] = np.full((1, STATS_FIELDS, STATS_VALUES),
                                   np.nan,
                                   dtype=np.float32)
             stats[3, 0] = I_RANGE_MISSING
             stats[3, 1] = 0
             stats[3, 2] = I_RANGE_MISSING
             stats[3, 3] = I_RANGE_MISSING
         s = Statistics(length=len(i_view), stats=stats)
     else:
         if s1 < k1:
             s_start = self._stats_get(s1, k1)
             s.combine(s_start)
         if s2 > k2:
             s_stop = self._stats_get(k2, s2)
             s.combine(s_stop)
     return s
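
The d_bits handling in _stats_get above reads the current-range code from the low nibble and the i_lsb / v_lsb flags from bits 4 and 5. A small standalone illustration of that decoding (the d_bits values are examples only):

import numpy as np

d_bits = np.array([0x00, 0x13, 0x2f], dtype=np.uint8)

i_range = np.bitwise_and(d_bits, 0x0f)                    # bits 0..3
i_lsb = np.right_shift(np.bitwise_and(d_bits, 0x10), 4)   # bit 4
v_lsb = np.right_shift(np.bitwise_and(d_bits, 0x20), 5)   # bit 5

print(i_range)   # [ 0  3 15]
print(i_lsb)     # [0 1 0]
print(v_lsb)     # [0 0 1]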
Example #14
 def test_initialize_empty(self):
     s = Statistics()
     self.assertEqual(0, len(s))
     self.assertEqual((STATS_FIELDS, STATS_VALUES), s.value.shape)
     np.testing.assert_allclose(0, s.value[:, 0])
     np.testing.assert_allclose(0, s.value[:, 1])
Example #15
 def test_initialize_zero(self):
     d = np.zeros(STATS_FIELD_COUNT, dtype=STATS_DTYPE)
     s = Statistics(stats=d)
     self.assertEqual(0, len(s))
     assert_stat_close(d, s.value)
Example #16
 def test_initialize(self):
     d = np.arange(3*4, dtype=np.float32).reshape((3, 4))
     s = Statistics(length=10, stats=d)
     self.assertEqual(10, len(s))
     np.testing.assert_allclose(d, s.value)
Example #17
 def test_combine_both_empty(self):
     s1 = Statistics()
     s2 = Statistics()
     s1.combine(s2)
     self.assertEqual(0, len(s1))
Example #18
 def test_initialize(self):
     d = np.arange(STATS_FIELDS * STATS_VALUES, dtype=np.float32).reshape(
         (STATS_FIELDS, STATS_VALUES))
     s = Statistics(length=10, stats=d)
     self.assertEqual(10, len(s))
     np.testing.assert_allclose(d, s.value)
Example #19
 def test_initialize_empty(self):
     s = Statistics()
     self.assertEqual(0, len(s))
     self.assertEqual(0.0, s.mean)
     self.assertEqual(0.0, s.var)
     self.assertEqual(0.0, s.std)