def test_can_sort_bytes_by_reordering_partially_not_cross_stationary_element():
    """Reordering shrinks can sort around a pinned element.

    The predicate keeps index 3 fixed at the value 2, so sorting is only
    possible on either side of that stationary position.
    """
    initial = hbytes([5, 3, 0, 2, 1, 4])

    def acceptable(candidate):
        # Same set of distinct byte values, and index 3 stays pinned at 2.
        return set(candidate) == set(initial) and candidate[3] == 2

    result = Lexical.shrink(initial, acceptable, random=Random(0))
    assert result <= hbytes([0, 3, 5, 2, 1, 4])
def test_can_sort_bytes_by_reordering_partially2():
    """With full=True, shrinking sorts as much as x[0] > x[2] permits."""
    initial = hbytes([5, 4, 3, 2, 1, 0])

    def acceptable(candidate):
        # Same multiset of bytes, first byte strictly above the third.
        return Counter(candidate) == Counter(initial) and candidate[0] > candidate[2]

    result = Lexical.shrink(initial, acceptable, random=Random(0), full=True)
    assert result <= hbytes([1, 2, 0, 3, 4, 5])
def test_can_pass_to_an_indirect_descendant(monkeypatch):
    """The pass_to_descendant shrink pass can replace a recursively drawn
    tree with one of its (indirect) subtrees, jumping straight to target."""
    def tree(data):
        # One example per node: a branch flag bit, a label byte, then two
        # recursive children when the flag is set.
        data.start_example(1)
        n = data.draw_bits(1)
        label = data.draw_bits(8)
        if n:
            tree(data)
            tree(data)
        data.stop_example(1)
        return label

    initial = hbytes([1, 10, 0, 0, 1, 0, 0, 10, 0, 0])
    target = hbytes([0, 10])

    # Only these two buffers are interesting, so the shrinker must move
    # from initial to target in a single descendant substitution.
    good = {initial, target}

    @shrinking_from(initial)
    def shrinker(data):
        tree(data)
        if hbytes(data.buffer) in good:
            data.mark_interesting()

    shrinker.fixate_shrink_passes(["pass_to_descendant"])

    assert shrinker.shrink_target.buffer == target
def test_exhaustive_enumeration_of_partial_buffer():
    """Novel-prefix generation with buffer_size=2 enumerates 256 distinct
    2-byte draws exactly once and then signals RunIsComplete."""
    seen = set()

    def f(data):
        k = data.draw_bytes(2)
        # The second byte is expected to always be zero here — presumably
        # masked/forced by the engine; the first byte carries the novelty.
        assert k[1] == 0
        assert k not in seen
        seen.add(k)

    seen_prefixes = set()

    runner = ConjectureRunner(
        f, settings=settings(database=None, max_examples=256, buffer_size=2),
        random=Random(0),
    )
    # RunIsComplete is raised somewhere inside this loop once the search
    # space is exhausted.
    with pytest.raises(RunIsComplete):
        runner.cached_test_function(b'')
        for _ in hrange(256):
            p = runner.generate_novel_prefix()
            assert p not in seen_prefixes
            seen_prefixes.add(p)
            data = ConjectureData.for_buffer(hbytes(p + hbytes(2)))
            runner.test_function(data)
            assert data.status == Status.VALID
            # Walking the runner's tree along this buffer must end on a
            # node marked dead (fully explored).
            node = 0
            for b in data.buffer:
                node = runner.tree[node][b]
            assert node in runner.dead
    assert len(seen) == 256
def test_keeps_using_solid_passes_while_they_shrink_size():
    """A pass that keeps succeeding stays HOPEFUL, while an untried
    alternative pass remains a CANDIDATE across iterations."""
    # Chain of interesting buffers, each reachable from the previous one
    # by deleting a single byte.
    good = {
        hbytes([0, 1, 2, 3, 4, 5]),
        hbytes([0, 1, 2, 3, 5]),
        hbytes([0, 1, 3, 5]),
        hbytes([1, 3, 5]),
        hbytes([1, 5]),
    }
    initial = max(good, key=sort_key)

    @shrinking_from(initial)
    def shrinker(data):
        while True:
            data.draw_bits(8)
            if hbytes(data.buffer) in good:
                data.mark_interesting()
    shrinker.clear_passes()

    # Two block programs ('X' and '-'); their semantics are defined by
    # block_program elsewhere — presumably delete vs. decrement.
    d1 = shrinker.add_new_pass(block_program('X'))
    d2 = shrinker.add_new_pass(block_program('-'))

    for _ in range(3):
        shrinker.single_greedy_shrink_iteration()
        assert d1.classification == PassClassification.HOPEFUL
        assert d2.classification == PassClassification.CANDIDATE
def test_can_sort_bytes_by_reordering_partially():
    """Shrinking sorts as far as the predicate allows.

    The first element must stay larger than the last, so a full sort is
    impossible; the expected result is sorted except for the trailing 0.
    """
    initial = hbytes([5, 4, 3, 2, 1, 0])

    def acceptable(candidate):
        return set(candidate) == set(initial) and candidate[0] > candidate[-1]

    result = Lexical.shrink(initial, acceptable, random=Random(0))
    assert result == hbytes([1, 2, 3, 4, 5, 0])
def test_database_uses_values_from_secondary_key():
    """The runner replays corpus entries stored under its secondary key,
    and clear_secondary_key prunes the ones that are no longer useful."""
    key = b'key'
    database = InMemoryExampleDatabase()

    def f(data):
        # Bytes >= 5 are interesting; smaller ones are invalid.
        if data.draw_bits(8) >= 5:
            data.mark_interesting()
        else:
            data.mark_invalid()

    runner = ConjectureRunner(f, settings=settings(
        max_examples=1, buffer_size=1024,
        database=database, suppress_health_check=HealthCheck.all(),
    ), database_key=key)

    # Seed the secondary key with the single bytes 0..9.
    for i in range(10):
        database.save(runner.secondary_key, hbytes([i]))

    runner.test_function(ConjectureData.for_buffer(hbytes([10])))
    assert runner.interesting_examples

    assert len(set(database.fetch(key))) == 1
    assert len(set(database.fetch(runner.secondary_key))) == 10

    runner.clear_secondary_key()

    assert len(set(database.fetch(key))) == 1
    # NOTE(review): survivors are 6..10 — the value 5 presumably moved to
    # the primary key as the shrunk example; confirm against the runner.
    assert set(
        map(int_from_bytes, database.fetch(runner.secondary_key))
    ) == set(range(6, 11))

    v, = runner.interesting_examples.values()

    # Shrinking reduces the interesting example to the minimal byte 5.
    assert list(v.buffer) == [5]
# Beispiel #8 (scraped-example separator)
 def for_buffer(self, buffer):
     """Build a ConjectureData that replays ``buffer``: each draw returns
     the next slice of it, capped at len(buffer) total bytes."""
     buffer = hbytes(buffer)
     return ConjectureData(
         max_length=len(buffer),
         draw_bytes=lambda data, n:
         hbytes(buffer[data.index:data.index + n])
     )
def test_exhaustive_enumeration(prefix, bits, seed):
    """Parametrized exhaustion test: the runner enumerates all 2**bits
    values after an optional fixed prefix, then raises RunIsComplete."""
    seen = set()

    def f(data):
        if prefix:
            data.write(hbytes(prefix))
            assert len(data.buffer) == len(prefix)
        k = data.draw_bits(bits)
        assert k not in seen
        seen.add(k)

    size = 2 ** bits

    seen_prefixes = set()

    runner = ConjectureRunner(
        f, settings=settings(database=None, max_examples=size),
        random=Random(seed),
    )
    # RunIsComplete fires somewhere inside this loop once every value of
    # the drawn bits has been explored.
    with pytest.raises(RunIsComplete):
        runner.cached_test_function(b'')
        for _ in hrange(size):
            p = runner.generate_novel_prefix()
            assert p not in seen_prefixes
            seen_prefixes.add(p)
            data = ConjectureData.for_buffer(
                hbytes(p + hbytes(2 + len(prefix))))
            runner.test_function(data)
            assert data.status == Status.VALID
            # The buffer's path through the tree must end on a dead node.
            node = 0
            for b in data.buffer:
                node = runner.tree[node][b]
            assert node in runner.dead
    assert len(seen) == size
    def run(self):
        """Run the byte-wise minimizer: try the all-zero buffer, then a
        global cap on byte values, then per-index shrinks to fixed point."""
        if not any(self.current):
            # Already all zeros: nothing can get smaller.
            return
        if self.incorporate(hbytes(self.size)):
            return
        # Try capping every byte at c, for increasing c.
        for c in hrange(max(self.current)):
            if self.incorporate(
                hbytes(min(b, c) for b in self.current)
            ):
                break

        change_counter = -1
        # Keep looping while the previous pass made progress.
        while self.current and change_counter < self.changes:
            change_counter = self.changes
            for i in hrange(self.size):
                t = self.current[i]
                if t > 0:
                    # Try a precomputed set of promising replacement values
                    # first, then fall back to the remaining candidates.
                    ss = small_shrinks[self.current[i]]
                    for c in ss:
                        if self._shrink_index(i, c):
                            for c in hrange(self.current[i]):
                                if c in ss:
                                    continue
                                if self._shrink_index(i, c):
                                    break
                            break
def test_try_shrinking_blocks_out_of_bounds():
    """try_shrinking_blocks must fail cleanly when given a block index
    that does not exist (only block 0 is present here)."""
    @shrinking_from(hbytes([1]))
    def shrinker(data):
        data.draw_bits(1)
        data.mark_interesting()

    assert not shrinker.try_shrinking_blocks((1,), hbytes([1]))
def test_draw_to_overrun():
    """Shrinking a draw whose size depends on an earlier byte settles on
    first byte 10 (d == 2), the smallest value still marked interesting."""
    @run_to_buffer
    def x(data):
        # d wraps modulo 256: byte 8 maps to 0, 9 to 1, 10 to 2, ...
        d = (data.draw_bytes(1)[0] - 8) & 0xff
        data.draw_bytes(128 * d)
        if d >= 2:
            data.mark_interesting()
    assert x == hbytes([10]) + hbytes(128 * 2)
def test_can_write_empty_string():
    """Zero-length writes and zero-bit draws consume no buffer bytes."""
    data = ConjectureData.for_buffer([1, 1, 1])
    data.draw_bits(1)
    data.write(hbytes())           # empty write: no effect on the buffer
    data.draw_bits(1)
    data.draw_bits(0, forced=0)    # zero-bit draw is also free
    data.draw_bits(1)
    assert data.buffer == hbytes([1, 1, 1])
def test_detects_too_small_block_starts():
    """prescreen_buffer rejects buffers shorter than a known-needed draw."""
    def test_fn(data):
        data.draw_bytes(8)
        data.mark_interesting()

    runner = ConjectureRunner(test_fn, settings=settings(database=None))
    record = ConjectureData.for_buffer(hbytes(8))
    runner.test_function(record)
    assert record.status == Status.INTERESTING
    # Seven bytes can never satisfy the 8-byte draw, whatever their values.
    assert not runner.prescreen_buffer(hbytes([255] * 7))
# Beispiel #15 (scraped-example separator)
 def draw_bytes(self, n):
     """Draw ``n`` bytes from the underlying source, record them in the
     buffer via __write, and return them as hbytes.

     Asserts the data is not frozen; a zero-byte draw is free."""
     self.__assert_not_frozen('draw_bytes')
     if n == 0:
         return hbytes(b'')
     self.__check_capacity(n)
     result = self._draw_bytes(self, n)
     assert len(result) == n
     self.__write(result)
     return hbytes(result)
    def find_necessary_prefix_for_novelty(self):
        """Finds a prefix that any novel example must start with.
        This is currently only used for generate_novel_prefix, where
        it allows us to significantly speed it up in the case where
        we start with a very shallow tree.

        For example, suppose we had a test function that looked like:

        .. code-block:: python

            def test_function(data):
                while data.draw_bits(1):
                    pass

        This has a unique example of size ``n`` for any ``n``, but we
        only draw that example with probability ``2 ** (-n)`` through
        random sampling, so we will very rapidly exhaust the search
        space. By first searching to find the necessary sequence
        that any novel example must satisfy, we can find novel
        examples with probability 1 instead.
        """
        necessary_prefix = bytearray()

        def append_int(n_bits, value):
            # Encode the value using the same byte width the draw used.
            necessary_prefix.extend(int_to_bytes(value, bits_to_bytes(n_bits)))

        current_node = self.root
        while True:
            assert not current_node.is_exhausted
            for i, (n_bits, value) in enumerate(
                zip(current_node.bit_lengths, current_node.values)
            ):
                if i in current_node.forced:
                    append_int(n_bits, value)
                else:
                    # We've now found a value that is allowed to
                    # vary, so what follows is not fixed.
                    return hbytes(necessary_prefix)
            else:
                # for/else: every draw in this node was forced, so look at
                # the node's transition to decide if the prefix can grow.
                assert not isinstance(current_node.transition, Conclusion)
                if current_node.transition is None:
                    return hbytes(necessary_prefix)
                branch = current_node.transition
                assert isinstance(branch, Branch)
                if len(branch.children) < branch.max_children:
                    # There are unexplored children: novelty can start here.
                    return hbytes(necessary_prefix)
                else:
                    choices = [
                        (k, v) for k, v in branch.children.items() if not v.is_exhausted
                    ]
                    assert len(choices) > 0
                    if len(choices) == 1:
                        # Exactly one live child, so its key is forced too.
                        k, v = choices[0]
                        append_int(branch.bit_length, k)
                        current_node = v
                    else:
                        return hbytes(necessary_prefix)
# Beispiel #17 (scraped-example separator)
    def try_buffer_with_rewriting_from(self, initial_attempt, v):
        """Try ``initial_attempt`` as a replacement buffer; if it is valid
        but not interesting, additionally try deleting bytes at/after
        index ``v`` to make up for the size it lost relative to the
        current best example. Returns True on success."""
        initial_data = None
        # Walk the cached tree first: if the attempt reaches a recorded
        # leaf we can reuse that result instead of re-running the test.
        node_index = 0
        for c in initial_attempt:
            try:
                node_index = self.tree[node_index][c]
            except KeyError:
                break
            node = self.tree[node_index]
            if isinstance(node, ConjectureData):
                initial_data = node
                break

        if initial_data is None:
            initial_data = ConjectureData.for_buffer(initial_attempt)
            self.test_function(initial_data)

        if initial_data.status == Status.INTERESTING:
            return initial_data is self.last_data

        # If this produced something completely invalid we ditch it
        # here rather than trying to persevere.
        if initial_data.status < Status.VALID:
            return False

        if len(initial_data.buffer) < v:
            return False

        lost_data = len(self.last_data.buffer) - \
            len(initial_data.buffer)

        # If this did not in fact cause the data size to shrink we
        # bail here because it's not worth trying to delete stuff from
        # the remainder.
        if lost_data <= 0:
            return False

        # Delete the lost amount right after v, padding the tail with
        # zeros so later draws still have bytes available.
        try_with_deleted = bytearray(initial_attempt)
        del try_with_deleted[v:v + lost_data]
        try_with_deleted.extend(hbytes(lost_data - 1))
        if self.incorporate_new_buffer(try_with_deleted):
            return True

        # Otherwise try deleting each sufficiently small interval at or
        # after v that the last interesting example recorded.
        for r, s in self.last_data.intervals:
            if (
                r >= v and
                s - r <= lost_data and
                r < len(initial_data.buffer)
            ):
                try_with_deleted = bytearray(initial_attempt)
                del try_with_deleted[r:s]
                try_with_deleted.extend(hbytes(s - r - 1))
                if self.incorporate_new_buffer(try_with_deleted):
                    return True
        return False
def test_discarding_iterates_to_fixed_point():
    """remove_discarded keeps pruning until no discarded examples remain."""
    @shrinking_from(hbytes([1] * 10) + hbytes([0]))
    def shrinker(data):
        # A discarded example up front, then a run of 1-bits ended by a 0.
        data.start_example(0)
        data.draw_bits(1)
        data.stop_example(discard=True)
        while data.draw_bits(1):
            pass
        data.mark_interesting()
    shrinker.run_shrink_pass('remove_discarded')
    assert list(shrinker.buffer) == [1, 0]
def test_zero_examples_is_adaptive():
    """zero_examples zeroes 1000 leading draws in few calls (adaptively),
    leaving the final interesting bit intact."""
    @shrinking_from(hbytes([1]) * 1001)
    def shrinker(data):
        for _ in hrange(1000):
            data.draw_bits(1)
        if data.draw_bits(1):
            data.mark_interesting()

    shrinker.fixate_shrink_passes(["zero_examples"])

    assert shrinker.shrink_target.buffer == hbytes(1000) + hbytes([1])
    # Far fewer calls than the 1000 a one-at-a-time pass would need.
    assert shrinker.calls <= 60
def test_block_programs_are_adaptive():
    """A block-deletion program removes 1000 leading zero bytes in few
    calls rather than one at a time."""
    @shrinking_from(hbytes(1000) + hbytes([1]))
    def shrinker(data):
        while not data.draw_bits(1):
            pass
        data.mark_interesting()

    p = shrinker.add_new_pass(block_program("X"))
    shrinker.fixate_shrink_passes([p.name])

    assert len(shrinker.shrink_target.buffer) == 1
    # Adaptive deletion keeps the call count logarithmic-ish, not linear.
    assert shrinker.calls <= 60
# Beispiel #21 (scraped-example separator)
 def _shrink_index(self, i, c):
     """Try to lower byte ``i`` of the current buffer to ``c``.

     If the plain replacement is rejected, also try maximizing the
     following byte(s) to 255, which can make the lexicographically
     smaller prefix acceptable. Returns True if anything was accepted."""
     assert isinstance(self.current, hbytes)
     assert 0 <= i < self.size
     if self.current[i] <= c:
         # Not a shrink: the byte is already at most c.
         return False
     if self.incorporate(self.current[:i] + hbytes([c]) + self.current[i + 1 :]):
         return True
     if i == self.size - 1:
         return False
     return self.incorporate(self.current[:i] + hbytes([c, 255]) + self.current[i + 2 :]) or self.incorporate(
         self.current[:i] + hbytes([c]) + hbytes([255] * (self.size - i - 1))
     )
def test_can_remove_discarded_data():
    """remove_discarded deletes all discarded (zero) draws, keeping only
    the terminating byte 11."""
    @shrinking_from(hbytes([0] * 10) + hbytes([11]))
    def shrinker(data):
        while True:
            data.start_example(SOME_LABEL)
            b = data.draw_bits(8)
            data.stop_example(discard=(b == 0))
            if b == 11:
                break
        data.mark_interesting()
    shrinker.run_shrink_pass('remove_discarded')
    assert list(shrinker.buffer) == [11]
def test_shrinking_from_mostly_zero(monkeypatch):
    """Starting from a buffer that is zero except its last byte, shrinking
    reduces that byte to 1 — the smallest value keeping any(s) true."""
    monkeypatch.setattr(
        ConjectureRunner, 'generate_new_examples',
        lambda self: self.cached_test_function(hbytes(5) + hbytes([2]))
    )

    @run_to_buffer
    def x(data):
        s = [data.draw_bits(8) for _ in hrange(6)]
        if any(s):
            data.mark_interesting()

    assert x == hbytes(5) + hbytes([1])
def test_shrinks_both_interesting_examples(monkeypatch):
    """When a test is interesting for two distinct origins (byte parity),
    the runner shrinks each of them to its own minimal buffer."""
    def generate_new_examples(self):
        self.test_function(ConjectureData.for_buffer(hbytes([1])))

    monkeypatch.setattr(
        ConjectureRunner, 'generate_new_examples', generate_new_examples)

    def f(data):
        n = data.draw_bits(8)
        # The interesting origin is the parity of the drawn byte.
        data.mark_interesting(n & 1)
    runner = ConjectureRunner(f, database_key=b'key')
    runner.run()
    assert runner.interesting_examples[0].buffer == hbytes([0])
    assert runner.interesting_examples[1].buffer == hbytes([1])
# Beispiel #25 (scraped-example separator)
    def run(self):
        """Run the full minimization loop: the all-zero attempt, global
        caps, value replacement, windowed caps, then per-index shrinks."""
        if not any(self.current):
            return
        if self.incorporate(hbytes(self.size)):
            return
        change_counter = -1
        # Keep iterating while the previous round made progress.
        while self.current and change_counter < self.changes:
            change_counter = self.changes
            # Cap every byte at c, for increasing c.
            for c in hrange(max(self.current)):
                if self.incorporate(
                    hbytes(min(b, c) for b in self.current)
                ):
                    break

            # Replace every occurrence of a value c with a smaller d.
            for c in sorted(set(self.current), reverse=True):
                for d in hrange(c):
                    if self.incorporate(
                        hbytes(d if b == c else b for b in self.current)
                    ):
                        break

            # Cap bytes at c within progressively smaller windows.
            for c in hrange(max(self.current)):
                k = len(self.current) // 2
                while k > 0:
                    i = 0
                    while i + k <= len(self.current):
                        self.incorporate(
                            self.current[:i] +
                            hbytes(min(b, c) for b in self.current[i:i + k]) +
                            self.current[i + k:]
                        )
                        i += k
                    k //= 2

            # Only fall through to the expensive per-index pass when the
            # cheaper passes stalled and we are not in cautious mode.
            if change_counter != self.changes or self.cautious:
                continue

            for i in hrange(self.size):
                t = self.current[i]
                if t > 0:
                    # Precomputed promising shrinks first, then the rest.
                    ss = small_shrinks[self.current[i]]
                    for c in ss:
                        if self._shrink_index(i, c):
                            for c in hrange(self.current[i]):
                                if c in ss:
                                    continue
                                if self._shrink_index(i, c):
                                    break
                            break
def test_avoids_zig_zag_trap(p):
    """Shrinking pairs with |m - n| == 1 must not zig-zag downwards one
    step at a time: the shrinker must hit the lower bound within a
    logarithmic call budget."""
    b, marker, lower_bound = p

    random.seed(0)

    b = hbytes(b)
    marker = hbytes(marker)

    n_bits = 8 * (len(b) + 1)

    def test_function(data):
        m = data.draw_bits(n_bits)
        if m < lower_bound:
            data.mark_invalid()
        n = data.draw_bits(n_bits)
        if data.draw_bytes(len(marker)) != marker:
            data.mark_invalid()
        if abs(m - n) == 1:
            data.mark_interesting()

    runner = ConjectureRunner(
        test_function, database_key=None, settings=settings(
            base_settings,
            phases=(Phase.generate, Phase.shrink)
        )
    )

    # Seed with two adjacent values (b·0 and b·1) followed by the marker.
    runner.test_function(ConjectureData.for_buffer(
        b + hbytes([0]) + b + hbytes([1]) + marker))

    assert runner.interesting_examples

    runner.run()

    v, = runner.interesting_examples.values()

    data = ConjectureData.for_buffer(v.buffer)

    m = data.draw_bits(n_bits)
    n = data.draw_bits(n_bits)
    # The shrunk m sits on the lower bound, with n adjacent to it.
    assert m == lower_bound
    if m == 0:
        assert n == 1
    else:
        assert n == m - 1

    # O(n_bits * log n_bits) shrinks; a zig-zag would be linear in the
    # value, i.e. exponential in n_bits.
    budget = 2 * n_bits * ceil(log(n_bits, 2)) + 2

    assert runner.shrinks <= budget
def test_cached_with_masked_byte_agrees_with_results(byte_a, byte_b):
    """cached_test_function must agree with a genuine run on the same buffer."""
    def test_fn(data):
        data.draw_bits(2)

    runner = ConjectureRunner(test_fn)

    cached_a = runner.cached_test_function(hbytes([byte_a]))
    cached_b = runner.cached_test_function(hbytes([byte_b]))

    real_b = ConjectureData.for_buffer(hbytes([byte_b]))
    runner.test_function(real_b)

    # If the cache found an old result, then it should match the real result.
    # If it did not, then it must be because A and B were different.
    assert (cached_a is cached_b) == (cached_a.buffer == real_b.buffer)
def test_can_pass_to_an_indirect_descendant(monkeypatch):
    """Variant using monkeypatching: pass_to_descendant alone replaces the
    whole drawn tree with its subtree labelled 10."""
    initial = hbytes([
        1, 10,
        0, 0,
        1, 0,
        0, 10,
        0, 0,
    ])

    monkeypatch.setattr(
        ConjectureRunner, 'generate_new_examples',
        lambda runner: runner.cached_test_function(initial))

    # Restrict shrinking to the single pass under test.
    monkeypatch.setattr(Shrinker, 'shrink', Shrinker.pass_to_descendant)

    def tree(data):
        # One node per example: branch flag, label byte, two children
        # when the flag is set.
        data.start_example(1)
        n = data.draw_bits(1)
        label = data.draw_bits(8)
        if n:
            tree(data)
            tree(data)
        data.stop_example(1)
        return label

    @run_to_buffer
    def x(data):
        if tree(data) == 10:
            data.mark_interesting()

    assert list(x) == [0, 10]
def test_non_cloneable_intervals():
    """Two back-to-back unconditional draws shrink to 19 zero bytes."""
    @run_to_buffer
    def x(data):
        data.draw_bytes(10)
        data.draw_bytes(9)
        data.mark_interesting()
    assert x == hbytes(19)
 def f(data):
     """Test function fragment: write the closed-over ``prefix`` (if any),
     then draw ``bits`` bits and record the never-before-seen value."""
     if prefix:
         data.write(hbytes(prefix))
         assert len(data.buffer) == len(prefix)
     k = data.draw_bits(bits)
     assert k not in seen
     seen.add(k)
# Beispiel #31 (scraped-example separator)
 def f(data):
     """Collect up to ``n`` high-sum 10-byte draws into the closed-over
     ``seen`` set; any draw already in ``seen`` is interesting."""
     x = data.draw_bytes(10)
     if sum(x) >= 2000 and len(seen) < n:
         seen.add(hbytes(x))
     if hbytes(x) in seen:
         data.mark_interesting()
 def shrinker(data):
     """Shrinker body: draw a tree, interesting only for buffers in the
     closed-over ``good`` set."""
     tree(data)
     if hbytes(data.buffer) in good:
         data.mark_interesting()
# Beispiel #33 (scraped-example separator)
 def gen(self):
     """Run the test function on ten zero bytes and pre-mark block 0 as a
     shrinking block."""
     data = ConjectureData.for_buffer(hbytes(10))
     self.test_function(data)
     # Would normally be added by minimize_individual_blocks, but we skip
     # that phase in this test.
     data.shrinking_blocks.add(0)
 def draw_bytes(data, n):
     """Draw hook returning ``n`` bytes, every one set to 0xFF."""
     return hbytes([255]) * n
# Beispiel #35 (scraped-example separator)
 def draw_zero(data, n):
     """Draw hook returning ``n`` zero bytes."""
     return hbytes(b'\x00' * n)
# Beispiel #36 (scraped-example separator)
 def shift_right(mid):
     """Try shifting the closed-over ``base`` right by ``mid`` zero bytes.

     mid == 0 is a trivially-true no-op; mid == self.size shifts the
     whole buffer away and is rejected."""
     if mid == 0:
         return True
     if mid == self.size:
         return False
     return self.incorporate(hbytes(mid) + base[:-mid])
# Beispiel #37 (scraped-example separator)
 def replace(b):
     """Rebuild the buffer from the closed-over ``parts``, substituting
     ``b`` for every part equal to ``block``."""
     return hbytes(EMPTY_BYTES.join(
         hbytes(b if c == block else c) for c in parts
     ))
 def generate_new_examples(self):
     """Seed the runner with the reversed closed-over ``target`` buffer."""
     self.test_function(ConjectureData.for_buffer(hbytes(reversed(target))))
def zero_dist(random, n):
    """Byte distribution that ignores ``random`` and yields n zero bytes."""
    return hbytes(n)
 def f(data):
     """Write one zero byte, then draw 3 bits whose value must be novel
     (recorded in the closed-over ``seen`` set)."""
     data.write(hbytes(1))
     n = data.draw_bits(3)
     assert n not in seen
     seen.add(n)
 def generate_new_examples(self):
     """Seed the runner with the single-byte buffer [1]."""
     self.test_function(ConjectureData.for_buffer(hbytes([1])))
 def f(data):
     """After a truthy bit, draw 5 bytes, write the closed-over ``buf``,
     and check that it landed at the end of the buffer."""
     if data.draw_bits(1):
         data.draw_bytes(5)
         data.write(hbytes(buf))
         assert hbytes(data.buffer[-len(buf):]) == buf
 def gen(self):
     """Run the test function once on ten zero bytes."""
     data = ConjectureData.for_buffer(hbytes(10))
     self.test_function(data)
 def f(data):
     """Record a 32-byte draw in the closed-over ``seen`` set and mark the
     example interesting."""
     seen.add(hbytes(data.draw_bytes(32)))
     data.mark_interesting()
 def __init__(self, initial, condition, random):
     """Minimizer state: the current buffer, its size, the acceptance
     condition, the PRNG, and a counter of accepted changes."""
     self.current = hbytes(initial)  # best buffer found so far
     self.size = len(self.current)
     self.condition = condition
     self.random = random
     self.changes = 0  # presumably bumped by incorporate on success
# Beispiel #46 (scraped-example separator)
def test_can_mark_interesting():
    """mark_interesting raises StopTest and freezes the data INTERESTING."""
    data = ConjectureData.for_buffer(hbytes())
    with pytest.raises(StopTest):
        data.mark_interesting()
    assert data.frozen
    assert data.status == Status.INTERESTING
# Beispiel #47 (scraped-example separator)
def has_a_non_zero_byte(x):
    """Return True when at least one byte of ``x`` is non-zero."""
    return any(b != 0 for b in hbytes(x))
# Beispiel #48 (scraped-example separator)
 def draw_bytes(data, n):
     """Draw hook replaying the closed-over ``buffer``, zero-padded past
     its end, then rewritten for novelty via self.__rewrite."""
     result = buffer[data.index:data.index + n]
     if len(result) < n:
         result += hbytes(n - len(result))
     return self.__rewrite(data, result)
# Beispiel #49 (scraped-example separator)
 def zero_prefix(mid):
     """Try zeroing the first ``mid`` bytes of the closed-over ``base``."""
     return self.incorporate(hbytes(mid) + base[mid:])
# Beispiel #50 (scraped-example separator)
    def __rewrite_for_novelty(self, data, result):
        """Take a block that is about to be added to data as the result of a
        draw_bytes call and rewrite it a small amount to ensure that the result
        will be novel: that is, not hit a part of the tree that we have fully
        explored.

        This is mostly useful for test functions which draw a small
        number of blocks.

        """
        assert isinstance(result, hbytes)
        try:
            node_index = data.__current_node_index
        except AttributeError:
            # First call for this data: start walking from the root.
            node_index = 0
            data.__current_node_index = node_index
            data.__hit_novelty = False
            data.__evaluated_to = 0

        if data.__hit_novelty:
            # Already known to be novel; nothing needs rewriting.
            return result

        node = self.tree[node_index]

        # Catch up the tree walk over any bytes written since last call.
        for i in hrange(data.__evaluated_to, len(data.buffer)):
            node = self.tree[node_index]
            try:
                node_index = node[data.buffer[i]]
                assert node_index not in self.dead
                node = self.tree[node_index]
            except KeyError:
                data.__hit_novelty = True
                return result

        # Walk the proposed result; at the first byte leading to a dead
        # node, substitute a byte that keeps the walk alive (or is new).
        for i, b in enumerate(result):
            assert isinstance(b, int)
            try:
                new_node_index = node[b]
            except KeyError:
                data.__hit_novelty = True
                return result

            new_node = self.tree[new_node_index]

            if new_node_index in self.dead:
                if isinstance(result, hbytes):
                    result = bytearray(result)
                for c in range(256):
                    if c not in node:
                        assert c <= self.capped.get(node_index, c)
                        result[i] = c
                        data.__hit_novelty = True
                        return hbytes(result)
                    else:
                        new_node_index = node[c]
                        new_node = self.tree[new_node_index]
                        if new_node_index not in self.dead:
                            result[i] = c
                            break
                else:  # pragma: no cover
                    assert False, (
                        'Found a tree node which is live despite all its '
                        'children being dead.')
            node_index = new_node_index
            node = new_node
        assert node_index not in self.dead
        # Remember where the walk stopped so the next draw resumes here.
        data.__current_node_index = node_index
        data.__evaluated_to = data.index + len(result)
        return hbytes(result)
# Beispiel #51 (scraped-example separator)
 def for_buffer(self, buffer):
     """Build a ConjectureData replaying ``buffer``; the distribution
     argument of the draw hook is accepted but ignored."""
     return ConjectureData(
         max_length=len(buffer),
         draw_bytes=lambda data, n, distribution:
         hbytes(buffer[data.index:data.index + n])
     )
# Beispiel #52 (scraped-example separator)
def test_drawing_zero_bits_is_free():
    """A zero-bit draw yields 0 even with an empty buffer."""
    data = ConjectureData.for_buffer(hbytes())
    assert data.draw_bits(0) == 0
# Beispiel #53 (scraped-example separator)
    def _run(self):
        """Main runner loop: replay stored examples, then generate/mutate
        new ones until something is interesting or a budget (examples,
        iterations, timeout) is exhausted, and finally shrink every
        interesting example found."""
        self.last_data = None
        mutations = 0
        start_time = time.time()

        self.reuse_existing_examples()

        if (
            Phase.generate in self.settings.phases and not
            self.__tree_is_exhausted()
        ):
            if (
                self.last_data is None or
                self.last_data.status < Status.INTERESTING
            ):
                self.new_buffer()

            mutator = self._new_mutator()

            zero_bound_queue = []

            while (
                self.last_data.status != Status.INTERESTING and
                not self.__tree_is_exhausted()
            ):
                # Budget checks: valid-example cap, call cap, wall clock.
                if self.valid_examples >= self.settings.max_examples:
                    self.exit_reason = ExitReason.max_examples
                    return
                if self.call_count >= max(
                    self.settings.max_iterations, self.settings.max_examples
                ):
                    self.exit_reason = ExitReason.max_iterations
                    return
                if (
                    self.settings.timeout > 0 and
                    time.time() >= start_time + self.settings.timeout
                ):
                    self.exit_reason = ExitReason.timeout
                    return
                if zero_bound_queue:
                    # Whenever we generated an example and it hits a bound
                    # which forces zero blocks into it, this creates a weird
                    # distortion effect by making certain parts of the data
                    # stream (especially ones to the right) much more likely
                    # to be zero. We fix this by redistributing the generated
                    # data by shuffling it randomly. This results in the
                    # zero data being spread evenly throughout the buffer.
                    # Hopefully the shrinking this causes will cause us to
                    # naturally fail to hit the bound.
                    # If it doesn't then we will queue the new version up again
                    # (now with more zeros) and try again.
                    overdrawn = zero_bound_queue.pop()
                    buffer = bytearray(overdrawn.buffer)

                    # These will have values written to them that are different
                    # from what's in them anyway, so the value there doesn't
                    # really "count" for distributional purposes, and if we
                    # leave them in then they can cause the fraction of non
                    # zero bytes to increase on redraw instead of decrease.
                    for i in overdrawn.forced_indices:
                        buffer[i] = 0

                    self.random.shuffle(buffer)
                    buffer = hbytes(buffer)

                    if buffer == overdrawn.buffer:
                        continue

                    def draw_bytes(data, n):
                        result = buffer[data.index:data.index + n]
                        if len(result) < n:
                            result += hbytes(n - len(result))
                        return self.__rewrite(data, result)

                    data = ConjectureData(
                        draw_bytes=draw_bytes,
                        max_length=self.settings.buffer_size,
                    )
                    self.test_function(data)
                    data.freeze()
                elif mutations >= self.settings.max_mutations:
                    # Too many mutations without progress: start fresh.
                    mutations = 0
                    data = self.new_buffer()
                    mutator = self._new_mutator()
                else:
                    data = ConjectureData(
                        draw_bytes=mutator,
                        max_length=self.settings.buffer_size
                    )
                    self.test_function(data)
                    data.freeze()
                    prev_data = self.last_data
                    if self.consider_new_test_data(data):
                        self.last_data = data
                        if data.status > prev_data.status:
                            mutations = 0
                    else:
                        mutator = self._new_mutator()
                if getattr(data, 'hit_zero_bound', False):
                    zero_bound_queue.append(data)
                mutations += 1

        if self.__tree_is_exhausted():
            self.exit_reason = ExitReason.finished
            return

        data = self.last_data
        if data is None:
            self.exit_reason = ExitReason.finished
            return
        assert isinstance(data.output, text_type)

        if Phase.shrink not in self.settings.phases:
            self.exit_reason = ExitReason.finished
            return

        # Re-run the best buffer to confirm it is still interesting before
        # shrinking; a mismatch means the test is flaky.
        data = ConjectureData.for_buffer(self.last_data.buffer)
        self.test_function(data)
        if data.status != Status.INTERESTING:
            self.exit_reason = ExitReason.flaky
            return

        # Shrink each interesting origin, smallest buffers first.
        while len(self.shrunk_examples) < len(self.interesting_examples):
            target, d = min([
                (k, v) for k, v in self.interesting_examples.items()
                if k not in self.shrunk_examples],
                key=lambda kv: (sort_key(kv[1].buffer), sort_key(repr(kv[0]))),
            )
            self.debug('Shrinking %r' % (target,))
            self.last_data = d
            assert self.last_data.interesting_origin == target
            self.shrink()
            self.shrunk_examples.add(target)
# Beispiel #54 (scraped-example separator)
def write_float(data, f):
    """Write ``f`` as 8 lexically-ordered magnitude bytes followed by a
    sign byte (1 when the sign bit of the float is set)."""
    data.write(int_to_bytes(float_to_lex(abs(f)), 8))
    sign = float_to_int(f) >> 63
    data.write(hbytes([sign]))
# Beispiel #55 (scraped-example separator)
 def draw_max(data, n):
     """Draw hook returning ``n`` bytes of 0xFF."""
     return hbytes([255] * n)
def test_shrink_to_zero():
    """With an always-true condition, minimize drives all bytes to zero."""
    result = minimize(hbytes([255] * 8), lambda x: True, random=Random(0))
    assert result == hbytes(8)
# Beispiel #57 (scraped-example separator)
 def x(data):
     """Write two fixed 2-byte blocks, then draw 41 bits; interesting when
     v >= 512 or v == 254."""
     data.write(hbytes(b"\x01\x02"))
     data.write(hbytes(b"\x01\x00"))
     v = data.draw_bits(41)
     if v >= 512 or v == 254:
         data.mark_interesting()
# Beispiel #58 (scraped-example separator)
 def x(data):
     """Keep drawing single bytes until ``n`` distinct values (closure)
     have been seen, then mark the example interesting."""
     seen = set()
     while len(seen) < n:
         seen.add(hbytes(data.draw_bytes(1)))
     data.mark_interesting()
 def x(data):
     """Draw len(target) bytes and mark interesting only on an exact
     match with the closed-over ``target`` buffer."""
     for _ in hrange(len(target)):
         data.draw_bits(8)
     if hbytes(data.buffer) == target:
         data.mark_interesting()
# Beispiel #60 (scraped-example separator)
def test_can_mark_invalid():
    """mark_invalid raises StopTest and freezes the data as INVALID."""
    data = ConjectureData.for_buffer(hbytes())
    with pytest.raises(StopTest):
        data.mark_invalid()
    assert data.frozen
    assert data.status == Status.INVALID