class DepthMachine(RuleBasedStateMachine):
    """Builds chains of DepthCharge objects and bounds their depth.

    Fix: the original stacked two ``@rule`` decorators on ``charge`` (one
    drawing ``child`` from the bundle, one from ``none()``) — a deprecated
    Hypothesis pattern that modern versions reject.  The equivalent modern
    form is a single rule whose ``child`` strategy is the union
    ``none() | charges``.
    """

    charges = Bundle('charges')

    @rule(target=charges, child=none() | charges)
    def charge(self, child):
        """Wrap either nothing or a previously built charge."""
        return DepthCharge(child)

    @rule(check=charges)
    def is_not_too_deep(self, check):
        """No chain of charges may reach depth 3."""
        assert check.depth < 3
def init_bundles(schema: "BaseOpenAPISchema") -> Dict[str, CaseInsensitiveDict]:
    """Create bundles for all endpoints in the given schema.

    Each endpoint has a bundle that stores all responses from that endpoint.
    We need to create bundles first, so they can be referred when building
    connections between endpoints.
    """
    output: Dict[str, CaseInsensitiveDict] = {}
    for endpoint in schema.get_all_endpoints():
        # setdefault returns the per-path mapping, creating it on first use.
        methods = output.setdefault(endpoint.path, CaseInsensitiveDict())
        methods[endpoint.method.upper()] = Bundle(endpoint.verbose_name)  # type: ignore
    return output
class QueryManagerMachine(RuleBasedStateMachine):
    """Stateful tests for the Query Manager class"""

    fields = Bundle('fields')
    criteria = Bundle('criteria')

    def __init__(self):
        super(QueryManagerMachine, self).__init__()
        self.config = {
            'db': 'test.db',
            'fields': [],
            'filters': [],
            'table': 'Customers',
            'max_export_rows': 500000,
            'max_display_rows': 1000,
            'order_by': ''
        }
        self.query_manager = QueryManager(self.config)

    @rule(target=fields, field=FieldStrategy)
    def add_field(self, field):
        """Register a generated field and put it in the ``fields`` bundle."""
        assume(field.name not in self.query_manager.headers)  # must be unique
        assume(valid_sql_field_name(field.name))
        self.config.get('fields').append([field.name, field.type])
        # Bug fix: this rule targets the ``fields`` bundle but previously
        # returned nothing, so the bundle only ever contained ``None`` and
        # add_criteria's assume(field) rejected every draw — add_criteria was
        # effectively dead.  Returning the field makes the bundle usable.
        return field

    @rule(field=fields, value=text())
    def add_criteria(self, field, value):
        """Attach a criterion with a random operator to a bundled field."""
        assume(field)
        assume(field.name)
        random_op = random_operator(field)
        self.query_manager.add_criteria(field=field, value=value, operator=random_op)

    @rule()
    def query_is_valid(self):
        """The generated SQL must always be valid once any field exists."""
        assume(self.query_manager.fields)
        fieldlist = [fld.name for fld in self.query_manager.fields.values()]
        note('field list: {}'.format(list(fieldlist)))
        note('sql display: {}'.format(self.query_manager.sql_display))
        assert valid_sql(stmt=self.query_manager.sql_display)
class DatabaseComparison(RuleBasedStateMachine):
    """Stateful comparison of DirectoryBasedExampleDatabase against a model.

    The model is a plain ``defaultdict(set)``; every operation is applied to
    both the real database and the model, and ``values_agree`` checks they
    match.  Each rule also logs its name through the module-level ``writer``
    (presumably a csv writer — TODO confirm) so the executed rule sequence
    can be inspected afterwards.
    """

    def __init__(self):
        super(DatabaseComparison, self).__init__()
        writer.writerow(['init_state'])
        # The implementation under test lives in a fresh temp directory,
        # removed again in teardown().
        self.tempd = tempfile.mkdtemp()
        self.database = DirectoryBasedExampleDatabase(self.tempd)
        # Model: key -> set of values currently stored under that key.
        self.model = defaultdict(set)

    keys = Bundle("keys")
    values = Bundle("values")

    @rule(target=keys, k=st.binary())
    def add_key(self, k):
        writer.writerow(['add_key'])
        return k

    @rule(target=values, v=st.binary())
    def add_value(self, v):
        writer.writerow(['add_value'])
        return v

    @rule(k=keys, v=values)
    def save(self, k, v):
        writer.writerow(['save'])
        self.model[k].add(v)
        self.database.save(k, v)

    @rule(k=keys, v=values)
    def delete(self, k, v):
        writer.writerow(['delete'])
        self.model[k].discard(v)  # outcomment this to see it working
        self.database.delete(k, v)

    @rule(k=keys)
    def values_agree(self, k):
        # The database must report exactly the values the model holds.
        assert set(self.database.fetch(k)) == self.model[k]

    def teardown(self):
        writer.writerow(['teardown'])
        shutil.rmtree(self.tempd)
class DatabaseComparison(RuleBasedStateMachine):
    """Stateful test comparing an rdflib ``Graph`` against a dict model.

    Each key becomes a ``http://example.org/people/<k>`` URIRef whose
    ``FOAF.value`` triple holds the integer value.  The model stores the
    latest value per key (``None`` after a delete); ``values_agree``
    compares the string forms of both sides.
    """

    def __init__(self):
        super(DatabaseComparison, self).__init__()
        self.database = Graph()
        self.database.bind("foaf", FOAF)
        # Model: key -> last saved value (None once deleted).
        self.model = defaultdict()

    keys = Bundle('keys')
    values = Bundle('values')

    # Characters that would make the generated URIRef invalid.
    _invalid_uri_chars = '<>" {}|\\^`'

    @rule(target=keys,
          k=st.text(st.characters(max_codepoint=1000,
                                  blacklist_characters=_invalid_uri_chars),
                    min_size=1))
    def add_key(self, k):
        return k

    @rule(target=values, v=st.integers())
    def add_value(self, v):
        return v

    @rule(k=keys, v=values)
    def save(self, k, v):
        self.model[k] = v
        node = URIRef("http://example.org/people/%s" % k)
        # Graph.set replaces any existing FOAF.value triple for this node.
        self.database.set((node, FOAF.value, Literal(v)))

    @rule(k=keys)
    def delete(self, k):
        # Deletion is modelled as "value becomes None", matching the
        # str(None) comparison in values_agree below.
        self.model[k] = None
        node = URIRef("http://example.org/people/%s" % k)
        self.database.remove((node, FOAF.value, None))

    @rule(k=keys)
    def values_agree(self, k):
        node = URIRef("http://example.org/people/%s" % k)
        # Both sides are stringified: a missing triple yields None -> 'None'.
        assert str(self.database.value(node, FOAF.value)) == str(self.model.get(k))
class AutoFSMTest(RuleBasedStateMachine):
    """Stateful tests for TransmissionSystem's gear-shifting state machine.

    RPM values (and lists of them) are generated into bundles, then fed to
    the system's ``step``/``run`` methods; invariants assert the resulting
    state is always a valid gear state.
    """

    def __init__(self):
        super().__init__()
        self.auto = TransmissionSystem()

    rpms = Bundle('rpms')
    rpm_sets = Bundle('rpm_sets')

    @rule(target=rpms, rpm=st.integers(min_value=0))
    def add_rpm(self, rpm):
        # Only stores the value; nothing is fed to the system yet.
        return rpm

    @rule(target=rpm_sets, rpms=st.lists(st.integers(min_value=0)))
    def add_rpms(self, rpms):
        return rpms

    ## These methods exercise the step and run methods of
    ## TransmissionSystem, as possible intervening actions between
    ## test assertions
    @rule(rpm=consumes(rpms))
    def step(self, rpm):
        self.auto.step(rpm)

    @rule(rpms=consumes(rpm_sets))
    def run(self, rpms):
        self.auto.run(rpms)

    # These are the test methods that assert facts about the state machine
    @invariant()
    def state_is_always_a_gear_state(self):
        assert isinstance(self.auto.state, GearState)

    @precondition(lambda self: isinstance(self.auto.state, Neutral))
    @rule(rpm=consumes(rpms))
    def step_from_neutral_must_be_neutral_or_first(self, rpm):
        """Given Neutral state, then next state must be Neutral or FirstGear"""
        self.auto.step(rpm)
        state = self.auto.state
        assert isinstance(state, Neutral) or isinstance(state, FirstGear)
class DepthMachine(RuleBasedStateMachine): charges = Bundle(u'charges') # double-rule is deprecated with Settings(strict=False): @rule(targets=(charges, ), child=charges) @rule(targets=(charges, ), child=none()) def charge(self, child): return DepthCharge(child) @rule(check=charges) def is_not_too_deep(self, check): assert check.depth < 3
class GitlabStateful(RuleBasedStateMachine):
    """Stateful tests for GitlabAPI user management.

    Created users are tracked in a bundle so later rules can distinguish
    "exists" from "does not exist" when fetching, re-creating or deleting.
    """

    def __init__(self):
        super().__init__()
        self._gitlab = GitlabAPI()
        self._gitlab.prepare()

    created_users = Bundle("users")

    @rule(target=created_users, user=users())
    def create_new_user(self, user: User):
        """Create new user from generated user.

        TODO: Should this check to not accidentally create the same user as
        before?
        """
        # Perform operation on real system
        self._gitlab.create_user(user)
        # Return value store it into bundle
        return user

    @rule(user=created_users)
    @expect_exception(GitlabException)
    def create_existing_user(self, user: User):
        """Test creating an existing user, should raise an exception.

        User is drawn from the `created_users` bundle, guaranteeing it has
        been created.
        """
        self._gitlab.create_user(user)

    @rule(user=created_users)
    def get_existing_user(self, user: User):
        """Test fetching an existing user, as post-condition both model and
        states should agree.
        """
        fetched_user = self._gitlab.fetch_user(user.uid)
        assert fetched_user == user

    @rule(user=users())
    def get_non_existing_user(self, user: User):
        """Test fetching an non-existing user, should return None."""
        # NOTE(review): a freshly generated user could in principle collide
        # with an already-created uid — presumably users() makes uids unique
        # enough; confirm.
        fetched_user = self._gitlab.fetch_user(user.uid)
        assert fetched_user is None

    @rule(user=consumes(created_users))
    def delete_user(self, user: User):
        """Test deleting an existing user.

        Consumes user from the created users bundle so it cannot be drawn
        again afterwards.
        """
        self._gitlab.delete_user(user.uid)
class ManagerTest(RuleBasedStateMachine):
    """Stateful tests for BoxManager against a reference collision check."""

    boxes = Bundle("boxes")

    def __init__(self):
        super().__init__()
        self.manager = box_manager.BoxManager()
        # Model: the set of boxes currently registered with the manager.
        self.current_boxes = set()

    @rule(target=boxes, new_boxes=BOXES_ST)
    def add_boxes(self, new_boxes):
        for b in new_boxes:
            self.manager.register(b)
            self.current_boxes.add(b)
            assert b.manager is not None
        # multiple() adds every new box to the bundle in one rule call.
        return multiple(*new_boxes)

    @rule(to_remove=consumes(boxes))
    def remove_box(self, to_remove):
        self.manager.remove(to_remove)
        assert to_remove in self.current_boxes
        self.current_boxes.remove(to_remove)
        assert to_remove.manager is None
        if to_remove.stationary:
            # Removing a stationary box must invalidate the stationary cache.
            assert not self.manager.stationary_cache_valid

    @rule(
        box=boxes,
        new_x=st.floats(min_value=-100.0, max_value=100.0),
        new_y=st.floats(min_value=-100.0, max_value=100.0),
    )
    def move_box(self, box, new_x, new_y):
        box.move(new_x, new_y)
        assert box.manager is not None
        if box.stationary:
            # Moving a stationary box must also invalidate the cache.
            assert not self.manager.stationary_cache_valid

    @invariant()
    def check_box_collisions(self):
        all_boxes = list(self.current_boxes)
        good = list(base_algorithms.check_deduplicated(all_boxes))
        # We run this one twice, so that the first one has a chance to cache the stationary boxes.
        # Then we check all 3 of them.
        unknown_precached = list(self.manager.yield_collisions())
        unknown_cached = list(self.manager.yield_collisions())
        good_s = make_deterministic_set(good)
        unknown_precached_s = make_deterministic_set(unknown_precached)
        unknown_cached_s = make_deterministic_set(unknown_cached)
        # Every collision found by the reference algorithm must be reported
        # by both the uncached and the cached manager passes.
        assert len(good_s.intersection(unknown_precached_s)) == len(good_s)
        assert len(good_s.intersection(unknown_cached_s)) == len(good_s)
def init_bundles(
        schema: "BaseOpenAPISchema") -> Dict[str, CaseInsensitiveDict]:
    """Create bundles for all operations in the given schema.

    Each API operation has a bundle that stores all responses from that
    operation. We need to create bundles first, so they can be referred
    when building connections between operations.
    """
    output: Dict[str, CaseInsensitiveDict] = {}
    for result in schema.get_all_operations():
        if not isinstance(result, Ok):
            # Operations that failed to parse are skipped.
            continue
        operation = result.ok()
        methods = output.setdefault(operation.path, CaseInsensitiveDict())
        methods[operation.method.upper()] = Bundle(
            operation.verbose_name)  # type: ignore
    return output
class RoseTreeStateMachine(RuleBasedStateMachine):
    """Builds rose trees (nested lists) and asserts they stay shallow."""

    nodes = Bundle('nodes')

    @rule(target=nodes, source=lists(nodes))
    def bunch(self, source):
        """Group previously built trees into a new tree."""
        return source

    @rule(source=nodes)
    def shallow(self, source):
        """Assert the drawn tree is at most 5 levels deep."""

        def depth(tree):
            # A leaf (empty list) has depth 0; otherwise one more than
            # the deepest child.
            return 1 + max(map(depth, tree)) if tree else 0

        assert depth(source) <= 5
class MachineUsingMultiple(RuleBasedStateMachine):
    """Checks that rules returning ``multiple(...)`` grow the bundle correctly."""

    b = Bundle("b")

    def __init__(self):
        # How many items we expect the "b" bundle to contain.
        self.expected_bundle_length = 0
        super(MachineUsingMultiple, self).__init__()

    @invariant()
    def bundle_length(self):
        # The live bundle must always match our running count.
        assert len(self.bundle("b")) == self.expected_bundle_length

    @rule(target=b, items=lists(elements=integers(), max_size=10))
    def populate_bundle(self, items):
        self.expected_bundle_length += len(items)
        # multiple(*items) adds every item to the bundle in one rule call.
        return multiple(*items)

    @rule(target=b)
    def do_not_populate(self):
        # multiple() with no arguments adds nothing to the bundle.
        return multiple()
class NotTheLastMachine(RuleBasedStateMachine):
    """Fixture whose ``bye`` asserts the drawn bundle value is the most
    recently created one — presumably meant to fail once ``bye`` draws an
    older entry (TODO confirm intent against the surrounding test)."""

    stuff = Bundle('stuff')

    def __init__(self):
        super(NotTheLastMachine, self).__init__()
        self.last = None        # object returned by the most recent hi()
        self.bye_called = False  # bye() may only fire once

    @rule(target=stuff)
    def hi(self):
        result = object()
        self.last = result
        return result

    @precondition(lambda self: not self.bye_called)
    @rule(v=stuff)
    def bye(self, v):
        assert v == self.last
        self.bye_called = True
class HeapMachine(RuleBasedStateMachine):
    """Stateful tests for list-based heapq operations."""

    Heaps = Bundle('heaps')

    @rule(target=Heaps)
    def newheap(self):
        """Start a fresh, empty heap."""
        return []

    @rule(heap=Heaps, value=integers())
    def push(self, heap, value):
        """Push a value; heappush maintains the heap invariant in place."""
        heappush(heap, value)

    @rule(heap=Heaps.filter(bool))
    def pop(self, heap):
        """Popping a non-empty heap must yield its minimum element."""
        smallest = min(heap)
        popped = heappop(heap)
        assert smallest == popped

    @rule(target=Heaps, heap1=Heaps, heap2=Heaps)
    def merge(self, heap1, heap2):
        """Merge two heaps into a new one and add it to the bundle."""
        return heapmerge(heap1, heap2)
class FiniteStateMachine(RuleBasedStateMachine):
    """Differential test: a concrete FSM must match the reference FSM.

    Every ``tick`` drives both machines with the same input and stores the
    pair of outputs in a bundle; ``check_output`` asserts that each stored
    pair agrees.
    """

    output = Bundle("output")

    def __init__(self):
        super().__init__()
        self.concrete_fsm = create_concrete_fsm_partial()
        self.reference_fsm = create_reference_fsm_partial()

    @rule(target=output, input=st.sampled_from(inputs))
    def tick(self, input):
        """Advance both machines one step and record their outputs."""
        concrete_output = self.concrete_fsm.tick(input)
        reference_output = self.reference_fsm.tick(input)
        return reference_output, concrete_output

    @rule(output=output)
    def check_output(self, output):
        """Each recorded (reference, concrete) output pair must agree."""
        reference_output, concrete_output = output
        # Fix: failure message read "produce wrong output" — corrected grammar.
        assert reference_output == concrete_output, (
            f"State machine produced wrong output, "
            f"expected: {reference_output}, got: {concrete_output}"
        )
class InMemoryUserRepositoryFSM(RuleBasedStateMachine):
    """Stateful tests: InMemoryUserRepository must agree with a dict model."""

    def __init__(self):
        super(InMemoryUserRepositoryFSM, self).__init__()
        self.repository = InMemoryUserRepository()
        # Model: user id -> last saved User.
        self.model = dict()

    users = Bundle('users')

    @rule(target=users, user=user_gen)
    def add_user(self, user):
        # Only puts the generated user in the bundle; nothing is saved yet.
        return user

    @rule(user=users)
    def save(self, user: User):
        self.model[user.id] = user
        self.repository.save(user)

    @rule(user=users)
    def get(self, user):
        # For users never saved, both sides should report "absent" the same
        # way (model.get returns None; the repository presumably does too).
        assert self.repository.get(user.id) == self.model.get(user.id)
class HeapMachine(RuleBasedStateMachine):
    """Stateful tests for the BinaryHeap implementation."""

    Heaps = Bundle('heaps')

    @rule(target=Heaps)
    def newheap(self):
        """Create a fresh, empty heap."""
        return BinaryHeap()

    @rule(heap=Heaps, value=integers())
    def push(self, heap, value):
        heap.Ajout(value)

    # Idiom fix: was ``lambda self: self.estVide() != True`` — never compare
    # to True with ``!=``, and don't name a lambda parameter ``self``.
    @rule(heap=Heaps.filter(lambda h: not h.estVide()))
    def pop(self, heap):
        """Popping a non-empty heap must return its reported minimum."""
        correct = heap.getMin()
        result = heap.SupMin()
        assert correct == result

    @rule(target=Heaps, heap1=Heaps, heap2=Heaps)
    def merge(self, heap1, heap2):
        """Union of two heaps; its size must be the sum of the inputs' sizes."""
        res = Union(heap1, heap2)
        assert res.getTaille() == heap1.getTaille() + heap2.getTaille()
        return res
class StrategyStateMachine(RuleBasedStateMachine):
    """Stateful tests: agenda activations under a strategy must match a model set."""

    def __init__(self):
        super(StrategyStateMachine, self).__init__()
        self.model = set()
        self.agenda = Agenda()
        self.strategy = strategy()
        # (rule id, frozenset of fact ids) pairs already declared, used to
        # avoid generating duplicate activations.
        self.fss = set()

    activations = Bundle("activations")

    @rule(target=activations,
          r=st.integers(min_value=0),
          fs=st.sets(st.integers(min_value=0), min_size=1))
    def declare(self, r, fs):
        # Reject combinations we've already declared.
        assume((r, frozenset(fs)) not in self.fss)
        self.fss.add((r, frozenset(fs)))
        fs = [Fact(i, __factid__=i) for i in fs]
        act = Activation(Rule(Fact(r)), facts=tuple(fs))
        # Update agenda
        self.strategy.update_agenda(self.agenda, [act], [])
        # Update model
        self.model |= set([act])
        return act

    @rule(act=consumes(activations))
    def retract(self, act):
        # Update agenda
        self.strategy.update_agenda(self.agenda, [], [act])
        # Update model
        self.model -= set([act])

    @invariant()
    def values_agree(self):
        # The agenda's activation set must always equal the model.
        assert set(self.agenda.activations) == self.model
class FactoryMachine(RuleBasedStateMachine):
    """Stateful tests checking SequenceFactory and Registry caches stay in sync."""

    meta_list = Bundle("meta_list")

    # Bug fix: ``st.one_of(dict())`` iterates an *empty* dict and therefore
    # passes no strategies to one_of, so no cache value could ever be drawn.
    # ``st.builds(dict)`` draws a fresh empty dict per example (``st.just({})``
    # would share one mutable dict across examples).
    @initialize(cache=st.builds(dict), always_cache=st.booleans())
    def build_factory(self, cache, always_cache=False):
        # TODO: add more cache types
        self.factory = SequenceFactory.from_cache(cache, session=SESSION,
                                                  always_cache=always_cache)
        self.registry = Registry.from_factory(self.factory)

    @rule(
        target=meta_list,
        key=st.one_of(random_ids(), random_names()),
        check_name=st.booleans(),
    )
    def load_meta(self, key, check_name):
        """Load metadata by id or name and keep it in the bundle."""
        return self.factory.load_meta(key, check_name=check_name)

    @invariant()
    def caches_match(self):
        # The factory cache and the registry's view of it must never diverge.
        assert self.factory.cache == self.registry.cache
class SLPRules(RuleBasedStateMachine):
    """Stateful tests for curver's StraightLineProgram sequence semantics:
    every operation must behave like the corresponding list operation."""

    SLPs = Bundle('slps')

    @rule(target=SLPs, items=st.lists(elements=st.integers()))
    def newslp(self, items):
        return curver.kernel.StraightLineProgram(items)

    @rule(target=SLPs, slp=SLPs)
    def copy(self, slp):
        # Constructing from an existing SLP must preserve its contents.
        copy = curver.kernel.StraightLineProgram(slp)
        assert list(copy) == list(slp)
        return copy

    @rule(target=SLPs, slp1=SLPs, slp2=SLPs)
    def add(self, slp1, slp2):
        # Concatenation must behave like list concatenation.
        added = slp1 + slp2
        assert list(added) == list(slp1) + list(slp2)
        return added

    @rule(slp=SLPs, factor=st.integers(min_value=0, max_value=1000))
    def multiply(self, slp, factor):
        # Repetition must behave like list repetition, on either side.
        assert list(slp * factor) == list(slp) * factor
        assert list(factor * slp) == list(slp * factor)

    @rule(data=st.data())
    def getitem(self, data):
        slp = data.draw(self.SLPs.filter(lambda s: len(s) > 0))  # Non-empty.
        index = data.draw(st.integers(min_value=0, max_value=len(slp) - 1))
        # Indexing must agree with the materialised list.
        assert list(slp)[index] == slp[index]

    @rule(target=SLPs, slp=SLPs)
    def reverse(self, slp):
        # Both reverse() and reversed() must match list reversal.
        rev = slp.reverse()
        assert list(rev) == list(slp)[::-1]
        assert list(reversed(slp)) == list(slp)[::-1]
        return rev
class FolderOperationsStateMachine(RuleBasedStateMachine):
    """Compares parsec mountpoint folder operations against a local-FS oracle.

    Every rule performs the same operation on the oracle path (a real
    directory tree under ``tmpdir``) and on the mounted parsec workspace,
    then asserts both raise (or don't raise) the same OSError.  The class
    closes over ``tentative``/``caplog``/``tmpdir`` etc. from an enclosing
    test function (note the ``nonlocal``).
    """

    Files = Bundle("file")
    Folders = Bundle("folder")
    # Moving mountpoint
    NonRootFolder = Folders.filter(lambda x: not x.is_workspace())

    @initialize(target=Folders)
    def init(self):
        nonlocal tentative
        tentative += 1
        caplog.clear()

        async def _bootstrap(user_fs, mountpoint_manager):
            wid = await user_fs.workspace_create("w")
            self.parsec_root = await mountpoint_manager.mount_workspace(wid)

        self.mountpoint_service = mountpoint_service_factory(_bootstrap)

        # Build the oracle tree: tmpdir/oracle-test-N/root/w mirrors the
        # mounted workspace.
        self.folder_oracle = Path(tmpdir / f"oracle-test-{tentative}")
        self.folder_oracle.mkdir()
        oracle_root = self.folder_oracle / "root"
        oracle_root.mkdir()
        self.folder_oracle.chmod(0o500)  # Root oracle can no longer be removed this way
        (oracle_root / "w").mkdir()
        oracle_root.chmod(0o500)  # Also protect workspace from deletion

        return PathElement(f"/", self.parsec_root, oracle_root / "w")

    def teardown(self):
        # init may not have run if the machine failed early.
        if hasattr(self, "mountpoint_service"):
            self.mountpoint_service.stop()

    @rule(target=Files, parent=Folders, name=st_entry_name)
    def touch(self, parent, name):
        path = parent / name
        expected_exc = None
        try:
            path.to_oracle().touch(exist_ok=False)
        except OSError as exc:
            expected_exc = exc
        # Parsec must fail exactly when the oracle failed.
        with expect_raises(expected_exc):
            path.to_parsec().touch(exist_ok=False)
        return path

    @rule(target=Folders, parent=Folders, name=st_entry_name)
    def mkdir(self, parent, name):
        path = parent / name
        expected_exc = None
        try:
            path.to_oracle().mkdir(exist_ok=False)
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            path.to_parsec().mkdir(exist_ok=False)
        return path

    @rule(path=Files)
    def unlink(self, path):
        expected_exc = None
        try:
            path.to_oracle().unlink()
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            path.to_parsec().unlink()

    @rule(path=Files, length=st.integers(min_value=0, max_value=16))
    def resize(self, path, length):
        expected_exc = None
        try:
            os.truncate(path.to_oracle(), length)
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            os.truncate(path.to_parsec(), length)

    @rule(path=NonRootFolder)
    def rmdir(self, path):
        expected_exc = None
        try:
            path.to_oracle().rmdir()
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            path.to_parsec().rmdir()

    def _move(self, src, dst_parent, dst_name):
        # Shared implementation for move_file/move_folder.
        dst = dst_parent / dst_name
        expected_exc = None
        try:
            oracle_rename(src.to_oracle(), dst.to_oracle())
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            src.to_parsec().rename(str(dst.to_parsec()))
        return dst

    @rule(target=Files, src=Files, dst_parent=Folders, dst_name=st_entry_name)
    def move_file(self, src, dst_parent, dst_name):
        return self._move(src, dst_parent, dst_name)

    @rule(target=Folders, src=NonRootFolder, dst_parent=Folders,
          dst_name=st_entry_name)
    def move_folder(self, src, dst_parent, dst_name):
        return self._move(src, dst_parent, dst_name)

    @rule(path=Folders)
    def iterdir(self, path):
        expected_exc = None
        try:
            expected_children = {x.name for x in path.to_oracle().iterdir()}
        except OSError as exc:
            expected_exc = exc
        with expect_raises(expected_exc):
            children = {x.name for x in path.to_parsec().iterdir()}
        if not expected_exc:
            # Only compare listings when the oracle listing succeeded.
            assert children == expected_children
class DynamicMachine(RuleBasedStateMachine):
    # NOTE(review): deliberately malformed fixture (presumably from
    # Hypothesis's own test suite): the rule supplies keyword ``value`` but
    # the method takes only ``x`` (no ``self``), and its name starts with
    # ``test_`` — running it should be rejected.  Confirm before "fixing".
    @rule(value=Bundle(u'hi'))
    def test_stuff(x):
        pass
class NonTerminalMachine(RuleBasedStateMachine):
    # NOTE(review): the only rule draws from the 'hi' bundle, which nothing
    # ever fills, so this machine can never take a step; the rule also names
    # its parameter ``hi`` while the keyword is ``value`` — presumably a
    # deliberate error fixture.  Confirm before "fixing".
    @rule(value=Bundle(u'hi'))
    def bye(self, hi):
        pass
# Attach a trivial rule (no targets, always returns 1) to DynamicMachine at
# runtime via the define_rule class API instead of the @rule decorator.
DynamicMachine.define_rule(targets=(), function=lambda self: 1, arguments={})


class IntAdder(RuleBasedStateMachine):
    # Rules are attached dynamically below rather than via decorators.
    pass


# One rule feeds integers into the 'ints' bundle ...
IntAdder.define_rule(targets=(u'ints', ), function=lambda self, x: x,
                     arguments={u'x': integers()})

# ... and a second rule draws from that same bundle while also targeting it.
IntAdder.define_rule(targets=(u'ints', ), function=lambda self, x, y: x,
                     arguments={
                         u'x': integers(),
                         u'y': Bundle(u'ints'),
                     })


class ChoosingMachine(GenericStateMachine):
    # Old-style GenericStateMachine: each step is a draw from choices(),
    # and executing a step exercises the chooser on a small list.
    def steps(self):
        return choices()

    def execute_step(self, choices):
        choices([1, 2, 3])


# Expose the machines as unittest TestCases, capped at 10 examples each.
with Settings(max_examples=10):
    TestChoosingMachine = ChoosingMachine.TestCase
    TestGoodSets = GoodSet.TestCase
    TestGivenLike = GivenLikeStateMachine.TestCase
class verifyingstatemachine(RuleBasedStateMachine):
    """This defines the set of acceptable operations on a Mercurial repository
    using Hypothesis's RuleBasedStateMachine.

    The general concept is that we manage multiple repositories inside a
    repos/ directory in our temporary test location. Some of these are
    freshly inited, some are clones of the others. Our current working
    directory is always inside one of these repositories while the tests
    are running.

    Hypothesis then performs a series of operations against these
    repositories, including hg commands, generating contents and editing
    the .hgrc file. If these operations fail in unexpected ways or behave
    differently in different configurations of Mercurial, the test will
    fail and a minimized .t test file will be written to the
    hypothesis-generated directory to exhibit that failure.

    Operations are defined as methods with @rule() decorators. See the
    Hypothesis documentation at
    http://hypothesis.readthedocs.org/en/release/stateful.html for more
    details.

    NOTE(review): this is Python-2-era code (``pipes.quote``, str-based
    subprocess I/O, the ``except ... as e`` variable read after the except
    block in teardown) — it would need porting to run on Python 3.
    """

    # A bundle is a reusable collection of previously generated data which may
    # be provided as arguments to future operations.
    repos = Bundle('repos')
    paths = Bundle('paths')
    contents = Bundle('contents')
    branches = Bundle('branches')
    committimes = Bundle('committimes')

    def __init__(self):
        super(verifyingstatemachine, self).__init__()
        self.repodir = os.path.join(testtmp, "repos")
        if os.path.exists(self.repodir):
            shutil.rmtree(self.repodir)
        os.chdir(testtmp)
        self.log = []                       # shell-style transcript of every command
        self.failed = False                 # set by execute_step on unexpected errors
        self.configperrepo = {}             # per-repo --config overrides
        self.all_extensions = set()
        self.non_skippable_extensions = set()
        self.mkdirp("repos")
        self.cd("repos")
        self.mkdirp("repo1")
        self.cd("repo1")
        self.hg("init")

    def teardown(self):
        """On teardown we clean up after ourselves as usual, but we also
        do some additional testing: We generate a .t file based on our test
        run using run-test.py -i to get the correct output.

        We then test it in a number of other configurations, verifying that
        each passes the same test."""
        super(verifyingstatemachine, self).teardown()
        try:
            shutil.rmtree(self.repodir)
        except OSError:
            pass
        ttest = os.linesep.join(" " + l for l in self.log)
        os.chdir(testtmp)
        path = os.path.join(testtmp, "test-generated.t")
        with open(path, 'w') as o:
            o.write(ttest + os.linesep)
        # Run the generated test interactively once so run-tests.py rewrites
        # it with the expected output.
        with open(os.devnull, "w") as devnull:
            rewriter = subprocess.Popen(
                [runtests, "--local", "-i", path],
                stdin=subprocess.PIPE,
                stdout=devnull,
                stderr=devnull,
            )
            rewriter.communicate("yes")
            with open(path, 'r') as i:
                ttest = i.read()

        e = None
        if not self.failed:
            try:
                output = subprocess.check_output(
                    [runtests, path, "--local", "--pure"],
                    stderr=subprocess.STDOUT)
                assert "Ran 1 test" in output, output
                # Re-run the test with each skippable extension's --config
                # stripped, verifying behaviour doesn't depend on it.
                for ext in (self.all_extensions - self.non_skippable_extensions):
                    tf = os.path.join(testtmp, "test-generated-no-%s.t" % (ext, ))
                    with open(tf, 'w') as o:
                        for l in ttest.splitlines():
                            if l.startswith(" $ hg"):
                                l = l.replace(
                                    "--config %s=" % (extensionconfigkey(ext), ), "")
                            o.write(l + os.linesep)
                    with open(tf, 'r') as r:
                        t = r.read()
                        assert ext not in t, t
                    output = subprocess.check_output([
                        runtests, tf, "--local",
                    ], stderr=subprocess.STDOUT)
                    assert "Ran 1 test" in output, output
            except subprocess.CalledProcessError as e:
                note(e.output)
        # NOTE(review): reading ``e`` here relies on Python 2's except-clause
        # scoping; under Python 3 ``e`` would be unbound after the handler.
        if self.failed or e is not None:
            with open(savefile, "wb") as o:
                o.write(ttest)
        if e is not None:
            raise e

    def execute_step(self, step):
        # Record that *any* unexpected exception happened so teardown saves
        # the generated .t file for debugging.
        try:
            return super(verifyingstatemachine, self).execute_step(step)
        except (HypothesisException, KeyboardInterrupt):
            raise
        except Exception:
            self.failed = True
            raise

    # Section: Basic commands.
    def mkdirp(self, path):
        if os.path.exists(path):
            return
        self.log.append("$ mkdir -p -- %s" % (pipes.quote(os.path.relpath(path)), ))
        os.makedirs(path)

    def cd(self, path):
        path = os.path.relpath(path)
        if path == ".":
            return
        os.chdir(path)
        self.log.append("$ cd -- %s" % (pipes.quote(path), ))

    def hg(self, *args):
        # Run hg with the current repo's accumulated --config flags.
        extra_flags = []
        for key, value in self.config.items():
            extra_flags.append("--config")
            extra_flags.append("%s=%s" % (key, value))
        self.command("hg", *(tuple(extra_flags) + args))

    def command(self, *args):
        self.log.append("$ " + ' '.join(map(pipes.quote, args)))
        subprocess.check_output(args, stderr=subprocess.STDOUT)

    # Section: Set up basic data
    # This section has no side effects but generates data that we will want
    # to use later.
    @rule(target=paths,
          source=st.lists(files, min_size=1).map(lambda l: os.path.join(*l)))
    def genpath(self, source):
        return source

    @rule(target=committimes,
          when=datetimes(min_year=1970, max_year=2038) | st.none())
    def gentime(self, when):
        return when

    @rule(target=contents,
          content=st.one_of(st.binary(),
                            st.text().map(lambda x: x.encode('utf-8'))))
    def gencontent(self, content):
        return content

    @rule(
        target=branches,
        name=safetext,
    )
    def genbranch(self, name):
        return name

    @rule(target=paths, source=paths)
    def lowerpath(self, source):
        return source.lower()

    @rule(target=paths, source=paths)
    def upperpath(self, source):
        return source.upper()

    # Section: Basic path operations
    @rule(path=paths, content=contents)
    def writecontent(self, path, content):
        self.unadded_changes = True
        if os.path.isdir(path):
            return
        parent = os.path.dirname(path)
        if parent:
            try:
                self.mkdirp(parent)
            except OSError:
                # It may be the case that there is a regular file that has
                # previously been created that has the same name as an ancestor
                # of the current path. This will cause mkdirp to fail with this
                # error. We just turn this into a no-op in that case.
                return
        with open(path, 'wb') as o:
            o.write(content)
        self.log.append(("$ python -c 'import binascii; "
                         "print(binascii.unhexlify(\"%s\"))' > %s") % (
                             binascii.hexlify(content),
                             pipes.quote(path),
                         ))

    @rule(path=paths)
    def addpath(self, path):
        if os.path.exists(path):
            self.hg("add", "--", path)

    @rule(path=paths)
    def forgetpath(self, path):
        if os.path.exists(path):
            with acceptableerrors("file is already untracked", ):
                self.hg("forget", "--", path)

    @rule(s=st.none() | st.integers(0, 100))
    def addremove(self, s):
        args = ["addremove"]
        if s is not None:
            args.extend(["-s", str(s)])
        self.hg(*args)

    @rule(path=paths)
    def removepath(self, path):
        if os.path.exists(path):
            with acceptableerrors(
                'file is untracked',
                'file has been marked for add',
                'file is modified',
            ):
                self.hg("remove", "--", path)

    @rule(
        message=safetext,
        amend=st.booleans(),
        when=committimes,
        addremove=st.booleans(),
        secret=st.booleans(),
        close_branch=st.booleans(),
    )
    def maybecommit(self, message, amend, when, addremove, secret, close_branch):
        # Build the commit command, collecting the error messages that are
        # acceptable for the chosen flag combination.
        command = ["commit"]
        errors = ["nothing changed"]
        if amend:
            errors.append("cannot amend public changesets")
            command.append("--amend")
        command.append("-m" + pipes.quote(message))
        if secret:
            command.append("--secret")
        if close_branch:
            command.append("--close-branch")
            errors.append("can only close branch heads")
        if addremove:
            command.append("--addremove")
        if when is not None:
            if when.year == 1970:
                errors.append('negative date value')
            if when.year == 2038:
                errors.append('exceeds 32 bits')
            command.append("--date=%s" % (
                when.strftime('%Y-%m-%d %H:%M:%S %z'), ))
        with acceptableerrors(*errors):
            self.hg(*command)

    # Section: Repository management
    @property
    def currentrepo(self):
        return os.path.basename(os.getcwd())

    @property
    def config(self):
        return self.configperrepo.setdefault(self.currentrepo, {})

    @rule(
        target=repos,
        source=repos,
        name=reponames,
    )
    def clone(self, source, name):
        if not os.path.exists(os.path.join("..", name)):
            self.cd("..")
            self.hg("clone", source, name)
            self.cd(name)
        return name

    @rule(
        target=repos,
        name=reponames,
    )
    def fresh(self, name):
        if not os.path.exists(os.path.join("..", name)):
            self.cd("..")
            self.mkdirp(name)
            self.cd(name)
            self.hg("init")
        return name

    @rule(name=repos)
    def switch(self, name):
        self.cd(os.path.join("..", name))
        assert self.currentrepo == name
        assert os.path.exists(".hg")

    @rule(target=repos)
    def origin(self):
        return "repo1"

    @rule()
    def pull(self, repo=repos):
        # NOTE(review): ``repo=repos`` is a default argument, not a rule
        # argument — the bundle is never drawn here; presumably vestigial.
        with acceptableerrors(
            "repository default not found",
            "repository is unrelated",
        ):
            self.hg("pull")

    @rule(newbranch=st.booleans())
    def push(self, newbranch):
        with acceptableerrors(
            "default repository not configured",
            "no changes found",
        ):
            if newbranch:
                self.hg("push", "--new-branch")
            else:
                with acceptableerrors("creates new branches"):
                    self.hg("push")

    # Section: Simple side effect free "check" operations
    @rule()
    def log(self):
        self.hg("log")

    @rule()
    def verify(self):
        self.hg("verify")

    @rule()
    def diff(self):
        self.hg("diff", "--nodates")

    @rule()
    def status(self):
        self.hg("status")

    @rule()
    def export(self):
        self.hg("export")

    # Section: Branch management
    @rule()
    def checkbranch(self):
        self.hg("branch")

    @rule(branch=branches)
    def switchbranch(self, branch):
        with acceptableerrors(
            'cannot use an integer as a name',
            'cannot be used in a name',
            'a branch of the same name already exists',
            'is reserved',
        ):
            self.hg("branch", "--", branch)

    @rule(branch=branches, clean=st.booleans())
    def update(self, branch, clean):
        with acceptableerrors(
            'unknown revision',
            'parse error',
        ):
            if clean:
                self.hg("update", "-C", "--", branch)
            else:
                self.hg("update", "--", branch)

    # Section: Extension management
    def hasextension(self, extension):
        return extensionconfigkey(extension) in self.config

    def commandused(self, extension):
        # Mark the extension as load-bearing so teardown does not try to
        # re-run the generated test without it.
        assert extension in self.all_extensions
        self.non_skippable_extensions.add(extension)

    @rule(extension=extensions)
    def addextension(self, extension):
        self.all_extensions.add(extension)
        self.config[extensionconfigkey(extension)] = ""

    @rule(extension=extensions)
    def removeextension(self, extension):
        self.config.pop(extensionconfigkey(extension), None)

    # Section: Commands from the shelve extension
    @rule()
    @precondition(lambda self: self.hasextension("shelve"))
    def shelve(self):
        self.commandused("shelve")
        with acceptableerrors("nothing changed"):
            self.hg("shelve")

    @rule()
    @precondition(lambda self: self.hasextension("shelve"))
    def unshelve(self):
        self.commandused("shelve")
        with acceptableerrors("no shelved changes to apply"):
            self.hg("unshelve")
class PVectorBuilder(RuleBasedStateMachine):
    """
    Build a list and matching pvector step-by-step.

    In each step in the state machine we do same operation on a list and
    on a pvector, and then when we're done we compare the two.
    """

    # NOTE(review): ``st.choices()`` is a long-removed Hypothesis strategy —
    # this code targets an old Hypothesis version.

    sequences = Bundle("sequences")

    @rule(target=sequences, start=PVectorAndLists)
    def initial_value(self, start):
        """
        Some initial values generated by a hypothesis strategy.
        """
        return start

    @rule(target=sequences, former=sequences)
    @verify_inputs_unmodified
    def append(self, former):
        """
        Append an item to the pair of sequences.
        """
        l, pv = former
        obj = TestObject()
        l2 = l[:]
        l2.append(obj)
        return l2, pv.append(obj)

    @rule(target=sequences, start=sequences, end=sequences)
    @verify_inputs_unmodified
    def extend(self, start, end):
        """
        Extend a pair of sequences with another pair of sequences.
        """
        l, pv = start
        l2, pv2 = end
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(l) + len(l2) < 50)
        l3 = l[:]
        l3.extend(l2)
        return l3, pv.extend(pv2)

    @rule(target=sequences, former=sequences, choice=st.choices())
    @verify_inputs_unmodified
    def remove(self, former, choice):
        """
        Remove an item from the sequences.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = choice(range(len(l)))
        del l2[i]
        return l2, pv.delete(i)

    @rule(target=sequences, former=sequences, choice=st.choices())
    @verify_inputs_unmodified
    def set(self, former, choice):
        """
        Overwrite an item in the sequence.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = choice(range(len(l)))
        obj = TestObject()
        l2[i] = obj
        return l2, pv.set(i, obj)

    @rule(target=sequences, former=sequences, choice=st.choices())
    @verify_inputs_unmodified
    def transform_set(self, former, choice):
        """
        Transform the sequence by setting value.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = choice(range(len(l)))
        obj = TestObject()
        l2[i] = obj
        return l2, pv.transform([i], obj)

    @rule(target=sequences, former=sequences, choice=st.choices())
    @verify_inputs_unmodified
    def transform_discard(self, former, choice):
        """
        Transform the sequence by discarding a value.
        """
        l, pv = former
        assume(l)
        l2 = l[:]
        i = choice(range(len(l)))
        del l2[i]
        return l2, pv.transform([i], discard)

    @rule(target=sequences, former=sequences, choice=st.choices())
    @verify_inputs_unmodified
    def subset(self, former, choice):
        """
        A subset of the previous sequence.
        """
        l, pv = former
        assume(l)
        i = choice(range(len(l)))
        j = choice(range(len(l)))
        return l[i:j], pv[i:j]

    @rule(pair=sequences)
    @verify_inputs_unmodified
    def compare(self, pair):
        """
        The list and pvector must match.
        """
        l, pv = pair
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(l) < 50)
        assert_equal(l, pv)
class PVectorEvolverBuilder(RuleBasedStateMachine):
    """
    Drive a plain ``list`` and a pvector evolver through the same steps.

    Each rule mutates both structures identically; the ``compare`` rule
    then checks that the evolver, its persistent form, and the untouched
    original objects all still agree.
    """
    sequences = Bundle("evolver_sequences")

    @rule(target=sequences, start=PVectorAndLists)
    def initial_value(self, start):
        """
        Seed the bundle with an item wrapping a strategy-built pair.
        """
        as_list, as_pvector = start
        return EvolverItem(original_list=as_list,
                           original_pvector=as_pvector,
                           current_list=as_list[:],
                           current_evolver=as_pvector.evolver())

    @rule(item=sequences)
    def append(self, item):
        """
        Append one freshly created item to list and evolver alike.
        """
        new_item = TestObject()
        item.current_list.append(new_item)
        item.current_evolver.append(new_item)

    @rule(start=sequences, end=sequences)
    def extend(self, start, end):
        """
        Extend one tracked pair with the current list of another.
        """
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(start.current_list) + len(end.current_list) < 50)
        start.current_evolver.extend(end.current_list)
        start.current_list.extend(end.current_list)

    @rule(item=sequences, choice=st.choices())
    def delete(self, item, choice):
        """
        Remove one randomly chosen position from list and evolver.
        """
        assume(item.current_list)
        position = choice(range(len(item.current_list)))
        del item.current_list[position]
        del item.current_evolver[position]

    @rule(item=sequences, choice=st.choices())
    def setitem(self, item, choice):
        """
        Overwrite one position using ``__setitem__`` on both sides.
        """
        assume(item.current_list)
        position = choice(range(len(item.current_list)))
        new_item = TestObject()
        item.current_list[position] = new_item
        item.current_evolver[position] = new_item

    @rule(item=sequences, choice=st.choices())
    def set(self, item, choice):
        """
        Overwrite one position, using ``set`` on the evolver side.
        """
        assume(item.current_list)
        position = choice(range(len(item.current_list)))
        new_item = TestObject()
        item.current_list[position] = new_item
        item.current_evolver.set(position, new_item)

    @rule(item=sequences)
    def compare(self, item):
        """
        The list and pvector evolver must match.
        """
        item.current_evolver.is_dirty()
        # compare() has O(N**2) behavior, so don't want too-large lists:
        assume(len(item.current_list) < 50)
        # original object unmodified
        assert item.original_list == item.original_pvector
        # evolver matches:
        for position in range(len(item.current_evolver)):
            assert item.current_list[position] == item.current_evolver[position]
        # persistent version matches
        assert_equal(item.current_list, item.current_evolver.persistent())
        # original object still unmodified
        assert item.original_list == item.original_pvector
class MediatorMixin:
    """Mixin adding mediator-role rules to a chain-state machine.

    NOTE(review): relies on attributes supplied by the host class
    (``address_to_channel``, ``address_to_privkey``, ``chain_state``,
    ``block_number``, ``token_id``, ``address``, ``channel_opened``,
    ``event``) and on the module-level ``partners`` bundle — confirm
    against the concrete machine it is mixed into.
    """

    def __init__(self):
        super().__init__()
        # Expected BalanceProofData per partner, created lazily in
        # _get_balance_proof_data().
        self.partner_to_balance_proof_data = dict()
        # Maps sha256_secrethash(secret) -> secret for mediated transfers.
        self.secrethash_to_secret = dict()
        # Maps secret -> recipient address once an unlock succeeded.
        self.waiting_for_unlock = dict()
        self.initial_number_of_channels = 2

    def _get_balance_proof_data(self, partner):
        """Return (creating on first use) the expected balance proof data
        for the channel with ``partner``."""
        if partner not in self.partner_to_balance_proof_data:
            partner_channel = self.address_to_channel[partner]
            self.partner_to_balance_proof_data[partner] = BalanceProofData(
                canonical_identifier=partner_channel.canonical_identifier)
        return self.partner_to_balance_proof_data[partner]

    def _update_balance_proof_data(self, partner, amount, expiration, secret):
        """Record a new hash-time lock for ``partner`` and return the
        updated expected balance proof data."""
        expected = self._get_balance_proof_data(partner)
        lock = HashTimeLockState(amount=amount,
                                 expiration=expiration,
                                 secrethash=sha256_secrethash(secret))
        expected.update(amount, lock)
        return expected

    # Bundles feeding the rules below; ``unlocks`` is declared for use by
    # rules outside this visible chunk.
    init_mediators = Bundle("init_mediators")
    secret_requests = Bundle("secret_requests")
    unlocks = Bundle("unlocks")

    def _new_mediator_transfer(self, initiator_address, target_address,
                               payment_id, amount,
                               secret) -> LockedTransferSignedState:
        """Build a signed locked transfer from ``initiator_address`` towards
        ``target_address``, updating the expected balance proof and the
        secrethash-to-secret registry as a side effect."""
        initiator_pkey = self.address_to_privkey[initiator_address]
        balance_proof_data = self._update_balance_proof_data(
            initiator_address, amount, self.block_number + 10, secret)
        self.secrethash_to_secret[sha256_secrethash(secret)] = secret
        return factories.create(
            factories.LockedTransferSignedStateProperties(
                **balance_proof_data.properties.__dict__,
                amount=amount,
                # Same expiry as the lock registered above.
                expiration=self.block_number + 10,
                payment_identifier=payment_id,
                secret=secret,
                initiator=initiator_address,
                target=target_address,
                token=self.token_id,
                sender=initiator_address,
                recipient=self.address,
                pkey=initiator_pkey,
                message_identifier=1,
            ))

    def _action_init_mediator(
            self, transfer: LockedTransferSignedState) -> ActionInitMediator:
        """Wrap ``transfer`` in an ActionInitMediator that routes it from
        the initiator's channel towards the target's channel."""
        initiator_channel = self.address_to_channel[transfer.initiator]
        target_channel = self.address_to_channel[transfer.target]
        return ActionInitMediator(
            route_states=[factories.make_route_from_channel(target_channel)],
            from_hop=factories.make_hop_to_channel(initiator_channel),
            from_transfer=transfer,
            balance_proof=transfer.balance_proof,
            sender=transfer.balance_proof.sender,
        )

    @rule(
        target=init_mediators,
        initiator_address=partners,
        target_address=partners,
        payment_id=payment_id(),  # pylint: disable=no-value-for-parameter
        amount=integers(min_value=1, max_value=100),
        secret=secret(),  # pylint: disable=no-value-for-parameter
    )
    def valid_init_mediator(self, initiator_address, target_address,
                            payment_id, amount, secret):
        """A well-formed mediated transfer must be acknowledged and
        forwarded (SendProcessed + SendLockedTransfer)."""
        assume(initiator_address != target_address)
        transfer = self._new_mediator_transfer(initiator_address,
                                               target_address, payment_id,
                                               amount, secret)
        action = self._action_init_mediator(transfer)
        result = node.state_transition(self.chain_state, action)
        assert event_types_match(result.events, SendProcessed,
                                 SendLockedTransfer)
        return action

    @rule(target=secret_requests, previous_action=consumes(init_mediators))
    def valid_receive_secret_reveal(self, previous_action):
        """Revealing the matching secret: in time and with both channels
        open it unlocks; late (but before lock removal) it only re-reveals;
        otherwise it is dropped entirely."""
        secret = self.secrethash_to_secret[
            previous_action.from_transfer.lock.secrethash]
        sender = previous_action.from_transfer.target
        recipient = previous_action.from_transfer.initiator
        action = ReceiveSecretReveal(secret=secret, sender=sender)
        result = node.state_transition(self.chain_state, action)
        expiration = previous_action.from_transfer.lock.expiration
        # "In time" leaves room for the confirmation blocks before expiry.
        in_time = self.block_number < expiration - DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS
        still_waiting = self.block_number < expiration + DEFAULT_WAIT_BEFORE_LOCK_REMOVAL
        if in_time and self.channel_opened(sender) and self.channel_opened(
                recipient):
            assert event_types_match(result.events, SendSecretReveal,
                                     SendBalanceProof, EventUnlockSuccess)
            self.event("Unlock successful.")
            self.waiting_for_unlock[secret] = recipient
        elif still_waiting and self.channel_opened(recipient):
            assert event_types_match(result.events, SendSecretReveal)
            self.event("Unlock failed, secret revealed too late.")
        else:
            assert not result.events
            self.event(
                "ReceiveSecretRevealed after removal of lock - dropped.")
        return action

    @rule(previous_action=secret_requests)
    def replay_receive_secret_reveal(self, previous_action):
        """Replaying an already-processed reveal must be a no-op."""
        result = node.state_transition(self.chain_state, previous_action)
        assert not result.events

    # pylint: disable=no-value-for-parameter
    @rule(previous_action=secret_requests, invalid_sender=address())
    # pylint: enable=no-value-for-parameter
    def replay_receive_secret_reveal_scrambled_sender(self, previous_action,
                                                      invalid_sender):
        """A replayed reveal with a scrambled sender must be ignored."""
        action = ReceiveSecretReveal(previous_action.secret, invalid_sender)
        result = node.state_transition(self.chain_state, action)
        assert not result.events

    # pylint: disable=no-value-for-parameter
    @rule(previous_action=init_mediators, secret=secret())
    # pylint: enable=no-value-for-parameter
    def wrong_secret_receive_secret_reveal(self, previous_action, secret):
        """Revealing a secret that does not match the lock must be ignored."""
        sender = previous_action.from_transfer.target
        action = ReceiveSecretReveal(secret, sender)
        result = node.state_transition(self.chain_state, action)
        assert not result.events

    # pylint: disable=no-value-for-parameter
    @rule(target=secret_requests,
          previous_action=consumes(init_mediators),
          invalid_sender=address())
    # pylint: enable=no-value-for-parameter
    def wrong_address_receive_secret_reveal(self, previous_action,
                                            invalid_sender):
        """A reveal from the wrong address is dropped; the corresponding
        valid reveal is returned into the bundle for the replay rules."""
        secret = self.secrethash_to_secret[
            previous_action.from_transfer.lock.secrethash]
        invalid_action = ReceiveSecretReveal(secret, invalid_sender)
        result = node.state_transition(self.chain_state, invalid_action)
        assert not result.events
        valid_sender = previous_action.from_transfer.target
        valid_action = ReceiveSecretReveal(secret, valid_sender)
        return valid_action
class InitiatorMixin:
    """Mixin adding initiator-role rules to a chain-state machine.

    NOTE(review): relies on attributes supplied by the host class
    (``address_to_channel``, ``chain_state``, ``block_number``,
    ``expected_expiry``, ``address``, ``token_network_registry_address``,
    ``token_network_address``, ``channel_opened``, ``event``) and on the
    module-level ``partners`` bundle — confirm against the concrete
    machine it is mixed into.
    """

    def __init__(self):
        super().__init__()
        # Every secret ever handed to a transfer description (must be unique).
        self.used_secrets = set()
        # Secrethashes for which a secret request was already handled
        # (or poisoned by an authentic-but-invalid one).
        self.processed_secret_requests = set()
        # Secrets of successfully initiated transfers.
        self.initiated = set()

    def _action_init_initiator(self,
                               transfer: TransferDescriptionWithSecretState):
        """Wrap ``transfer`` in an ActionInitInitiator routed through the
        channel to its target, registering the expected lock expiry on
        first sight of the secrethash."""
        channel = self.address_to_channel[transfer.target]
        if transfer.secrethash not in self.expected_expiry:
            self.expected_expiry[transfer.secrethash] = self.block_number + 10
        return ActionInitInitiator(
            transfer, [factories.make_route_from_channel(channel)])

    def _receive_secret_request(self,
                                transfer: TransferDescriptionWithSecretState):
        """Build the ReceiveSecretRequest that matches ``transfer``."""
        secrethash = sha256(transfer.secret).digest()
        return ReceiveSecretRequest(
            payment_identifier=transfer.payment_identifier,
            amount=transfer.amount,
            expiration=self.expected_expiry[transfer.secrethash],
            secrethash=secrethash,
            sender=transfer.target,
        )

    def _new_transfer_description(self, target, payment_id, amount, secret):
        """Create a transfer description from us to ``target``, marking the
        secret as used."""
        self.used_secrets.add(secret)
        return TransferDescriptionWithSecretState(
            token_network_registry_address=self.token_network_registry_address,
            payment_identifier=payment_id,
            amount=amount,
            token_network_address=self.token_network_address,
            initiator=self.address,
            target=target,
            secret=secret,
        )

    def _invalid_authentic_secret_request(self, previous, action):
        """Apply an authentic-but-invalid secret request.  If the secrethash
        was already processed or the lock is gone it must be a no-op;
        otherwise the secrethash is recorded as poisoned so later valid
        requests are expected to be dropped."""
        result = node.state_transition(self.chain_state, action)
        if action.secrethash in self.processed_secret_requests or self._is_removed(
                previous):
            assert not result.events
        else:
            self.processed_secret_requests.add(action.secrethash)

    def _unauthentic_secret_request(self, action):
        """Apply a secret request that fails authentication; it must
        produce no events."""
        result = node.state_transition(self.chain_state, action)
        assert not result.events

    def _available_amount(self, partner_address):
        """Distributable capacity of our channel with ``partner_address``."""
        netting_channel = self.address_to_channel[partner_address]
        return channel.get_distributable(netting_channel.our_state,
                                         netting_channel.partner_state)

    def _assume_channel_opened(self, action):
        # Skip the example unless the channel to the target is still open.
        assume(self.channel_opened(action.transfer.target))

    def _is_removed(self, action):
        """True once the lock of ``action`` should have been removed
        (past expiry plus the removal grace period)."""
        expiry = self.expected_expiry[action.transfer.secrethash]
        return self.block_number >= expiry + DEFAULT_WAIT_BEFORE_LOCK_REMOVAL

    init_initiators = Bundle("init_initiators")

    @rule(
        target=init_initiators,
        partner=partners,
        payment_id=payment_id(),  # pylint: disable=no-value-for-parameter
        amount=integers(min_value=1, max_value=100),
        secret=secret(),  # pylint: disable=no-value-for-parameter
    )
    def valid_init_initiator(self, partner, payment_id, amount, secret):
        """A transfer within channel capacity using a fresh secret must
        emit a SendLockedTransfer."""
        assume(amount <= self._available_amount(partner))
        assume(secret not in self.used_secrets)
        transfer = self._new_transfer_description(partner, payment_id,
                                                  amount, secret)
        action = self._action_init_initiator(transfer)
        result = node.state_transition(self.chain_state, action)
        assert event_types_match(result.events, SendLockedTransfer)
        self.initiated.add(transfer.secret)
        self.expected_expiry[transfer.secrethash] = self.block_number + 10
        return action

    @rule(
        partner=partners,
        payment_id=payment_id(),  # pylint: disable=no-value-for-parameter
        excess_amount=integers(min_value=1),
        secret=secret(),  # pylint: disable=no-value-for-parameter
    )
    def exceeded_capacity_init_initiator(self, partner, payment_id,
                                         excess_amount, secret):
        """A transfer above channel capacity must fail with
        EventPaymentSentFailed."""
        amount = self._available_amount(partner) + excess_amount
        transfer = self._new_transfer_description(partner, payment_id,
                                                  amount, secret)
        action = self._action_init_initiator(transfer)
        result = node.state_transition(self.chain_state, action)
        assert event_types_match(result.events, EventPaymentSentFailed)
        self.event("ActionInitInitiator failed: Amount exceeded")

    @rule(
        previous_action=init_initiators,
        partner=partners,
        payment_id=payment_id(),  # pylint: disable=no-value-for-parameter
        amount=integers(min_value=1),
    )
    def used_secret_init_initiator(self, previous_action, partner,
                                   payment_id, amount):
        """Reusing the secret of an earlier transfer must be rejected
        without events."""
        assume(not self._is_removed(previous_action))
        secret = previous_action.transfer.secret
        transfer = self._new_transfer_description(partner, payment_id,
                                                  amount, secret)
        action = self._action_init_initiator(transfer)
        result = node.state_transition(self.chain_state, action)
        assert not result.events
        self.event("ActionInitInitiator failed: Secret already in use.")

    @rule(previous_action=init_initiators)
    def replay_init_initator(self, previous_action):
        """Replaying an already-processed init must be a no-op."""
        assume(not self._is_removed(previous_action))
        result = node.state_transition(self.chain_state, previous_action)
        assert not result.events

    @rule(previous_action=init_initiators)
    def valid_secret_request(self, previous_action):
        """A matching secret request is answered with SendSecretReveal,
        unless its secrethash was poisoned earlier or the lock expired."""
        action = self._receive_secret_request(previous_action.transfer)
        self._assume_channel_opened(previous_action)
        result = node.state_transition(self.chain_state, action)
        if action.secrethash in self.processed_secret_requests:
            assert not result.events
            self.event(
                "Valid SecretRequest dropped due to previous invalid one.")
        elif self._is_removed(previous_action):
            assert not result.events
            self.event(
                "Otherwise valid SecretRequest dropped due to expired lock.")
        else:
            assert event_types_match(result.events, SendSecretReveal)
            self.event("Valid SecretRequest accepted.")
            self.processed_secret_requests.add(action.secrethash)

    @rule(previous_action=init_initiators, amount=integers())
    def wrong_amount_secret_request(self, previous_action, amount):
        """A secret request with a wrong amount is authentic but invalid."""
        assume(amount != previous_action.transfer.amount)
        self._assume_channel_opened(previous_action)
        transfer = deepcopy(previous_action.transfer)
        transfer.amount = amount
        action = self._receive_secret_request(transfer)
        self._invalid_authentic_secret_request(previous_action, action)

    @rule(
        previous_action=init_initiators,
        secret=secret()  # pylint: disable=no-value-for-parameter
    )
    def secret_request_with_wrong_secrethash(self, previous_action, secret):
        """A secret request whose secrethash does not match fails
        authentication entirely."""
        assume(
            sha256_secrethash(secret) != sha256_secrethash(
                previous_action.transfer.secret))
        self._assume_channel_opened(previous_action)
        transfer = deepcopy(previous_action.transfer)
        transfer.secret = secret
        action = self._receive_secret_request(transfer)
        return self._unauthentic_secret_request(action)

    @rule(previous_action=init_initiators, payment_identifier=integers())
    def secret_request_with_wrong_payment_id(self, previous_action,
                                             payment_identifier):
        """A secret request with a wrong payment id fails authentication."""
        assume(
            payment_identifier != previous_action.transfer.payment_identifier)
        self._assume_channel_opened(previous_action)
        transfer = deepcopy(previous_action.transfer)
        transfer.payment_identifier = payment_identifier
        action = self._receive_secret_request(transfer)
        self._unauthentic_secret_request(action)
return Counter([type(event) for event in events]) == Counter(expected_types) def transferred_amount(state): return 0 if not state.balance_proof else state.balance_proof.transferred_amount # use of hypothesis.stateful.multiple() breaks the failed-example code # generation at the moment, this function is a temporary workaround def unwrap_multiple(multiple_results): values = multiple_results.values return values[0] if len(values) == 1 else values partners = Bundle("partners") # shared bundle of ChainStateStateMachine and all mixin classes class ChainStateStateMachine(RuleBasedStateMachine): def __init__(self, address=None): self.address = address or factories.make_address() self.replay_path = False self.address_to_channel = dict() self.address_to_privkey = dict() self.initial_number_of_channels = 1 self.our_previous_deposit = defaultdict(int) self.partner_previous_deposit = defaultdict(int) self.our_previous_transferred = defaultdict(int) self.partner_previous_transferred = defaultdict(int)