def initialize(self, block_number, random, random_seed):
    """Seed the state machine: build a fresh chain state that owns one
    payment network containing a single token network, then open the
    configured number of initial channels and feed them to the bundle.
    """
    self.random = random
    self.random_seed = random_seed
    self.block_number = block_number
    self.block_hash = factories.make_block_hash()
    self.private_key, self.address = factories.make_privkey_address()

    self.chain_state = ChainState(
        pseudo_random_generator=self.random,
        block_number=self.block_number,
        block_hash=self.block_hash,
        our_address=self.address,
        chain_id=factories.UNIT_CHAIN_ID,
    )

    self.token_network_id = factories.UNIT_TOKEN_NETWORK_ADDRESS
    self.token_id = factories.UNIT_TOKEN_ADDRESS
    self.token_network_state = TokenNetworkState(self.token_network_id, self.token_id)

    self.payment_network_id = factories.make_payment_network_identifier()
    self.payment_network_state = PaymentNetworkState(
        self.payment_network_id, [self.token_network_state]
    )

    # Register the payment network on the chain state and map the token
    # network address back to it so later lookups resolve both ways.
    id_to_network = self.chain_state.identifiers_to_paymentnetworks
    id_to_network[self.payment_network_id] = self.payment_network_state
    token_to_payment = self.chain_state.tokennetworkaddresses_to_paymentnetworkaddresses
    token_to_payment[self.token_network_id] = self.payment_network_id

    initial_channels = [
        self.new_channel_with_transaction()
        for _ in range(self.initial_number_of_channels)
    ]
    return multiple(*initial_channels)
async def runner(machine):
    """Drive one full run of an async state machine under Hypothesis.

    Draws and executes steps until either the configured step budget is
    exhausted or the (deliberately tiny) early-stop probability fires,
    checking invariants after every step and tearing down at the end.

    NOTE(review): the exact sequence of start_example/draw_bits/stop_example
    calls is what the shrinker replays — do not reorder these calls.
    """
    try:
        if print_steps:
            machine.print_start()
        # Invariants must already hold on the freshly-initialized machine.
        await machine.check_invariants()
        max_steps = settings.stateful_step_count
        steps_run = 0
        cd = data.conjecture_data
        while True:
            # We basically always want to run the maximum number of steps,
            # but need to leave a small probability of terminating early
            # in order to allow for reducing the number of steps once we
            # find a failing test case, so we stop with probability of
            # 2 ** -16 during normal operation but force a stop when we've
            # generated enough steps.
            cd.start_example(STATE_MACHINE_RUN_LABEL)
            if steps_run == 0:
                # First step is unconditional: force the continue flag on.
                cd.draw_bits(16, forced=1)
            elif steps_run >= max_steps:
                # Budget exhausted: force the stop flag and bail out.
                cd.draw_bits(16, forced=0)
                break
            else:
                # All we really care about is whether this value is zero
                # or non-zero, so if it's > 1 we discard it and insert a
                # replacement value after
                cd.start_example(SHOULD_CONTINUE_LABEL)
                should_continue_value = cd.draw_bits(16)
                if should_continue_value > 1:
                    cd.stop_example(discard=True)
                    cd.draw_bits(
                        16, forced=int(bool(should_continue_value))
                    )
                else:
                    cd.stop_example()
                    if should_continue_value == 0:
                        break
            steps_run += 1
            value = data.conjecture_data.draw(machine.steps())
            # Assign 'result' here in case 'execute_step' fails below
            result = multiple()
            try:
                result = await machine.execute_step(value)
            finally:
                if print_steps:
                    # 'result' is only used if the step has target bundles.
                    # If it does, and the result is a 'MultipleResult',
                    # then 'print_step' prints a multi-variable assignment.
                    machine.print_step(value, result)
            await machine.check_invariants()
            data.conjecture_data.stop_example()
    finally:
        if print_steps:
            machine.print_end()
        # Teardown always runs, even when a step or invariant check raised.
        await machine.teardown()
def new_user(self, user):
    """POST a user to the API and mirror the expected outcome in the model.

    A duplicate id must be rejected with 409 and a 'duplicate-resource'
    error body; a new id must be created with 201.

    Returns the created user's id for the bundle, or ``multiple()``
    (i.e. nothing) when the user already existed.
    """
    resp = self.client.post("/api/users", json=user)
    # Idiom fix: membership test on the dict itself; `.keys()` is redundant.
    if user["id"] in self.model_users:
        assert resp.status_code == 409
        assert resp.is_json
        assert resp.json['type'] == "duplicate-resource"
        return multiple()
    else:
        assert resp.status_code == 201
        self.model_users[user["id"]] = user
        return user["id"]
def new_bin(self, bin):
    """POST a bin to the API and mirror the expected outcome in the model.

    Duplicate ids must yield 409 with a 'duplicate-resource' body; fresh
    ids must yield 201. Returns the new bin's id for the bundle, or
    ``multiple()`` when it was a duplicate.
    """
    resp = self.client.post('/api/bins', json=bin.to_dict(mask_default=True))
    # Idiom fix: membership test on the dict itself; `.keys()` is redundant.
    if bin.id in self.model_bins:
        assert resp.status_code == 409
        assert resp.is_json
        assert resp.json['type'] == 'duplicate-resource'
        return multiple()
    else:
        assert resp.status_code == 201
        self.model_bins[bin.id] = bin
        return bin.id
def new_sku(self, sku):
    """POST a sku to the API and mirror the expected outcome in the model.

    Duplicate ids must yield 409 with a 'duplicate-resource' body; fresh
    ids must yield 201. Returns the new sku's id for the bundle, or
    ``multiple()`` when it was a duplicate.
    """
    resp = self.client.post('/api/skus', json=sku.to_dict(mask_default=True))
    # Idiom fix: membership test on the dict itself; `.keys()` is redundant.
    if sku.id in self.model_skus:
        assert resp.status_code == 409
        assert resp.is_json
        assert resp.json['type'] == 'duplicate-resource'
        return multiple()
    else:
        assert resp.status_code == 201
        self.model_skus[sku.id] = sku
        return sku.id
def new_anonymous_batch(self, batch):
    """POST a batch with no sku to the API, mirroring the expected result.

    Duplicate ids must yield 409 with a 'duplicate-resource' body; fresh
    ids must yield 201 with no error type. Returns the batch id for the
    bundle, or ``multiple()`` when it was a duplicate.
    """
    assert not batch.sku_id
    rp = self.client.post("/api/batches", json=batch.to_dict(mask_default=True))
    # Idiom fix: membership test on the dict itself; `.keys()` is redundant.
    if batch.id in self.model_batches:
        assert rp.status_code == 409
        # Consistency fix: confirm the body is JSON *before* reading rp.json,
        # matching new_bin/new_sku and giving a clearer failure on non-JSON.
        assert rp.is_json
        assert rp.json['type'] == 'duplicate-resource'
        return multiple()
    else:
        assert rp.status_code == 201
        assert rp.json.get('type') is None
        self.model_batches[batch.id] = batch
        return batch.id
def initialize_all(self, block_number, random, random_seed):
    """Seed the multi-client state machine: one shared token network
    registry, one chain state per client, and the configured number of
    initial channels per client — all channels go into the bundle.
    """
    self.random = random
    self.random_seed = random_seed
    self.block_number = block_number
    self.block_hash = factories.make_block_hash()

    self.token_network_address = factories.UNIT_TOKEN_NETWORK_ADDRESS
    self.token_id = factories.UNIT_TOKEN_ADDRESS
    self.token_network_state = TokenNetworkState(
        address=self.token_network_address,
        token_address=self.token_id,
        network_graph=TokenNetworkGraphState(self.token_network_address),
    )

    self.token_network_registry_address = factories.make_token_network_registry_address()
    self.token_network_registry_state = TokenNetworkRegistryState(
        self.token_network_registry_address, [self.token_network_state]
    )

    all_channels = []
    for _ in range(self.number_of_clients):
        privkey, client_address = factories.make_privkey_address()
        self.address_to_privkey[client_address] = privkey

        client_chain_state = ChainState(
            pseudo_random_generator=self.random,
            block_number=self.block_number,
            block_hash=self.block_hash,
            our_address=client_address,
            chain_id=factories.UNIT_CHAIN_ID,
        )
        # Wire the shared registry into this client's view of the chain.
        client_chain_state.identifiers_to_tokennetworkregistries[
            self.token_network_registry_address
        ] = self.token_network_registry_state
        client_chain_state.tokennetworkaddresses_to_tokennetworkregistryaddresses[
            self.token_network_address
        ] = self.token_network_registry_address
        self.address_to_client[client_address] = Client(chain_state=client_chain_state)

        for _ in range(self.initial_number_of_channels):
            all_channels.append(
                self.new_channel_with_transaction(client_address=client_address)
            )

    return multiple(*all_channels)
def new_batch_existing_sku(self, sku_id, data):
    """Draw a batch for an already-known sku, POST it, and mirror the
    expected outcome in the model.

    Duplicate ids must yield 409 with a 'duplicate-resource' body; fresh
    ids must yield 201. Returns the batch id for the bundle, or
    ``multiple()`` when it was a duplicate.
    """
    # assume(self.model_skus != {})  # TODO: check if this is necessary
    batch = data.draw(dst.batches_(sku_id=sku_id))
    rp = self.client.post('/api/batches', json=batch.to_dict(mask_default=True))
    # Idiom fix: membership test on the dict itself; `.keys()` is redundant.
    if batch.id in self.model_batches:
        assert rp.status_code == 409
        # Consistency fix: confirm the body is JSON *before* reading rp.json,
        # matching new_bin/new_sku and giving a clearer failure on non-JSON.
        assert rp.is_json
        assert rp.json['type'] == 'duplicate-resource'
        return multiple()
    else:
        assert rp.status_code == 201
        self.model_batches[batch.id] = batch
        return batch.id
def populate_bundle(self):
    """Rule that deliberately contributes nothing to its target bundle."""
    empty = multiple()
    return empty
def do_not_populate(self):
    """Rule whose result must leave the target bundle untouched."""
    nothing = multiple()
    return nothing
def populate_bundle(self, items):
    """Feed every drawn item into the bundle, tracking the expected size."""
    count = len(items)
    self.expected_bundle_length += count
    return multiple(*items)
def test_multiple():
    """multiple() with no args is empty; with args it retains each value."""
    empty = multiple()
    filled = multiple(1, 2.01, "3", b"4", 5)
    assert len(empty.values) == 0
    assert len(filled.values) == 5
    for expected in (1, 2.01, "3", b"4", 5):
        assert expected in filled.values
def add_creg(self, n):
    """Add a new variable sized creg to the circuit and bundle its bits."""
    register = ClassicalRegister(n)
    self.qc.add_register(register)
    return multiple(*register)
def test_multiple():
    """multiple() with no args is empty; with args it holds exactly them."""
    empty = multiple()
    filled = multiple(1, 2.01, "3", b"4", 5)
    assert len(empty.values) == 0
    assert len(filled.values) == 5
    assert {1, 2.01, "3", b"4", 5} == set(filled.values)
def init_standard_channels(self) -> Any:
    """Seed the bundle with all of the chat's standard channels."""
    channels = self.chat.standard_channels
    return sta.multiple(*channels)
def add_qreg(self, n):
    """Add a qreg of up to ``n`` qubits, clamped so the circuit never
    exceeds ``self.max_qubits`` total; bundle the new qubits."""
    remaining = self.max_qubits - len(self.qc.qubits)
    size = min(n, remaining)
    register = QuantumRegister(size)
    self.qc.add_register(register)
    return multiple(*register)
def add_boxes(self, new_boxes):
    """Register each drawn box with the manager, track it locally, and
    bundle the boxes for later rules."""
    for box in new_boxes:
        self.manager.register(box)
        self.current_boxes.add(box)
        # Registration must have attached the manager back-reference.
        assert box.manager is not None
    return multiple(*new_boxes)
def mixptr(self, a, b):
    """Invoke the native ``mixptr`` on the pair and record the mapping
    in both directions; both arguments go back into the bundle."""
    mixed = lib.mixptr(a, b)
    self._var = mixed
    self._contents = dict([(a, b), (b, a)])
    return stateful.multiple(a, b)
async def initialize(self):
    """Seed the bundle with the two starting values."""
    seeds = (1, 2)
    return multiple(*seeds)