def test_same_timeslot_watch(self):
    """Two blocks signed by one validator in the same timeslot are conflicts."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    validator_a = Private.publickey(Private.generate())
    validator_b = Private.publickey(Private.generate())
    validator_c = Private.publickey(Private.generate())

    first = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(first, 1, validator_a)

    second = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second, 1, validator_b)

    # the same validator signs a second block in the same timeslot
    second_twin = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second_twin, 1, validator_b)

    merging = ChainGenerator.insert_dummy(dag, [second, second_twin], 3)
    watcher.on_new_block_by_validator(merging, 1, validator_c)

    found = watcher.get_conflicts_by_block(second)
    self.assertEqual(len(found), 2)
    self.assertIn(second, found)
    self.assertIn(second_twin, found)

    # a block with no same-slot twin reports no conflicts
    self.assertEqual(watcher.get_conflicts_by_block(first), None)
def test_explicit_conflict(self):
    """A validator forking its own chain yields explicit conflicts, no candidates."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    key_one = Private.publickey(Private.generate())
    key_two = Private.publickey(Private.generate())

    root = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(root, 1, key_two)

    middle = ChainGenerator.insert_dummy(dag, [root], 2)
    watcher.on_new_block_by_validator(middle, 1, key_one)

    fork_a = ChainGenerator.insert_dummy(dag, [middle], 3)
    watcher.on_new_block_by_validator(fork_a, 1, key_two)
    fork_b = ChainGenerator.insert_dummy(dag, [middle], 3)
    watcher.on_new_block_by_validator(fork_b, 1, key_two)

    tops = dag.get_top_hashes()

    # here block was signed by node even before merge appeared
    # this is explicit merge and both following blocks are conflicting
    explicits, candidates = watcher.find_conflicts_in_between(tops)
    self.assertEqual(len(explicits), 2)
    self.assertEqual(len(candidates), 0)
    self.assertIn(fork_a, explicits)
    self.assertIn(fork_b, explicits)
def test_different_timeslot_watch(self):
    """A validator signing in two different timeslots still creates a conflict."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    pub_a = Private.publickey(Private.generate())
    pub_b = Private.publickey(Private.generate())
    pub_c = Private.publickey(Private.generate())

    first = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(first, 1, pub_a)

    second = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second, 1, pub_b)

    # second block is signed by third validator
    # its not possible by usual means, but quite possible when we have two different epoch seeds
    second_twin = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second_twin, 1, pub_c)

    third = ChainGenerator.insert_dummy(dag, [second, second_twin], 3)
    watcher.on_new_block_by_validator(third, 1, pub_c)

    found = watcher.get_conflicts_by_block(third)
    self.assertEqual(len(found), 2)
    self.assertIn(second_twin, found)
    self.assertIn(third, found)
def test_find_conflicts(self):
    """Same-slot blocks by one validator form a single candidate conflict group."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    first_pub = Private.publickey(Private.generate())
    second_pub = Private.publickey(Private.generate())

    head = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(head, 1, first_pub)

    twin_a = ChainGenerator.insert_dummy(dag, [head], 2)
    watcher.on_new_block_by_validator(twin_a, 1, second_pub)
    twin_b = ChainGenerator.insert_dummy(dag, [head], 2)
    watcher.on_new_block_by_validator(twin_b, 1, second_pub)

    tops = dag.get_top_hashes()
    explicits, candidate_groups = watcher.find_conflicts_in_between(tops)

    self.assertEqual(len(explicits), 0)
    self.assertEqual(len(candidate_groups), 1)
    group = candidate_groups[0]
    self.assertEqual(len(group), 2)
    self.assertIn(twin_a, group)
    self.assertIn(twin_b, group)
def test_both_types_of_conflicts(self):
    """Explicit conflicts and candidate groups can coexist in one DAG."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    pub_a = Private.publickey(Private.generate())
    pub_b = Private.publickey(Private.generate())
    pub_c = Private.publickey(Private.generate())

    first = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(first, 1, pub_a)

    second = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second, 1, pub_b)
    second_twin = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second_twin, 1, pub_b)

    third = ChainGenerator.insert_dummy(dag, [second], 3)
    watcher.on_new_block_by_validator(third, 1, pub_c)

    # this is possible if we have two epoch seeds
    fourth = ChainGenerator.insert_dummy(dag, [second_twin], 4)
    watcher.on_new_block_by_validator(fourth, 1, pub_a)

    tops = dag.get_top_hashes()
    explicits, candidate_groups = watcher.find_conflicts_in_between(tops)

    self.assertEqual(len(explicits), 1)
    self.assertIn(fourth, explicits)
    self.assertEqual(len(candidate_groups), 1)
    self.assertEqual(len(candidate_groups[0]), 2)
    self.assertIn(second, candidate_groups[0])
    self.assertIn(second_twin, candidate_groups[0])
def test_conflicts_with_skips(self): dag = Dag(0) # generate test case # time_slot [0, 1, 2, 3, 4, 5] # ------------------------------- # 1 ------- [0, 1, 2, 3, , 5, 6, 7, 8] # 2 ------- [ , , , 3, 4, , 6, 7, 8] # 3 ------- [ , , , , 4, 5, 6, , 8] # block number 3 MUST BE signed by same key private1 = Private.generate() private2 = Private.generate() private3 = Private.generate() top_hash_1 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.genesis_block().get_hash(), range=range(1, 9), indices_to_skip=[4], dummy_private=private1) top_hash_2 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[2][0].get_hash(), range=range(3, 9), indices_to_skip=[5], dummy_private=private2) top_hash_3 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[3][1].get_hash(), range=range(4, 9), indices_to_skip=[7], dummy_private=private3) # DagVisualizer.visualize(dag) conflict_finder = ConflictFinder(dag) top_blocks = list(dag.get_top_blocks().keys()) top, conflicts = conflict_finder.find_conflicts(top_blocks) # assert determined top (it can be one of top_hash1,2,3) tops = [top_hash_1, top_hash_2, top_hash_3] self.assertIn(top, tops) if top == top_hash_1: # test conflicts # conflicts include all [3],[4,4],[5],[6,6],[7],[8,8] # EXCLUDE flatten top chain from list of conflict block hashes self.assertEqual(len(conflicts), 9) if top == top_hash_2: # test conflicts # conflicts include all [3],[4],[5,5],[6,6],[7],[8,8] # EXCLUDE flatten top chain from list of conflict block hashes self.assertEqual(len(conflicts), 9) if top == top_hash_3: # test conflicts # conflicts include all [3],[4],[5],[6,6],[7,7],[8,8] # EXCLUDE flatten top chain from list of conflict block hashes self.assertEqual(len(conflicts), 9)
def test_complicated_dag_with_skips(self): dag = Dag(0) # generate test case # time_slot [0, 1, 2, 3, 4, 5] # ------------------------------- # 1 ------- [-, -, 2, 3, 4, 5, , , 8] # 2 ------- [0, 1, 2, , , 5, 6, 7, 8] # 3 ------- [-, -, -, 3, 4, , 6, 7, 8] # 4 ------- [-, -, -, -, 4, 5, 6, , 8] # block number 3 MUST BE signed by same key private1 = Private.generate() private2 = Private.generate() private3 = Private.generate() private4 = Private.generate() top_hash_2 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.genesis_block().get_hash(), range=range(1, 9), indices_to_skip=[3, 4], dummy_private=private2) top_hash_1 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[1][0].get_hash(), range=range(2, 9), indices_to_skip=[6, 7], dummy_private=private1) top_hash_3 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[2][0].get_hash(), range=range(3, 9), indices_to_skip=[5], dummy_private=private3) top_hash_4 = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[3][1].get_hash(), range=range(4, 9), indices_to_skip=[7], dummy_private=private4) # DagVisualizer.visualize(dag) conflict_finder = ConflictFinder(dag) top_blocks = list(dag.get_top_blocks().keys()) top, conflicts = conflict_finder.find_conflicts(top_blocks) # assert determined top (it can be one of longest top_hash_3,4) tops = [top_hash_3, top_hash_4] self.assertIn(top, tops) # test conflicts self.assertEqual(len(conflicts), 13)
def test_pack_parse_stakerelease_transaction(self):
    """StakeReleaseTransaction survives a pack()/parse() round trip.

    Fix: sign with the same key whose pubkey is embedded in the transaction.
    The original signed with an unrelated freshly generated key, which is
    inconsistent with the sibling pack/parse tests (stakehold, gossip) and
    would never pass a signature verification against `pubkey`.
    """
    private = Private.generate()
    original = StakeReleaseTransaction()
    original.pubkey = Private.publickey(private)
    original.signature = Private.sign(original.get_hash(), private)

    raw = original.pack()
    restored = StakeReleaseTransaction()
    restored.parse(raw)

    self.assertEqual(original.get_hash(), restored.get_hash())
def test_pack_parse_penalty_transaction(self):
    """PenaltyTransaction survives a pack()/parse() round trip."""
    original = PenaltyTransaction()
    original.violator_pubkey = Private.publickey(Private.generate())
    original.conflicts = [os.urandom(32) for _ in range(3)]
    original.signature = Private.sign(original.get_hash(), Private.generate())

    restored = PenaltyTransaction()
    restored.parse(original.pack())

    self.assertEqual(original.get_hash(), restored.get_hash())
def test_merge_in_merge(self):
    """Build a DAG where a merging block is itself merged again, then check
    the exact order in which MergingIter walks the blocks."""
    dag = Dag(0)
    genesis_hash = dag.genesis_block().get_hash()
    # main chain with holes at slots 1 and 3
    ChainGenerator.fill_with_dummies_and_skips(dag, genesis_hash, range(1, 5), [1, 3])
    second_block = dag.blocks_by_number[2][0].get_hash()
    # short side chain: a single block at slot 3
    ChainGenerator.fill_with_dummies_and_skips(dag, second_block, range(3, 4), [])
    tops = dag.get_top_hashes()
    # first merging block at slot 5 joins the two branches
    merging_block = BlockFactory.create_block_with_timestamp(tops, BLOCK_TIME * 5)
    merging_signed_block = BlockFactory.sign_block(merging_block, Private.generate())
    dag.add_signed_block(5, merging_signed_block)
    # long sparse chain from genesis (only slots 1 and 6 filled)
    ChainGenerator.fill_with_dummies_and_skips(dag, genesis_hash, range(1, 7), [2, 3, 4, 5])
    tops = dag.get_top_hashes()
    # second merging block at slot 7 joins everything
    merging_block = BlockFactory.create_block_with_timestamp(tops, BLOCK_TIME * 7)
    merging_signed_block = BlockFactory.sign_block(merging_block, Private.generate())
    dag.add_signed_block(7, merging_signed_block)
    # DagVisualizer.visualize(dag, True)
    iterator = MergingIter(dag, merging_block.get_hash())
    # shortest chain goes last
    # 3 and 4 are swapped because 4 has a priority
    # TODO But is it okay? Maybe we should sometimes give priority to earlier blocks if equal?
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[7][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[6][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[1][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[5][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[3][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[4][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[2][0].get_hash())
    self.assertEqual(iterator.next().get_hash(), dag.blocks_by_number[0][0].get_hash())
def test_ancestry(self):
    """is_ancestor follows only the actual parent links, not slot numbers."""
    dag = Dag(0)
    signer = Private.generate()

    def add_block(number, prevs, timestamp):
        # create, sign and insert one block; return it for chaining
        blk = BlockFactory.create_block_with_timestamp(prevs, timestamp)
        dag.add_signed_block(number, BlockFactory.sign_block(blk, signer))
        return blk

    main1 = add_block(1, [dag.genesis_block().get_hash()], BLOCK_TIME)
    main2 = add_block(2, [main1.get_hash()], BLOCK_TIME * 2)
    main3 = add_block(3, [main2.get_hash()], BLOCK_TIME * 3)

    # alternative chain
    alt2 = add_block(2, [main1.get_hash()], BLOCK_TIME * 2 + 1)
    alt3 = add_block(3, [alt2.get_hash()], BLOCK_TIME * 3 + 1)

    self.assertEqual(dag.is_ancestor(alt3.get_hash(), alt2.get_hash()), True)
    self.assertEqual(dag.is_ancestor(alt3.get_hash(), main2.get_hash()), False)
def test_top_blocks(self):
    """get_epoch_hashes() maps the current top block to the epoch hash it
    belongs to, across two full epoch rollovers."""
    dag = Dag(0)
    epoch = Epoch(dag)
    private = Private.generate()
    epoch_hash = dag.genesis_block().get_hash()
    # at genesis both the top and the epoch hash are genesis itself
    self.assertEqual(dag.genesis_block().get_hash(), list(epoch.get_epoch_hashes().keys())[0])
    self.assertEqual(dag.genesis_block().get_hash(), list(epoch.get_epoch_hashes().values())[0])
    block1 = BlockFactory.create_block_with_timestamp([dag.genesis_block().get_hash()], BLOCK_TIME)
    signed_block1 = BlockFactory.sign_block(block1, private)
    dag.add_signed_block(1, signed_block1)
    # the top advances but the epoch hash is still genesis
    self.assertEqual(block1.get_hash(), list(epoch.get_epoch_hashes().keys())[0])
    self.assertEqual(epoch_hash, list(epoch.get_epoch_hashes().values())[0])
    prev_hash = block1.get_hash()
    epoch_length = ROUND_DURATION * 6 + 1
    # fill the remainder of the first epoch with a linear chain
    for i in range(2, epoch_length + 1):
        block = BlockFactory.create_block_with_timestamp([prev_hash], BLOCK_TIME * i)
        signed_block = BlockFactory.sign_block(block, private)
        dag.add_signed_block(i, signed_block)
        prev_hash = block.get_hash()
    # NOTE(review): reconstructed placement — the rollover check appears to run
    # once after the loop, when the next block number would start a new epoch
    if epoch.is_new_epoch_upcoming(epoch_length + 1):
        epoch.accept_tops_as_epoch_hashes()
    top_block_hash = dag.blocks_by_number[epoch_length][0].get_hash()
    epoch_hash = dag.blocks_by_number[epoch_length][0].get_hash()
    self.assertEqual(top_block_hash, list(epoch.get_epoch_hashes().keys())[0])
    self.assertEqual(epoch_hash, list(epoch.get_epoch_hashes().values())[0])
    epoch2 = epoch_length * 2 + 1
    # fill the second epoch the same way
    for i in range(epoch_length + 1, epoch2):
        block = BlockFactory.create_block_with_timestamp([prev_hash], BLOCK_TIME * i)
        signed_block = BlockFactory.sign_block(block, private)
        dag.add_signed_block(i, signed_block)
        prev_hash = block.get_hash()
    if epoch.is_new_epoch_upcoming(epoch2):
        epoch.accept_tops_as_epoch_hashes()
    top_block_hash = dag.blocks_by_number[epoch2 - 1][0].get_hash()
    epoch_hash = dag.blocks_by_number[epoch2 - 1][0].get_hash()
    self.assertEqual(top_block_hash, list(epoch.get_epoch_hashes().keys())[0])
    self.assertEqual(epoch_hash, list(epoch.get_epoch_hashes().values())[0])
def try_to_publish_public_key(self, current_block_number):
    """Generate and broadcast a fresh epoch public key if this node is an
    allowed publisher for the PUBLIC round and has not published yet."""
    if self.epoch_private_keys:
        # an epoch key was already generated — nothing more to publish
        return

    for _, epoch_hash in self.epoch.get_epoch_hashes().items():
        randomizers = self.permissions.get_ordered_randomizers_pubkeys_for_round(epoch_hash, Round.PUBLIC)
        publisher_keys = [validator.public_key for validator in randomizers]
        if self.node_pubkey not in publisher_keys:
            continue

        signer_private = self.block_signer.private_key
        signer_index = self.permissions.get_signer_index_from_public_key(self.node_pubkey, epoch_hash)
        fresh_private = Private.generate()
        tx = TransactionFactory.create_public_key_transaction(generated_private=fresh_private,
                                                              epoch_hash=epoch_hash,
                                                              validator_index=signer_index,
                                                              node_private=signer_private)
        if self.behaviour.malicious_wrong_signature:
            # deliberately corrupt the first byte of the signature
            tx.signature = b'0' + tx.signature[1:]

        self.epoch_private_keys.append(fresh_private)
        self.logger.debug("Broadcasted public key")
        self.logger.debug(Keys.to_visual_string(tx.generated_pubkey))
        self.mempool.add_transaction(tx)
        self.network.broadcast_transaction(self.node_id, TransactionParser.pack(tx))
def test_chain_length(self):
    """calculate_chain_length counts blocks along the parent links to genesis."""
    dag = Dag(0)
    key = Private.generate()

    def add_block(number, prevs, timestamp):
        # create, sign and insert one block; return it for chaining
        blk = BlockFactory.create_block_with_timestamp(prevs, timestamp)
        dag.add_signed_block(number, BlockFactory.sign_block(blk, key))
        return blk

    b1 = add_block(1, [dag.genesis_block().get_hash()], BLOCK_TIME)
    b2 = add_block(2, [b1.get_hash()], BLOCK_TIME * 2)
    b3 = add_block(3, [b2.get_hash()], BLOCK_TIME * 3)

    # alternative chain
    alt2 = add_block(2, [b1.get_hash()], BLOCK_TIME * 2 + 1)

    self.assertEqual(dag.calculate_chain_length(alt2.get_hash(), dag.genesis_hash()), 3)
    self.assertEqual(dag.calculate_chain_length(b3.get_hash(), dag.genesis_hash()), 4)
def test_storing_tx_by_hash(self):
    """Transactions carried by accepted blocks are indexed in dag.transactions_by_hash.

    Fix: the original assertions used set(dag.transactions_by_hash).issuperset({h: tx}),
    which iterates only dictionary *keys* — the stored transaction object was never
    verified. The assertions now check both the key and the mapped value.
    """
    dag = Dag(0)
    private0 = Private.generate()
    private1 = Private.generate()
    private2 = Private.generate()

    # add block 1 (carries no transactions)
    block1 = BlockFactory.create_block_with_timestamp([dag.genesis_block().get_hash()], BLOCK_TIME)
    signed_block1 = BlockFactory.sign_block(block1, private0)
    dag.add_signed_block(1, signed_block1)
    # index must still be empty
    self.assertEqual(len(dag.transactions_by_hash), 0)

    # add block 2 carrying two gossip transactions
    block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()], BLOCK_TIME)
    tx1 = TransactionFactory.create_negative_gossip_transaction(1, private1)
    tx2 = TransactionFactory.create_positive_gossip_transaction(block2.get_hash(), private1)
    block2.system_txs.append(tx1)
    block2.system_txs.append(tx2)
    signed_block2 = BlockFactory.sign_block(block2, private1)
    dag.add_signed_block(2, signed_block2)

    # both transactions are indexed under their hash and map to themselves
    self.assertEqual(dag.transactions_by_hash.get(tx1.get_hash()), tx1)
    self.assertEqual(dag.transactions_by_hash.get(tx2.get_hash()), tx2)

    block3 = BlockFactory.create_block_with_timestamp([block2.get_hash()], BLOCK_TIME)
    signed_block3 = BlockFactory.sign_block(block3, private2)
    dag.add_signed_block(3, signed_block3)

    # earlier transactions remain indexed after more blocks arrive
    self.assertEqual(dag.transactions_by_hash.get(tx1.get_hash()), tx1)
    self.assertEqual(dag.transactions_by_hash.get(tx2.get_hash()), tx2)
def insert_dummy(dag, prev_hashes, position):
    """Insert one throwaway signed block at *position* and return its hash.

    The timestamp is offset by the number of blocks already occupying the
    slot, so each dummy in the same timeslot gets a distinct timestamp.
    """
    slot_occupancy = len(dag.blocks_by_number.get(position, []))
    # NOTE(review): `<=` allows an offset equal to BLOCK_TIME, which would push
    # the timestamp into the next timeslot — confirm this is intended
    assert slot_occupancy <= BLOCK_TIME, "This much blocks in one timeslot may lead to problems"
    throwaway_key = Private.generate()
    block = BlockFactory.create_block_with_timestamp(prev_hashes, BLOCK_TIME * position + slot_occupancy)
    dag.add_signed_block(position, BlockFactory.sign_block(block, throwaway_key))
    return block.get_hash()
def create_dummy_commit_reveal(random_bytes, epoch_hash):
    """Build a matching commit/reveal transaction pair around *random_bytes*."""
    signing_private = Private.generate()
    ephemeral_private = Private.generate()

    commit = CommitRandomTransaction()
    commit.rand = Private.encrypt(random_bytes, ephemeral_private)
    commit.pubkey_index = 10
    commit.signature = Private.sign(commit.get_signing_hash(epoch_hash), signing_private)

    reveal = RevealRandomTransaction()
    reveal.commit_hash = commit.get_hash()
    # the reveal discloses the ephemeral key that encrypted the commit
    reveal.key = Keys.to_bytes(ephemeral_private)

    return commit, reveal
def test_different_epoch_watch(self):
    """Conflicts are tracked per epoch: the same key may legitimately sign
    in each epoch, and same-slot twins conflict only within their epoch."""
    dag = Dag(0)
    watcher = ConflictWatcher(dag)
    pub_a = Private.publickey(Private.generate())
    pub_b = Private.publickey(Private.generate())
    pub_c = Private.publickey(Private.generate())

    first = ChainGenerator.insert_dummy(dag, [dag.genesis_hash()], 1)
    watcher.on_new_block_by_validator(first, 1, pub_a)

    second = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second, 1, pub_b)
    second_twin = ChainGenerator.insert_dummy(dag, [first], 2)
    watcher.on_new_block_by_validator(second_twin, 1, pub_b)

    third = ChainGenerator.insert_dummy(dag, [second, second_twin], 3)
    watcher.on_new_block_by_validator(third, 1, pub_c)

    # switch to next epoch
    fourth = ChainGenerator.insert_dummy(dag, [third], 4)
    watcher.on_new_block_by_validator(fourth, 2, pub_b)
    fourth_twin = ChainGenerator.insert_dummy(dag, [third], 4)
    watcher.on_new_block_by_validator(fourth_twin, 2, pub_b)

    # first epoch conflicts
    first_epoch_conflicts = watcher.get_conflicts_by_block(second)
    self.assertEqual(len(first_epoch_conflicts), 2)
    self.assertIn(second, first_epoch_conflicts)
    self.assertIn(second_twin, first_epoch_conflicts)

    # second epoch conflicts of the same public key
    second_epoch_conflicts = watcher.get_conflicts_by_block(fourth)
    self.assertEqual(len(second_epoch_conflicts), 2)
    self.assertIn(fourth, second_epoch_conflicts)
    self.assertIn(fourth_twin, second_epoch_conflicts)

    # a block with no twin has no conflicts
    self.assertEqual(watcher.get_conflicts_by_block(first), None)
def create_commit_reveal_pair(node_private, random_bytes, pubkey_index, epoch_hash):
    """Create a commit transaction for *random_bytes* plus the reveal that opens it."""
    ephemeral = Private.generate()
    sealed = Private.encrypt(random_bytes, ephemeral)
    commit = TransactionFactory.create_commit_random_transaction(sealed, pubkey_index, epoch_hash, node_private)
    reveal = TransactionFactory.create_reveal_random_transaction(commit.get_hash(), ephemeral)
    return commit, reveal
def test_find_conflicts_longest_chain(self): dag = Dag(0) # generate test case # time_slot [0, 1, 2, 3, 4, 5, 6] # ------------------------------- # 1 ------- [0, 1, 2, 3, 4, 5, 6] # 2 ------- [ , , , 3, 4, 5, ] # 3 ------- [ , , , , 4, 5, ] # block number 3 MUST BE signed by same key private1 = Private.generate() private2 = Private.generate() private3 = Private.generate() determinated_top_hash = \ ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.genesis_block().get_hash(), range=range(1, 7), indices_to_skip=[], dummy_private=private1) ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[2][0].get_hash(), range=range(3, 6), indices_to_skip=[], dummy_private=private2) ChainGenerator.fill_with_dummies_and_skips(dag=dag, prev_hash=dag.blocks_by_number[3][1].get_hash(), range=range(4, 6), indices_to_skip=[], dummy_private=private3) # DagVisualizer.visualize(dag) conflict_finder = ConflictFinder(dag) top_blocks = list(dag.get_top_blocks().keys()) top, conflicts = conflict_finder.find_conflicts(top_blocks) # assert determined top self.assertEqual(determinated_top_hash, top) # test conflicts [3],[4,4],[5,5] EXCLUDE flatten top chain from list of conflict block hashes self.assertEqual(len(conflicts), 5)
def test_pack_parse_stakehold_transaction(self):
    """StakeHoldTransaction survives a pack()/parse() round trip."""
    staker = Private.generate()
    original = StakeHoldTransaction()
    original.amount = 1000
    original.pubkey = Private.publickey(staker)
    original.signature = Private.sign(original.get_hash(), staker)

    restored = StakeHoldTransaction()
    restored.parse(original.pack())

    self.assertEqual(original.get_hash(), restored.get_hash())
def insert_dummy_with_payments(dag, prev_hashes, payments, position):
    """Insert a dummy signed block carrying a block reward plus *payments*.

    Returns a tuple ``(block_hash, block_reward_hash)``.
    """
    occupancy = len(dag.blocks_by_number.get(position, []))
    # NOTE(review): `<=` permits an offset equal to BLOCK_TIME, spilling the
    # timestamp into the next timeslot — confirm this is intended
    assert occupancy <= BLOCK_TIME, "This much blocks in one timeslot may lead to problems"
    block = BlockFactory.create_block_with_timestamp(prev_hashes, BLOCK_TIME * position + occupancy)
    # the reward transaction always goes first in the payment list
    reward = TransactionFactory.create_block_reward(urandom(32), position)
    block.payment_txs = [reward] + payments
    dag.add_signed_block(position, BlockFactory.sign_block(block, Private.generate()))
    return block.get_hash(), reward.get_hash()
def add_stakeholders(self, count):
    """Register *count* additional nodes that want to hold stake."""
    stake_behaviour = Behaviour()
    stake_behaviour.wants_to_hold_stake = True
    for _ in range(count):
        # node id continues from the current network size
        node_index = len(self.network.nodes)
        logger = logging.getLogger("Node " + str(node_index))
        fresh_node = Node(genesis_creation_time=1,
                          block_signer=BlockSigner(Private.generate()),
                          node_id=node_index,
                          network=self.network,
                          behaviour=stake_behaviour,
                          logger=logger)
        self.network.register_node(fresh_node)
def test_private_keys_extraction(self):
    """Private keys published in PRIVATE-round blocks are recoverable from
    the finished epoch in publication order."""
    dag = Dag(0)
    epoch = Epoch(dag)
    node_private = Private.generate()
    prev_hash = dag.genesis_block().get_hash()
    round_start, round_end = Epoch.get_round_bounds(1, Round.PRIVATE)
    # plain filler blocks up to the start of the PRIVATE round
    for i in range(1, round_start):
        block = BlockFactory.create_block_with_timestamp([prev_hash], BLOCK_TIME * i)
        signed_block = BlockFactory.sign_block(block, node_private)
        dag.add_signed_block(i, signed_block)
        prev_hash = block.get_hash()
    generated_private_keys = []
    # one PrivateKeyTransaction per PRIVATE-round block
    for i in range(round_start, round_end):  # intentionally skip last block of round
        generated_private = Private.generate()
        generated_private_keys.append(Keys.to_bytes(generated_private))
        private_key_tx = PrivateKeyTransaction()
        private_key_tx.key = Keys.to_bytes(generated_private)
        block = Block()
        block.system_txs = [private_key_tx]
        block.prev_hashes = dag.get_top_blocks_hashes()
        block.timestamp = i * BLOCK_TIME
        signed_block = BlockFactory.sign_block(block, node_private)
        dag.add_signed_block(i, signed_block)
        prev_hash = block.get_hash()
    # complete the epoch so the keys become extractable
    ChainGenerator.fill_with_dummies(dag, prev_hash, Epoch.get_round_range(1, Round.FINAL))
    epoch_hash = dag.blocks_by_number[ROUND_DURATION * 6 + 1][0].get_hash()
    extracted_privates = epoch.get_private_keys_for_epoch(epoch_hash)
    # extracted keys must match the generated ones, in order
    for i in range(0, ROUND_DURATION - 1):
        self.assertEqual(extracted_privates[i], generated_private_keys[i])
def test_parse_pack_gossip_negative(self):
    """NegativeGossipTransaction survives a pack()/parse() round trip."""
    signer = Private.generate()
    original = NegativeGossipTransaction()
    original.pubkey = Private.publickey(signer)
    original.timestamp = Time.get_current_time()
    original.number_of_block = 47
    original.signature = Private.sign(original.get_hash(), signer)

    restored = NegativeGossipTransaction()
    restored.parse(original.pack())

    self.assertEqual(original.get_hash(), restored.get_hash())
def test_getting_tx_by_hash(self):
    """Transactions inside an accepted block are retrievable by hash;
    an unattached transaction must not be indexed.

    Fix: the original membership assertions used
    set(dag.transactions_by_hash).issuperset({h: tx}), which inspects only
    dictionary *keys*; the mapped transaction objects are now verified too.
    """
    dag = Dag(0)
    private = Private.generate()

    block1 = BlockFactory.create_block_with_timestamp([dag.genesis_block().get_hash()], BLOCK_TIME)
    tx1 = TransactionFactory.create_negative_gossip_transaction(1, private)
    tx2 = TransactionFactory.create_positive_gossip_transaction(block1.get_hash(), private)
    tx3 = TransactionFactory.create_penalty_gossip_transaction({tx1.get_hash(): tx2.get_hash()}, private)
    # never appended to any block, so it must not appear in the index
    not_appended_tx = TransactionFactory.create_public_key_transaction(generated_private=Private.generate(),
                                                                      epoch_hash=sha256(b'epoch_hash').digest(),
                                                                      validator_index=1,
                                                                      node_private=private)
    block1.system_txs.append(tx1)
    block1.system_txs.append(tx2)
    block1.system_txs.append(tx3)
    signed_block1 = BlockFactory.sign_block(block1, private)
    dag.add_signed_block(1, signed_block1)

    # each appended transaction is indexed under its hash and maps to itself
    self.assertEqual(dag.transactions_by_hash.get(tx1.get_hash()), tx1)
    self.assertEqual(dag.transactions_by_hash.get(tx2.get_hash()), tx2)
    self.assertEqual(dag.transactions_by_hash.get(tx3.get_hash()), tx3)
    self.assertNotIn(not_appended_tx.get_hash(), dag.transactions_by_hash)

    # test dag.tx_by_hash getter
    self.assertEqual(dag.get_tx_by_hash(tx1.get_hash()), tx1)
    self.assertEqual(dag.get_tx_by_hash(tx2.get_hash()), tx2)
    self.assertEqual(dag.get_tx_by_hash(tx3.get_hash()), tx3)
def test_hold_stake(self):
    """A StakeHoldTransaction included in a block adds its pubkey to the validator set."""
    dag = Dag(0)
    epoch = Epoch(dag)
    permissions = Permissions(epoch)
    signer = Private.generate()
    initial_validators = Validators.read_genesis_validators_from_file()

    # build a simple chain through slot 8
    prev_hash = dag.genesis_block().get_hash()
    for slot in range(1, 9):
        filler = BlockFactory.create_block_with_timestamp([prev_hash], BLOCK_TIME * slot)
        dag.add_signed_block(slot, BlockFactory.sign_block(filler, signer))
        prev_hash = filler.get_hash()

    # new participant stakes via a transaction carried by block 9
    staker_private = Private.generate()
    stake_tx = StakeHoldTransaction()
    stake_tx.amount = 1000
    stake_tx.pubkey = Private.publickey(staker_private)
    stake_tx.signature = Private.sign(stake_tx.get_hash(), staker_private)

    final_block = BlockFactory.create_block_with_timestamp([prev_hash], BLOCK_TIME * 9)
    final_block.system_txs.append(stake_tx)
    dag.add_signed_block(9, BlockFactory.sign_block(final_block, signer))

    resulting_validators = permissions.get_validators(final_block.get_hash())
    pub_keys = [validator.public_key for validator in resulting_validators]
    self.assertIn(Private.publickey(staker_private), pub_keys)
def test_find_epoch_hash_for_block(self):
    """Blocks in the first epoch resolve to genesis as their epoch hash."""
    dag = Dag(0)
    epoch = Epoch(dag)
    genesis_hash = dag.genesis_block().get_hash()

    # genesis resolves to itself
    self.assertEqual(genesis_hash, epoch.find_epoch_hash_for_block(genesis_hash))

    block = BlockFactory.create_block_with_timestamp([genesis_hash], BLOCK_TIME)
    dag.add_signed_block(1, BlockFactory.sign_block(block, Private.generate()))

    # the first real block still belongs to the genesis epoch
    self.assertEqual(genesis_hash, epoch.find_epoch_hash_for_block(block.get_hash()))
def test_pack_parse_reveal_transaction(self):
    """RevealRandomTransaction round-trips through pack()/parse() for several keys."""
    for _ in range(10):
        throwaway = Private.generate()
        original = RevealRandomTransaction()
        original.commit_hash = sha256(b"previous_transaction").digest()
        original.key = Keys.to_bytes(throwaway)

        restored = RevealRandomTransaction()
        restored.parse(original.pack())

        # both the serialized form and the hash must match
        self.assertEqual(TransactionParser.pack(original), TransactionParser.pack(restored))
        self.assertEqual(original.get_hash(), restored.get_hash())
def test_split_pack_unpack(self):
    """SplitRandomTransaction round-trips, preserving hash and signing hash."""
    signer = Private.generate()
    original = SplitRandomTransaction()
    # deliberately uneven piece lengths to exercise the packer
    original.pieces = [os.urandom(128), os.urandom(127), os.urandom(128)]
    original.pubkey_index = 0
    original.signature = Private.sign(original.get_signing_hash(b"epoch_hash"), signer)

    restored = SplitRandomTransaction()
    restored.parse(original.pack())

    self.assertEqual(original.get_hash(), restored.get_hash())
    self.assertEqual(original.get_signing_hash(b"epoch_hash"), restored.get_signing_hash(b"epoch_hash"))