Example #1
    def test_ancestry(self):
        dag = Dag(0)
        private = Private.generate()
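        # main chain: genesis <- block1 <- block2 <- block3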
        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME * 2)
        signed_block2 = BlockFactory.sign_block(block2, private)
        dag.add_signed_block(2, signed_block2)

        block3 = BlockFactory.create_block_with_timestamp([block2.get_hash()],
                                                          BLOCK_TIME * 3)
        signed_block3 = BlockFactory.sign_block(block3, private)
        dag.add_signed_block(3, signed_block3)

        # alternative chain
        other_block2 = BlockFactory.create_block_with_timestamp(
            [block1.get_hash()], BLOCK_TIME * 2 + 1)
        other_signed_block2 = BlockFactory.sign_block(other_block2, private)
        dag.add_signed_block(2, other_signed_block2)

        # alternative chain
        other_block3 = BlockFactory.create_block_with_timestamp(
            [other_block2.get_hash()], BLOCK_TIME * 3 + 1)
        other_signed_block3 = BlockFactory.sign_block(other_block3, private)
        dag.add_signed_block(3, other_signed_block3)

        self.assertEqual(
            dag.is_ancestor(other_block3.get_hash(), other_block2.get_hash()),
            True)
        self.assertEqual(
            dag.is_ancestor(other_block3.get_hash(), block2.get_hash()), False)
Example #2
    def test_chain_length(self):
        dag = Dag(0)
        private = Private.generate()
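        # main chain: genesis <- block1 <- block2 <- block3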
        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME * 2)
        signed_block2 = BlockFactory.sign_block(block2, private)
        dag.add_signed_block(2, signed_block2)

        block3 = BlockFactory.create_block_with_timestamp([block2.get_hash()],
                                                          BLOCK_TIME * 3)
        signed_block3 = BlockFactory.sign_block(block3, private)
        dag.add_signed_block(3, signed_block3)

        # alternative chain
        other_block2 = BlockFactory.create_block_with_timestamp(
            [block1.get_hash()], BLOCK_TIME * 2 + 1)
        other_signed_block2 = BlockFactory.sign_block(other_block2, private)
        dag.add_signed_block(2, other_signed_block2)

        self.assertEqual(
            dag.calculate_chain_length(other_block2.get_hash(),
                                       dag.genesis_hash()), 3)
        self.assertEqual(
            dag.calculate_chain_length(block3.get_hash(), dag.genesis_hash()),
            4)
    def test_find_epoch_hash_for_block(self):
        dag = Dag(0)
        epoch = Epoch(dag)
        genesis_hash = dag.genesis_block().get_hash()
        genesis_epoch_hash = epoch.find_epoch_hash_for_block(genesis_hash)
        self.assertEqual(genesis_hash, genesis_epoch_hash)

        block = BlockFactory.create_block_with_timestamp([genesis_hash],
                                                         BLOCK_TIME)
        signed_block = BlockFactory.sign_block(block, Private.generate())
        dag.add_signed_block(1, signed_block)
        first_block_hash = block.get_hash()

        first_epoch_hash = epoch.find_epoch_hash_for_block(first_block_hash)
        self.assertEqual(genesis_hash, first_epoch_hash)
    def test_merge_in_merge(self):
        dag = Dag(0)
        genesis_hash = dag.genesis_block().get_hash()
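        # first branch from genesis: dummy blocks in slots 1-4, skipping slots 1 and 3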
        ChainGenerator.fill_with_dummies_and_skips(dag, genesis_hash,
                                                   range(1, 5), [1, 3])
        second_block = dag.blocks_by_number[2][0].get_hash()
        ChainGenerator.fill_with_dummies_and_skips(dag, second_block,
                                                   range(3, 4), [])
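        # merge all current tops under a single block at slot 5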
        tops = dag.get_top_hashes()
        merging_block = BlockFactory.create_block_with_timestamp(
            tops, BLOCK_TIME * 5)
        merging_signed_block = BlockFactory.sign_block(merging_block,
                                                       Private.generate())
        dag.add_signed_block(5, merging_signed_block)

        ChainGenerator.fill_with_dummies_and_skips(dag, genesis_hash,
                                                   range(1, 7), [2, 3, 4, 5])

        tops = dag.get_top_hashes()
        merging_block = BlockFactory.create_block_with_timestamp(
            tops, BLOCK_TIME * 7)
        merging_signed_block = BlockFactory.sign_block(merging_block,
                                                       Private.generate())
        dag.add_signed_block(7, merging_signed_block)

        # DagVisualizer.visualize(dag, True)

        iterator = MergingIter(dag, merging_block.get_hash())

        # shortest chain goes last
        # 3 and 4 are swapped because 4 has priority
        # TODO: but is that okay? Maybe we should sometimes give priority to earlier blocks when they are equal?
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[7][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[6][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[1][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[5][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[3][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[4][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[2][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[0][0].get_hash())
    def test_simple_merge(self):
        dag = Dag(0)
        genesis_hash = dag.genesis_block().get_hash()
        ChainGenerator.fill_with_dummies_and_skips(dag, genesis_hash,
                                                   range(1, 10), [2, 5, 7, 8])
        first_block = dag.blocks_by_number[1][0].get_hash()
        ChainGenerator.fill_with_dummies_and_skips(dag, first_block,
                                                   range(2, 10),
                                                   [3, 4, 6, 7, 8, 9])
        second_block = dag.blocks_by_number[2][0].get_hash()
        ChainGenerator.fill_with_dummies_and_skips(dag, second_block,
                                                   range(3, 10),
                                                   [3, 4, 5, 6, 9])

        hanging_tips = dag.get_top_hashes()
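        # all three chains left dangling tips; the next block merges them at slot 10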

        merging_block = BlockFactory.create_block_with_timestamp(
            hanging_tips, BLOCK_TIME * 10)
        merging_signed_block = BlockFactory.sign_block(merging_block,
                                                       Private.generate())
        dag.add_signed_block(10, merging_signed_block)
        # DagVisualizer.visualize(dag, True)

        iterator = MergingIter(dag, merging_block.get_hash())

        self.assertEqual(iterator.next().get_hash(), merging_block.get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[5][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[8][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[7][0].get_hash())
        self.assertEqual(
            iterator.next().get_hash(),
            dag.blocks_by_number[2][0].get_hash())  # TODO: find out why this is 2 here
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[9][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[6][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[4][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[3][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[1][0].get_hash())
        self.assertEqual(iterator.next().get_hash(),
                         dag.blocks_by_number[0][0].get_hash())
Example #6
    def test_iterator(self):
        dag = Dag(0)
        private = Private.generate()
        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME * 2)
        signed_block2 = BlockFactory.sign_block(block2, private)
        dag.add_signed_block(2, signed_block2)

        block3 = BlockFactory.create_block_with_timestamp([block2.get_hash()],
                                                          BLOCK_TIME * 3)
        signed_block3 = BlockFactory.sign_block(block3, private)
        dag.add_signed_block(3, signed_block3)

        # alternative chain
        other_block2 = BlockFactory.create_block_with_timestamp(
            [block1.get_hash()], BLOCK_TIME * 2 + 1)
        other_signed_block2 = BlockFactory.sign_block(other_block2, private)
        dag.add_signed_block(2, other_signed_block2)

        # intentionally skipped block

        # alternative chain
        other_block4 = BlockFactory.create_block_with_timestamp(
            [other_block2.get_hash()], BLOCK_TIME * 3 + 1)
        other_signed_block4 = BlockFactory.sign_block(other_block4, private)
        dag.add_signed_block(4, other_signed_block4)

        chain_iter = ChainIter(dag, block3.get_hash())
        self.assertEqual(chain_iter.next().block.get_hash(), block3.get_hash())
        self.assertEqual(chain_iter.next().block.get_hash(), block2.get_hash())
        self.assertEqual(chain_iter.next().block.get_hash(), block1.get_hash())

        chain_iter = ChainIter(dag, other_block4.get_hash())
        self.assertEqual(chain_iter.next().block.get_hash(),
                         other_block4.get_hash())
        self.assertEqual(chain_iter.next(),
                         None)  # detect intentionally skipped block
        self.assertEqual(chain_iter.next().block.get_hash(),
                         other_block2.get_hash())
        self.assertEqual(chain_iter.next().block.get_hash(), block1.get_hash())
    def test_penalty(self):
        dag = Dag(0)
        epoch = Epoch(dag)
        permissions = Permissions(epoch)
        node_private = Private.generate()

        initial_validators = Validators.read_genesis_validators_from_file()

        genesis_hash = dag.genesis_block().get_hash()

        last_block_number = Epoch.get_epoch_end_block_number(1)
        prev_hash = ChainGenerator.fill_with_dummies(
            dag, genesis_hash, range(1, last_block_number))

        block = BlockFactory.create_block_with_timestamp(
            [prev_hash], BLOCK_TIME * last_block_number)
        tx = PenaltyTransaction()
        tx.conflicts = [prev_hash]
        tx.signature = Private.sign(tx.get_hash(), node_private)
        block.system_txs = [tx]
        signed_block = BlockFactory.sign_block(block, node_private)
        dag.add_signed_block(last_block_number, signed_block)

        initial_validators_order = permissions.get_signers_indexes(
            genesis_hash)
        # we subtract two here: one because it is the last-but-one block,
        # and one because the epoch starts from 1
        validator_index_to_penalize = initial_validators_order[
            last_block_number - 2]

        resulting_validators = permissions.get_validators(block.get_hash())

        self.assertNotEqual(len(initial_validators), len(resulting_validators))

        initial_validators.pop(validator_index_to_penalize)

        init_pubkeys = list(
            map(lambda validator: validator.public_key, initial_validators))
        result_pubkeys = list(
            map(lambda validator: validator.public_key, resulting_validators))

        self.assertEqual(init_pubkeys, result_pubkeys)
Example #8
    def generate_two_chains(length):
        dag = Dag(0)
        private = Private.generate()
        prev_hash = dag.genesis_block().get_hash()
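        # first chain: one signed block per slot from 1 to length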
        for i in range(1, length + 1):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        prev_hash = dag.blocks_by_number[1][0].get_hash()
        for i in range(2, length + 1):  # intentionally one block less
            if i == 4: continue  # intentionally skipped block
            block = BlockFactory.create_block_with_timestamp(
                [prev_hash], BLOCK_TIME * i + 1)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        return dag
    def test_stake_release_by_genesis_validator(self):
        # base initialization
        dag = Dag(0)
        epoch = Epoch(dag)
        permissions = Permissions(epoch)
        node_private = Private.generate()

        initial_validators = Validators.read_genesis_validators_from_file()

        genesis_hash = dag.genesis_block().get_hash()
        prev_hash = genesis_hash
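        # build a plain chain of 8 blocks signed with the node key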
        for i in range(1, 9):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        # get one of the genesis validators
        genesis_validator = initial_validators[9]

        # create a stake release transaction for the genesis validator
        tx_release = StakeReleaseTransaction()
        tx_release.pubkey = Keys.to_bytes(genesis_validator.public_key)
        tx_release.signature = Private.sign(tx_release.get_hash(),
                                            node_private)

        # append signed stake release transaction
        block.system_txs.append(tx_release)

        # sign block by one of validators
        signed_block = BlockFactory.sign_block(block, node_private)
        # add signed block to DAG
        dag.add_signed_block(19, signed_block)

        resulting_validators = permissions.get_validators(block.get_hash())
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)
        self.assertNotIn(genesis_validator.public_key, pub_keys)
    def test_top_blocks(self):
        dag = Dag(0)
        epoch = Epoch(dag)
        private = Private.generate()

        epoch_hash = dag.genesis_block().get_hash()
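        # initially the genesis block is both the only top and its own epoch hash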

        self.assertEqual(dag.genesis_block().get_hash(),
                         list(epoch.get_epoch_hashes().keys())[0])
        self.assertEqual(dag.genesis_block().get_hash(),
                         list(epoch.get_epoch_hashes().values())[0])

        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        self.assertEqual(block1.get_hash(),
                         list(epoch.get_epoch_hashes().keys())[0])
        self.assertEqual(epoch_hash,
                         list(epoch.get_epoch_hashes().values())[0])

        prev_hash = block1.get_hash()
        epoch_length = ROUND_DURATION * 6 + 1
        for i in range(2, epoch_length + 1):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        if epoch.is_new_epoch_upcoming(epoch_length + 1):
            epoch.accept_tops_as_epoch_hashes()

        top_block_hash = dag.blocks_by_number[epoch_length][0].get_hash()
        epoch_hash = dag.blocks_by_number[epoch_length][0].get_hash()

        self.assertEqual(top_block_hash,
                         list(epoch.get_epoch_hashes().keys())[0])
        self.assertEqual(epoch_hash,
                         list(epoch.get_epoch_hashes().values())[0])

        epoch2 = epoch_length * 2 + 1
        for i in range(epoch_length + 1, epoch2):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        if epoch.is_new_epoch_upcoming(epoch2):
            epoch.accept_tops_as_epoch_hashes()

        top_block_hash = dag.blocks_by_number[epoch2 - 1][0].get_hash()
        epoch_hash = dag.blocks_by_number[epoch2 - 1][0].get_hash()

        self.assertEqual(top_block_hash,
                         list(epoch.get_epoch_hashes().keys())[0])
        self.assertEqual(epoch_hash,
                         list(epoch.get_epoch_hashes().values())[0])
    def test_private_keys_extraction(self):
        dag = Dag(0)
        epoch = Epoch(dag)
        node_private = Private.generate()

        prev_hash = dag.genesis_block().get_hash()
        round_start, round_end = Epoch.get_round_bounds(1, Round.PRIVATE)
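        # fill every slot before the PRIVATE round with plain blocks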
        for i in range(1, round_start):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        generated_private_keys = []
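        # one block per slot of the PRIVATE round (except the last), each carrying
        # a PrivateKeyTransaction with a freshly generated key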
        for i in range(round_start,
                       round_end):  # intentionally skip last block of round
            generated_private = Private.generate()
            generated_private_keys.append(Keys.to_bytes(generated_private))

            private_key_tx = PrivateKeyTransaction()
            private_key_tx.key = Keys.to_bytes(generated_private)
            block = Block()
            block.system_txs = [private_key_tx]
            block.prev_hashes = dag.get_top_blocks_hashes()
            block.timestamp = i * BLOCK_TIME
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        ChainGenerator.fill_with_dummies(dag, prev_hash,
                                         Epoch.get_round_range(1, Round.FINAL))

        epoch_hash = dag.blocks_by_number[ROUND_DURATION * 6 + 1][0].get_hash()

        extracted_privates = epoch.get_private_keys_for_epoch(epoch_hash)

        for i in range(0, ROUND_DURATION - 1):
            self.assertEqual(extracted_privates[i], generated_private_keys[i])
    def test_hold_stake(self):
        dag = Dag(0)
        epoch = Epoch(dag)
        permissions = Permissions(epoch)
        node_private = Private.generate()

        initial_validators = Validators.read_genesis_validators_from_file()

        genesis_hash = dag.genesis_block().get_hash()
        prev_hash = genesis_hash
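        # build a plain chain of 8 blocks, then add a block carrying the stake-hold transaction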
        for i in range(1, 9):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                         BLOCK_TIME * 9)

        tx = StakeHoldTransaction()
        tx.amount = 1000
        node_new_private = Private.generate()

        tx.pubkey = Private.publickey(node_new_private)
        tx.signature = Private.sign(tx.get_hash(), node_new_private)

        block.system_txs.append(tx)
        signed_block = BlockFactory.sign_block(block, node_private)
        dag.add_signed_block(9, signed_block)

        resulting_validators = permissions.get_validators(block.get_hash())
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)

        self.assertIn(Private.publickey(node_new_private), pub_keys)
Example #13
    def test_getting_tx_by_hash(self):
        dag = Dag(0)
        private = Private.generate()

        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        tx1 = TransactionFactory.create_negative_gossip_transaction(1, private)
        tx2 = TransactionFactory.create_positive_gossip_transaction(
            block1.get_hash(), private)
        tx3 = TransactionFactory.create_penalty_gossip_transaction(
            {tx1.get_hash(): tx2.get_hash()}, private)
        not_appended_tx = TransactionFactory.create_public_key_transaction(
            generated_private=Private.generate(),
            epoch_hash=sha256(b'epoch_hash').digest(),
            validator_index=1,
            node_private=private)
        block1.system_txs.append(tx1)
        block1.system_txs.append(tx2)
        block1.system_txs.append(tx3)

        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx1.get_hash(): tx1}))
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx2.get_hash(): tx2}))
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx3.get_hash(): tx3}))
        self.assertFalse(
            set(dag.transactions_by_hash).issuperset(
                {not_appended_tx.get_hash(): not_appended_tx}))

        # test the dag.get_tx_by_hash getter
        self.assertTrue(dag.get_tx_by_hash(tx1.get_hash()) == tx1)
        self.assertTrue(dag.get_tx_by_hash(tx2.get_hash()) == tx2)
        self.assertTrue(dag.get_tx_by_hash(tx3.get_hash()) == tx3)
Example #14
    def test_zeta_calculation(self):
        dag = Dag(0)
        private = Private.generate()
        prev_hash = dag.genesis_block().get_hash()
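        # first chain with gaps: blocks at slots 1-2, 6 and 10-11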
        for i in range(1, 3):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        # skip 3 blocks (slots 3-5)

        for i in range(6, 7):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        for i in range(10, 12):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        prev_hash = dag.blocks_by_number[1][0].get_hash()
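        # second chain: fill slots 2-11 on top of block 1, skipping slot 3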
        for i in range(2, 12):
            if i == 3: continue
            block = BlockFactory.create_block_with_timestamp(
                [prev_hash], BLOCK_TIME * i + 1)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        immutability = Immutability(dag)
        # zeta = immutability.calculate_zeta(dag.blocks_by_number[2][0].get_hash())
        # self.assertEqual(zeta, -2)

        zeta = immutability.calculate_zeta(
            dag.blocks_by_number[6][1].get_hash())
        self.assertEqual(zeta, 1)
Example #15
    def test_storing_tx_by_hash(self):
        dag = Dag(0)
        private0 = Private.generate()
        private1 = Private.generate()
        private2 = Private.generate()

        # add block 1
        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private0)
        dag.add_signed_block(1, signed_block1)

        # check that dag.transactions_by_hash is empty
        self.assertTrue(len(dag.transactions_by_hash) == 0)

        # add block 2
        block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME)
        # add a penalty gossip case via transactions in the block
        tx1 = TransactionFactory.create_negative_gossip_transaction(
            1, private1)
        tx2 = TransactionFactory.create_positive_gossip_transaction(
            block2.get_hash(), private1)
        block2.system_txs.append(tx1)
        block2.system_txs.append(tx2)
        # --------------------------------------
        signed_block2 = BlockFactory.sign_block(block2, private1)
        dag.add_signed_block(2, signed_block2)

        # check transactions in dag.transactions_by_hash
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx1.get_hash(): tx1}))
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx2.get_hash(): tx2}))

        block3 = BlockFactory.create_block_with_timestamp([block2.get_hash()],
                                                          BLOCK_TIME)
        signed_block3 = BlockFactory.sign_block(block3, private2)
        dag.add_signed_block(3, signed_block3)

        # check transactions in dag.transactions_by_hash
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx1.get_hash(): tx1}))
        self.assertTrue(
            set(dag.transactions_by_hash).issuperset({tx2.get_hash(): tx2}))
Example #16
    def test_top_blocks(self):
        dag = Dag(0)
        private = Private.generate()
        block1 = BlockFactory.create_block_with_timestamp(
            [dag.genesis_block().get_hash()], BLOCK_TIME)
        signed_block1 = BlockFactory.sign_block(block1, private)
        dag.add_signed_block(1, signed_block1)

        block2 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME * 2)
        signed_block2 = BlockFactory.sign_block(block2, private)
        dag.add_signed_block(2, signed_block2)

        block3 = BlockFactory.create_block_with_timestamp([block1.get_hash()],
                                                          BLOCK_TIME * 3)
        signed_block3 = BlockFactory.sign_block(block3, private)
        dag.add_signed_block(3, signed_block3)

        top_hashes = dag.get_top_blocks_hashes()

        self.assertEqual(top_hashes[0], block2.get_hash())
        self.assertEqual(top_hashes[1], block3.get_hash())
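
The create/sign/add pattern above repeats throughout these examples: create a block that links to the previous hashes, sign it, add it to the dag, and remember its hash for the next block. As a rough sketch of that pattern only (it assumes the same Dag, BlockFactory, Private and BLOCK_TIME used in the tests; the helper name is made up for illustration), it could be factored out like this:

def append_linear_chain(dag, start_hash, numbers, private):
    # hypothetical helper: append one signed dummy block per slot in `numbers`,
    # each linked by hash to the previous one; returns the hash of the last block
    prev_hash = start_hash
    for i in numbers:
        block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                         BLOCK_TIME * i)
        signed_block = BlockFactory.sign_block(block, private)
        dag.add_signed_block(i, signed_block)
        prev_hash = block.get_hash()
    return prev_hash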
Example #17
class Node():
    def __init__(self, genesis_creation_time, node_id, network, block_signer):
        self.dag = Dag(genesis_creation_time)
        self.permissions = Permissions()
        self.mempool = Mempool()

        self.block_signer = block_signer
        self.network = network
        self.node_id = node_id
        # self.last_commited_random_key: first element is the era number, second is the key to reveal the committed random

    def start(self):
        pass

    async def run(self):
        while True:
            current_block_number = self.dag.get_current_timeframe_block_number()
            if self.dag.get_current_epoch() == Epoch.COMMIT:
                self.try_to_commit_random(current_block_number)
            elif self.dag.get_current_epoch() == Epoch.REVEAL:
                self.try_to_reveal_random(current_block_number)

            self.try_to_sign_block(current_block_number)
            await asyncio.sleep(3)

    def try_to_sign_block(self, current_block_number):
        current_block_validator = self.permissions.get_permission(
            self.dag, current_block_number)
        is_public_key_corresponds = current_block_validator.public_key == self.block_signer.private_key.publickey()
        block_has_not_been_signed_yet = not self.dag.is_current_timeframe_block_present()
        if is_public_key_corresponds and block_has_not_been_signed_yet:
            signed_block = self.dag.sign_block(self.block_signer.private_key)
            raw_signed_block = signed_block.pack()
            self.network.broadcast_block(self.node_id, raw_signed_block)

    def try_to_commit_random(self, current_block_number):
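        # commit an encrypted random value at most once per era, keeping the key needed to reveal it later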
        era_number = self.dag.get_era_number(current_block_number)
        has_reveal_key = hasattr(self, "last_commited_random_key")
        has_key_for_previous_era = has_reveal_key and self.last_commited_random_key[
            0] < era_number
        if not has_reveal_key or has_key_for_previous_era:
            era_hash = self.dag.get_era_hash(era_number)
            tx = CommitRandomTransaction()
            data, key = enc_part_random(era_hash)
            tx.rand = data
            raw_tx = TransactionParser.pack(tx)
            self.last_commited_random_key = (era_number, key)
            self.network.broadcast_transaction(self.node_id, raw_tx)

    def try_to_reveal_random(self, current_block_number):
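        # reveal the committed random for the current era, then forget the key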
        era_number = self.dag.get_era_number(current_block_number)
        has_reveal_key = hasattr(self, "last_commited_random_key")
        has_key_for_this_era = has_reveal_key and self.last_commited_random_key[
            0] == era_number
        if has_reveal_key and has_key_for_this_era:
            tx = RevealRandomTransaction()
            tx.commit_hash = 1234567890
            tx.key = self.last_commited_random_key[1]
            raw_tx = TransactionParser.pack(tx)
            del self.last_commited_random_key
            self.network.broadcast_transaction(self.node_id, raw_tx)

    def handle_block_message(self, node_id, raw_signed_block):
        current_block_number = self.dag.get_current_timeframe_block_number()
        current_validator = self.permissions.get_permission(
            self.dag, current_block_number)
        signed_block = SignedBlock()
        signed_block.parse(raw_signed_block)
        print("Node ", self.node_id, "received block from node", node_id,
              "with block hash",
              signed_block.block.get_hash().hexdigest())
        if signed_block.verify_signature(current_validator.public_key):
            self.dag.add_signed_block(current_block_number, signed_block)
        else:
            self.network.gossip_malicious(current_validator.public_key)

    def handle_transaction_message(self, node_id, raw_transaction):
        transaction = TransactionParser.parse(raw_transaction)
        verifier = TransactionVerifier(self.dag)
        print("Node ", self.node_id, "received transaction with hash",
              transaction.get_hash().hexdigest(), " from node ", node_id)
        if verifier.check_if_valid(transaction):
            print("It is valid. Adding to mempool")
            self.mempool.add_transaction(transaction)
        else:
            print("It's invalid. Do something about it")
Example #18
class Node:
    def __init__(self,
                 genesis_creation_time,
                 node_id,
                 network,
                 block_signer=BlockSigner(Private.generate()),
                 validators=Validators(),
                 behaviour=Behaviour(),
                 logger=DummyLogger()):
        self.logger = logger
        self.dag = Dag(genesis_creation_time)
        self.epoch = Epoch(self.dag)
        self.epoch.set_logger(self.logger)
        self.permissions = Permissions(self.epoch, validators)
        self.mempool = Mempool()
        self.utxo = Utxo(self.logger)
        self.conflict_watcher = ConflictWatcher(self.dag)
        self.behaviour = behaviour

        self.block_signer = block_signer
        self.node_pubkey = Private.publickey(block_signer.private_key)
        self.logger.info("Public key is %s",
                         Keys.to_visual_string(self.node_pubkey))
        self.network = network
        self.node_id = node_id
        self.epoch_private_keys = []  # TODO: make this a single element
        # self.epoch_private_keys: first element is the era number, second is the key to reveal the committed random
        self.reveals_to_send = {}
        self.sent_shares_epochs = []  # epoch hashes of secret shares
        self.last_expected_timeslot = 0
        self.last_signed_block_number = 0
        self.tried_to_sign_current_block = False
        self.owned_utxos = []
        self.terminated = False

        self.blocks_buffer = []  # used when a received block's ancestor is not yet in the local dag (before verification)

    def start(self):
        pass

    def handle_timeslot_changed(self, previous_timeslot_number,
                                current_timeslot_number):
        self.last_expected_timeslot = current_timeslot_number
        self.try_to_broadcast_maliciously_delayed_block()
        return self.try_to_send_negative_gossip(previous_timeslot_number)

    def try_to_broadcast_maliciously_delayed_block(self):
        if self.behaviour.block_to_delay_broadcasting:
            if self.behaviour.malicious_block_broadcast_delay > 0:
                self.behaviour.malicious_block_broadcast_delay -= 1
            else:
                self.network.broadcast_block(
                    self.node_id,
                    self.behaviour.block_to_delay_broadcasting.pack())
                self.behaviour.block_to_delay_broadcasting = None

    def try_to_send_negative_gossip(self, previous_timeslot_number):
        if previous_timeslot_number not in self.dag.blocks_by_number:
            epoch_block_number = Epoch.convert_to_epoch_block_number(
                previous_timeslot_number)
            allowed_to_send_negative_gossip = False
            epoch_hashes = self.epoch.get_epoch_hashes()
            for _, epoch_hash in epoch_hashes.items():
                permissions = self.permissions.get_gossip_permission(
                    epoch_hash, epoch_block_number)
                for permission in permissions:
                    if permission.public_key == self.node_pubkey:
                        allowed_to_send_negative_gossip = True
                        break
            if allowed_to_send_negative_gossip:
                self.broadcast_gossip_negative(previous_timeslot_number)
            return True
        return False

    def step(self):
        current_block_number = self.epoch.get_current_timeframe_block_number()

        if self.epoch.is_new_epoch_upcoming(current_block_number):
            self.epoch.accept_tops_as_epoch_hashes()

        # service method to update node behaviour (if the behaviour is temporary)
        self.behaviour.update(Epoch.get_epoch_number(current_block_number))
        # service method to update transport behaviour (if the behaviour is temporary)
        self.behaviour.update_transport(current_block_number)

        current_round = self.epoch.get_round_by_block_number(
            current_block_number)
        if current_round == Round.PUBLIC:
            self.try_to_publish_public_key(current_block_number)
        elif current_round == Round.SECRETSHARE:
            self.try_to_share_random()
        # elif current_round == Round.PRIVATE:
        #     do nothing, as the private key should be included in the block by the block signer
        elif current_round == Round.COMMIT:
            self.try_to_commit_random()
        elif current_round == Round.REVEAL:
            self.try_to_reveal_random()
        elif current_round == Round.FINAL:
            # at this point we may remove everything systemic from mempool,
            # so it does not interfere with pubkeys for next epoch
            self.mempool.remove_all_systemic_transactions()

        if self.behaviour.wants_to_hold_stake:
            self.broadcast_stakehold_transaction()
            self.behaviour.wants_to_hold_stake = False

        if self.behaviour.wants_to_release_stake:
            self.broadcast_stakerelease_transaction()
            self.behaviour.wants_to_release_stake = False

        if self.behaviour.malicious_send_negative_gossip_count > 0:
            self.broadcast_gossip_negative(self.last_expected_timeslot)
            self.behaviour.malicious_send_negative_gossip_count -= 1
        if self.behaviour.malicious_send_positive_gossip_count > 0:
            zero_block = self.dag.blocks_by_number[0][0].block  # maliciously send the genesis block
            self.broadcast_gossip_positive(zero_block.get_hash())
            self.behaviour.malicious_send_positive_gossip_count -= 1

        if self.owned_utxos:
            self.broadcast_payments()

        if current_block_number != self.last_expected_timeslot:
            self.tried_to_sign_current_block = False
            should_wait = self.handle_timeslot_changed(
                previous_timeslot_number=self.last_expected_timeslot,
                current_timeslot_number=current_block_number)
            if should_wait:
                return
        if not self.tried_to_sign_current_block:
            self.try_to_sign_block(current_block_number)
            self.tried_to_sign_current_block = True  # will reset in next timeslot

    async def run(self):
        while True:
            self.step()
            await asyncio.sleep(1)

    def try_to_sign_block(self, current_block_number):
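        # sign only when one of the current epoch hashes grants this node the sign permission for this timeslot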
        epoch_block_number = Epoch.convert_to_epoch_block_number(
            current_block_number)

        allowed_to_sign = False
        epoch_hashes = self.epoch.get_epoch_hashes()
        for top, epoch_hash in epoch_hashes.items():
            permission = self.permissions.get_sign_permission(
                epoch_hash, epoch_block_number)
            if permission.public_key == self.node_pubkey:
                allowed_to_sign = True
                break

        if allowed_to_sign:
            should_skip_maliciously = self.behaviour.is_malicious_skip_block()
            # first_epoch_ever = self.epoch.get_epoch_number(current_block_number) == 1
            if should_skip_maliciously:  # and not first_epoch_ever: # skip first epoch check
                self.epoch_private_keys.clear()
                self.logger.info("Maliciously skipped block")
            else:
                if self.last_signed_block_number < current_block_number:
                    self.last_signed_block_number = current_block_number
                    self.sign_block(current_block_number)
                else:
                    # skip broadcasting one more block in the same timeslot
                    pass

    def sign_block(self, current_block_number):
        current_round_type = self.epoch.get_round_by_block_number(
            current_block_number)
        epoch_number = Epoch.get_epoch_number(current_block_number)

        system_txs = self.get_system_transactions_for_signing(
            current_round_type)
        payment_txs = self.get_payment_transactions_for_signing(
            current_block_number)

        tops = self.dag.get_top_blocks_hashes()
        chosen_top = self.dag.get_longest_chain_top(tops)
        conflicting_tops = [top for top in tops if top != chosen_top]

        current_top_blocks = [chosen_top] + conflicting_tops  # the first link in the dag is not considered a conflict; the rest are

        if self.behaviour.off_malicious_links_to_wrong_blocks:
            current_top_blocks = []
            all_hashes = list(self.dag.blocks_by_hash.keys())
            for _ in range(random.randint(1, 3)):
                block_hash = random.choice(all_hashes)
                current_top_blocks.append(block_hash)

            self.logger.info(
                "Maliciously connecting block at slot %s to random hashes",
                current_block_number)

        block = BlockFactory.create_block_dummy(current_top_blocks)
        block.system_txs = system_txs
        block.payment_txs = payment_txs
        signed_block = BlockFactory.sign_block(block,
                                               self.block_signer.private_key)

        if self.behaviour.malicious_block_broadcast_delay > 0:
            self.behaviour.block_to_delay_broadcasting = signed_block
            return  # don't broadcast yet; wait a few timeslots

        self.dag.add_signed_block(current_block_number, signed_block)
        self.utxo.apply_payments(payment_txs)
        self.conflict_watcher.on_new_block_by_validator(
            block.get_hash(), epoch_number, self.node_pubkey)

        if not self.behaviour.transport_cancel_block_broadcast:  # behaviour flag to cancel block broadcast
            self.logger.debug("Broadcasting signed block number %s",
                              current_block_number)
            self.network.broadcast_block(self.node_id, signed_block.pack())
        else:
            self.logger.info(
                "Created block but maliciously skipped broadcasting it")

        if self.behaviour.malicious_excessive_block_count > 0:
            additional_block_timestamp = block.timestamp + 1
            additional_block = BlockFactory.create_block_with_timestamp(
                current_top_blocks, additional_block_timestamp)
            additional_block.system_txs = block.system_txs
            additional_block.payment_txs = block.payment_txs
            signed_add_block = BlockFactory.sign_block(
                additional_block, self.block_signer.private_key)
            self.dag.add_signed_block(current_block_number, signed_add_block)
            self.conflict_watcher.on_new_block_by_validator(
                signed_add_block.get_hash(), epoch_number,
                self.node_pubkey)  # mark our own conflict for consistency
            self.logger.info("Sending additional block")
            self.network.broadcast_block(self.node_id, signed_add_block.pack())
            self.behaviour.malicious_excessive_block_count -= 1

    def get_system_transactions_for_signing(self, round):
        system_txs = self.mempool.pop_round_system_transactions(round)

        # skip invalid system_txs
        verifier = InBlockTransactionsAcceptor(self.epoch, self.permissions,
                                               self.logger)
        system_txs = [t for t in system_txs if verifier.check_if_valid(t)]
        # get gossip conflict hashes (validate_gossip() ---> [gossip_negative_hash, gossip_positive_hash])
        conflicts_gossip = self.validate_gossip(self.dag, self.mempool)
        gossip_mempool_txs = self.mempool.pop_current_gossips()  # pop gossips into the block
        system_txs += gossip_mempool_txs

        if round == Round.PRIVATE:
            if self.epoch_private_keys:
                key_reveal_tx = self.form_private_key_reveal_transaction()
                system_txs.append(key_reveal_tx)

        if conflicts_gossip:
            for conflict in conflicts_gossip:
                self.logger.info(
                    "Adding penalty to block with conflicting gossips %s",
                    conflicts_gossip)
                penalty_gossip_tx = \
                    TransactionFactory.create_penalty_gossip_transaction(conflict=conflict,
                                                                         node_private=self.block_signer.private_key)
                system_txs.append(penalty_gossip_tx)

        return system_txs

    def get_payment_transactions_for_signing(self, block_number):
        node_public = Private.publickey(self.block_signer.private_key)
        pseudo_address = sha256(node_public).digest()
        block_reward = TransactionFactory.create_block_reward(
            pseudo_address, block_number)
        block_reward_hash = block_reward.get_hash()
        self.owned_utxos.append(block_reward_hash)
        payment_txs = [block_reward] + self.mempool.pop_payment_transactions()
        return payment_txs

    def try_to_publish_public_key(self, current_block_number):
        if self.epoch_private_keys:
            return

        epoch_hashes = self.epoch.get_epoch_hashes()
        for _, epoch_hash in epoch_hashes.items():
            allowed_round_validators = self.permissions.get_ordered_randomizers_pubkeys_for_round(
                epoch_hash, Round.PUBLIC)
            pubkey_publishers_pubkeys = [
                validator.public_key for validator in allowed_round_validators
            ]
            if self.node_pubkey in pubkey_publishers_pubkeys:
                node_private = self.block_signer.private_key
                pubkey_index = self.permissions.get_signer_index_from_public_key(
                    self.node_pubkey, epoch_hash)

                generated_private = Private.generate()
                tx = TransactionFactory.create_public_key_transaction(
                    generated_private=generated_private,
                    epoch_hash=epoch_hash,
                    validator_index=pubkey_index,
                    node_private=node_private)
                if self.behaviour.malicious_wrong_signature:
                    tx.signature = b'0' + tx.signature[1:]

                self.epoch_private_keys.append(generated_private)
                self.logger.debug("Broadcasted public key")
                self.logger.debug(Keys.to_visual_string(tx.generated_pubkey))
                self.mempool.add_transaction(tx)
                self.network.broadcast_transaction(self.node_id,
                                                   TransactionParser.pack(tx))

    def try_to_share_random(self):
        epoch_hashes = self.epoch.get_epoch_hashes()
        for top, epoch_hash in epoch_hashes.items():
            if epoch_hash in self.sent_shares_epochs: continue
            allowed_to_share_random = self.permissions.get_secret_sharers_pubkeys(
                epoch_hash)
            if not self.node_pubkey in allowed_to_share_random: continue
            split_random = self.form_split_random_transaction(top, epoch_hash)
            self.sent_shares_epochs.append(epoch_hash)
            self.mempool.add_transaction(split_random)
            self.network.broadcast_transaction(
                self.node_id, TransactionParser.pack(split_random))

    def try_to_commit_random(self):
        epoch_hashes = self.epoch.get_epoch_hashes().values()
        for epoch_hash in epoch_hashes:
            if epoch_hash not in self.reveals_to_send:
                allowed_to_commit_list = self.permissions.get_commiters_pubkeys(
                    epoch_hash)
                if self.node_pubkey not in allowed_to_commit_list:
                    continue
                pubkey_index = self.permissions.get_committer_index_from_public_key(
                    self.node_pubkey, epoch_hash)
                commit, reveal = TransactionFactory.create_commit_reveal_pair(
                    self.block_signer.private_key, os.urandom(32),
                    pubkey_index, epoch_hash)
                self.reveals_to_send[epoch_hash] = reveal
                self.logger.info("Broadcasting commit")
                self.mempool.add_transaction(commit)
                self.network.broadcast_transaction(
                    self.node_id, TransactionParser.pack(commit))

    def try_to_reveal_random(self):
        for epoch_hash in list(self.reveals_to_send.keys()):
            reveal = self.reveals_to_send[epoch_hash]
            self.logger.info("Broadcasting reveal")
            self.mempool.add_transaction(reveal)
            self.network.broadcast_transaction(self.node_id,
                                               TransactionParser.pack(reveal))
            del self.reveals_to_send[epoch_hash]

    def form_private_key_reveal_transaction(self):
        tx = TransactionFactory.create_private_key_transaction(
            self.epoch_private_keys.pop(0))
        return tx

    def form_split_random_transaction(self, top_hash, epoch_hash):
        ordered_senders = self.permissions.get_ordered_randomizers_pubkeys_for_round(
            epoch_hash, Round.PUBLIC)
        published_pubkeys = self.epoch.get_public_keys_for_epoch(top_hash)

        self.logger.info("Ordered pubkeys for secret sharing:")
        sorted_published_pubkeys = []
        for sender in ordered_senders:
            raw_pubkey = Keys.to_bytes(sender.public_key)
            raw_pubkey_index = self.permissions.get_signer_index_from_public_key(
                raw_pubkey, epoch_hash)
            if raw_pubkey_index in published_pubkeys:
                generated_pubkey = published_pubkeys[raw_pubkey_index]
                sorted_published_pubkeys.append(
                    Keys.from_bytes(generated_pubkey))
                self.logger.info(Keys.to_visual_string(generated_pubkey))
            else:
                sorted_published_pubkeys.append(None)
                self.logger.info("None")

        tx = self.form_secret_sharing_transaction(sorted_published_pubkeys,
                                                  epoch_hash)
        return tx

    def form_secret_sharing_transaction(self, sorted_public_keys, epoch_hash):
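        # split a fresh 32-byte secret into shares and encode each share for the corresponding published public key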
        random_bytes = os.urandom(32)
        splits = split_secret(random_bytes, MINIMAL_SECRET_SHARERS,
                              TOTAL_SECRET_SHARERS)
        encoded_splits = encode_splits(splits, sorted_public_keys)
        self.logger.info("Formed split random")

        node_private = self.block_signer.private_key
        pubkey_index = self.permissions.get_secret_sharer_from_public_key(
            self.node_pubkey, epoch_hash)

        tx = TransactionFactory.create_split_random_transaction(
            encoded_splits, pubkey_index, epoch_hash, node_private)
        return tx

    def get_allowed_signers_for_next_block(self, block):
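        # collect the permitted signer for every epoch hash reachable from the block's prev_hashes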
        current_block_number = self.epoch.get_current_timeframe_block_number()
        epoch_block_number = Epoch.convert_to_epoch_block_number(
            current_block_number)
        if self.epoch.is_new_epoch_upcoming(current_block_number):
            self.epoch.accept_tops_as_epoch_hashes()
        epoch_hashes = self.epoch.get_epoch_hashes()
        allowed_signers = []
        for prev_hash in block.prev_hashes:
            epoch_hash = None
            if prev_hash in epoch_hashes:
                epoch_hash = epoch_hashes[prev_hash]
            else:
                epoch_hash = self.epoch.find_epoch_hash_for_block(prev_hash)

            if epoch_hash:
                # self.logger.info("Calculating permissions from epoch_hash %s", epoch_hash.hex())
                allowed_pubkey = self.permissions.get_sign_permission(
                    epoch_hash, epoch_block_number)
                allowed_signers.append(allowed_pubkey)

        assert len(
            allowed_signers) > 0, "No signers allowed to sign next block"
        return allowed_signers

    # -------------------------------------------------------------------------------
    # Handlers
    # -------------------------------------------------------------------------------
    def handle_block_message(self, node_id, raw_signed_block):
        signed_block = SignedBlock()
        signed_block.parse(raw_signed_block)
        block_number = self.epoch.get_block_number_from_timestamp(
            signed_block.block.timestamp)
        self.logger.info(
            "Received block with number %s at timeslot %s with hash %s",
            block_number, self.epoch.get_current_timeframe_block_number(),
            signed_block.block.get_hash().hex())

        # CHECK_ANCESTOR
        blocks_by_hash = self.dag.blocks_by_hash
        is_orphan_block = False
        for prev_hash in signed_block.block.prev_hashes:  # check every previous hash
            if prev_hash not in blocks_by_hash:  # ancestor must already be in the local dag
                is_orphan_block = True

        # CHECK_ORPHAN_DISTANCE
        block_out_of_epoch = False
        epoch_end_block = self.epoch.get_epoch_end_block_number(
            self.epoch.current_epoch)
        if block_number >= epoch_end_block:
            # incoming block from a future epoch; can't validate the signer
            block_out_of_epoch = True

        # CHECK ALLOWED SIGNER
        if not block_out_of_epoch:  # if incoming block not out of current epoch
            allowed_signers = self.get_allowed_signers_for_block_number(
                block_number)
            allowed_pubkey = None
            for allowed_signer in allowed_signers:
                if signed_block.verify_signature(allowed_signer):
                    allowed_pubkey = allowed_signer
                    break
        else:
            allowed_pubkey = 'block_out_of_epoch'  # process block as orphan

        if allowed_pubkey:  # IF SIGNER ALLOWED
            if not is_orphan_block:  # PROCESS NORMAL BLOCK (same epoch)
                if self.epoch.is_new_epoch_upcoming(
                        block_number):  # CHECK IS NEW EPOCH
                    self.epoch.accept_tops_as_epoch_hashes()
                block_verifier = BlockAcceptor(
                    self.epoch, self.logger)  # VERIFY BLOCK AS NORMAL
                if block_verifier.check_if_valid(signed_block.block):
                    self.insert_verified_block(signed_block, allowed_pubkey)
                    return
            else:  # PROCESS ORPHAN BLOCK (same epoch)
                orphan_block_verifier = OrphanBlockAcceptor(
                    self.epoch, self.blocks_buffer, self.logger)
                if orphan_block_verifier.check_if_valid(signed_block.block):
                    self.blocks_buffer.append(signed_block)
                    self.logger.info("Orphan block added to buffer")
                    # for every parent of the received block
                    for prev_hash in signed_block.block.prev_hashes:  # check each ancestor of the received block
                        if prev_hash not in self.dag.blocks_by_hash:  # is the parent present in the local dag?
                            # if the parent does not exist in the local DAG, request it
                            self.network.direct_request_block_by_hash(
                                self.node_id, node_id, prev_hash)

                if len(self.blocks_buffer) > 0:
                    self.process_block_buffer()
                    self.logger.info("Orphan block buffer processed successfully")
        else:
            self.logger.error(
                "Received block from %d, but its signature is wrong", node_id)

    def handle_transaction_message(self, node_id, raw_transaction):
        transaction = TransactionParser.parse(raw_transaction)

        verifier = MempoolTransactionsAcceptor(self.epoch, self.permissions,
                                               self.logger)
        if verifier.check_if_valid(transaction):
            self.mempool.add_transaction(transaction)
            # PROCESS NEGATIVE GOSSIP
            if isinstance(transaction, NegativeGossipTransaction):
                self.logger.info(
                    "Received negative gossip about block %s at timeslot %s",
                    transaction.number_of_block,
                    self.epoch.get_current_timeframe_block_number())

                current_gossips = self.mempool.get_negative_gossips_by_block(
                    transaction.number_of_block)
                for gossip in current_gossips:
                    # negative gossip already sent by this node; skip positive gossip search and broadcast
                    if gossip.pubkey == self.node_pubkey:
                        return
                if self.dag.has_block_number(transaction.number_of_block):
                    signed_block_by_number = self.dag.blocks_by_number[
                        transaction.number_of_block]
                    self.broadcast_gossip_positive(
                        signed_block_by_number[0].get_hash())
            # PROCESS POSITIVE GOSSIP
            if isinstance(transaction, PositiveGossipTransaction):
                # NOTE: make the request ONLY if the block falls within the current timeslot
                self.logger.info(
                    "Received positive gossip about block %s at timeslot %s",
                    transaction.block_hash.hex(),
                    self.epoch.get_current_timeframe_block_number())
                if transaction.block_hash not in self.dag.blocks_by_hash:
                    self.network.get_block_by_hash(
                        sender_node_id=self.node_id,
                        receiver_node_id=node_id,  # the request goes TO receiver_node_id
                        block_hash=transaction.block_hash)
        else:
            self.logger.error("Received tx is invalid")

    # -------------------------------------------------------------------------------
    # Broadcast
    # -------------------------------------------------------------------------------
    def broadcast_stakehold_transaction(self):
        node_private = self.block_signer.private_key
        tx = TransactionFactory.create_stake_hold_transaction(
            1000, node_private)
        self.logger.info("Broadcasted StakeHold transaction")
        self.network.broadcast_transaction(self.node_id,
                                           TransactionParser.pack(tx))

    def broadcast_stakerelease_transaction(self):
        node_private = self.block_signer.private_key
        tx = TransactionFactory.create_stake_release_transaction(node_private)
        self.logger.info("Broadcasted release stake transaction")
        self.network.broadcast_transaction(self.node_id,
                                           TransactionParser.pack(tx))

    def broadcast_gossip_negative(self, block_number):
        node_private = self.block_signer.private_key
        tx = TransactionFactory.create_negative_gossip_transaction(
            block_number, node_private)
        # add to the local mempool BEFORE broadcasting
        self.mempool.append_gossip_tx(tx)
        self.logger.info("Broadcasted negative gossip transaction")
        self.network.broadcast_transaction(self.node_id,
                                           TransactionParser.pack(tx))

    def broadcast_gossip_positive(self, signed_block_hash):
        node_private = self.block_signer.private_key
        tx = TransactionFactory.create_positive_gossip_transaction(
            signed_block_hash, node_private)
        # add to the local mempool BEFORE broadcasting
        self.mempool.append_gossip_tx(tx)
        # self.logger.info("Broadcasted positive gossip transaction")
        self.network.broadcast_transaction(self.node_id,
                                           TransactionParser.pack(tx))

    def broadcast_payments(self):
        for utxo in self.owned_utxos:
            tx = TransactionFactory.create_payment(
                utxo, 0, [os.urandom(32), os.urandom(32)], [10, 5])
            self.mempool.add_transaction(tx)
            self.network.broadcast_transaction(self.node_id,
                                               TransactionParser.pack(tx))
            # self.logger.info("Broadcasted payment with hash %s", tx.get_hash())
        self.owned_utxos.clear()
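
    # Hedged sketch (not part of the original class): broadcast_payments above sends fixed
    # amounts to two random addresses; the variant below pays a single, caller-supplied
    # recipient instead. The second argument of TransactionFactory.create_payment is kept
    # at 0 exactly as above, since its meaning is not shown in this file, and
    # recipient_address (a 32-byte value) is a hypothetical parameter.
    def pay_single_recipient(self, utxo, recipient_address, amount):
        tx = TransactionFactory.create_payment(utxo, 0, [recipient_address], [amount])
        self.mempool.add_transaction(tx)
        self.network.broadcast_transaction(self.node_id, TransactionParser.pack(tx))
        return tx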

    # -------------------------------------------------------------------------------
    # Targeted request
    # -------------------------------------------------------------------------------
    def request_block_by_hash(self, block_hash):
        # no need to validate here: the block is public information
        signed_block = self.dag.blocks_by_hash[block_hash]
        self.network.broadcast_block(self.node_id, signed_block.pack())

    # returns the block directly to the sender, without broadcasting
    def direct_request_block_by_hash(self, sender_node, block_hash):
        signed_block = self.dag.blocks_by_hash[block_hash]
        self.network.direct_response_block_by_hash(self.node_id, sender_node,
                                                   signed_block.pack())

    # -------------------------------------------------------------------------------
    # Internal
    # -------------------------------------------------------------------------------
    def insert_verified_block(self, signed_block, allowed_pubkey):
        block = signed_block.block
        block_number = self.epoch.get_block_number_from_timestamp(
            block.timestamp)
        epoch_number = Epoch.get_epoch_number(block_number)

        self.dag.add_signed_block(block_number, signed_block)
        self.mempool.remove_transactions(block.system_txs)
        self.mempool.remove_transactions(block.payment_txs)
        self.utxo.apply_payments(block.payment_txs)
        self.conflict_watcher.on_new_block_by_validator(
            block.get_hash(), epoch_number, allowed_pubkey)

    def process_block_buffer(self):
        while len(self.blocks_buffer) > 0:
            block_from_buffer = self.blocks_buffer.pop()
            block_number = self.epoch.get_block_number_from_timestamp(
                block_from_buffer.block.timestamp)

            # CHECK IF A NEW EPOCH IS UPCOMING
            if self.epoch.is_new_epoch_upcoming(block_number):
                self.epoch.accept_tops_as_epoch_hashes()

            # validate the buffered block's signature
            allowed_signers = self.get_allowed_signers_for_block_number(
                block_number)
            allowed_pubkey = None
            for allowed_signer in allowed_signers:
                if block_from_buffer.verify_signature(allowed_signer):
                    allowed_pubkey = allowed_signer
                    break

            if allowed_pubkey:
                block_verifier = BlockAcceptor(self.epoch, self.logger)
                # VERIFY BLOCK AS NORMAL
                if block_verifier.check_if_valid(block_from_buffer.block):
                    self.insert_verified_block(block_from_buffer,
                                               allowed_pubkey)
                else:
                    self.logger.info("Block from buffer verification failed")
            else:
                self.logger.info("Block from buffer wrong signature")

    def get_allowed_signers_for_block_number(self, block_number):
        # TODO take cached epoch hashes if the block is from the latest epoch
        prev_epoch_number = self.epoch.get_epoch_number(block_number) - 1
        prev_epoch_start = self.epoch.get_epoch_start_block_number(
            prev_epoch_number)
        prev_epoch_end = self.epoch.get_epoch_end_block_number(
            prev_epoch_number)

        # this extracts every unconnected block in the epoch, which is effectively an epoch hash
        # TODO maybe treat only blocks in the final round as epoch hashes and consider everything else an orphan
        epoch_hashes = self.dag.get_branches_for_timeslot_range(
            prev_epoch_start, prev_epoch_end + 1)

        if prev_epoch_number == 0:
            epoch_hashes = [self.dag.genesis_block().get_hash()]

        allowed_signers = []
        for epoch_hash in epoch_hashes:
            epoch_block_number = Epoch.convert_to_epoch_block_number(
                block_number)
            allowed_pubkey = self.permissions.get_sign_permission(
                epoch_hash, epoch_block_number).public_key
            allowed_signers.append(allowed_pubkey)

        assert len(allowed_signers) > 0, "No signers allowed to sign block"
        return allowed_signers
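
    # Hedged sketch (not part of the original class): both handle_block_message and
    # process_block_buffer walk the allowed signers and verify the block signature
    # against each of them; that loop could live in one place, as sketched here.
    def find_allowed_signer(self, signed_block, block_number):
        for allowed_signer in self.get_allowed_signers_for_block_number(block_number):
            if signed_block.verify_signature(allowed_signer):
                return allowed_signer
        return None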

    @staticmethod
    def validate_gossip(dag, mempool):
        result = []

        # -------------- mempool validation
        mem_negative_gossips = mempool.get_all_negative_gossips()
        # for every negative gossip in the mempool, collect its author and the matching positives
        for negative in mem_negative_gossips:  # there may be many negatives for a non-existent block
            negative_author = negative.pubkey
            # look up the blocks referenced by the negative gossip;
            # skip further validation if this validator has no such block
            if dag.has_block_number(negative.number_of_block):
                # get block hash
                blocks_by_negative = dag.blocks_by_number[
                    negative.number_of_block]
                for block in blocks_by_negative:  # there may be more than one block for a given number
                    positives_for_negative = \
                        mempool.get_positive_gossips_by_block_hash(block.get_hash())
                    # if there are no positives for this negative, nothing is added
                    for positive in positives_for_negative:
                        if positive.pubkey == negative_author:
                            # record the conflicting positive and negative gossip hashes from the same author
                            result.append(
                                [positive.get_hash(),
                                 negative.get_hash()])

        # -------------- dag validation
        # TODO: penalize a standalone positive gossip (one without a matching negative)?
        # what else can be validated via tx_by_hash?
        # dag_negative_gossips = dag.get_negative_gossips()
        # dag_positive_gossips = dag.get_positive_gossips()
        # dag_penalty_gossips = dag.get_penalty_gossips()
        return result
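
A minimal sketch of how the conflict pairs returned by validate_gossip could be turned into penalty gossip transactions. The PenaltyGossipTransaction fields follow the usage shown in the test further below; build_penalty_gossips is a hypothetical helper, and the project imports (PenaltyGossipTransaction, Private, Time) are assumed to be the same ones used elsewhere in this file.

def build_penalty_gossips(conflict_pairs, signer_private):
    # conflict_pairs is the list of [positive_hash, negative_hash] pairs produced by validate_gossip
    penalties = []
    for positive_hash, negative_hash in conflict_pairs:
        tx = PenaltyGossipTransaction()
        tx.timestamp = Time.get_current_time()
        tx.conflicts = [positive_hash, negative_hash]
        tx.signature = Private.sign(tx.get_hash(), signer_private)
        penalties.append(tx)
    return penalties
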
    def test_secret_sharing_rounds(self):
        dag = Dag(0)
        epoch = Epoch(dag)

        dummy_private = Private.generate()

        signers = []
        for i in range(0, ROUND_DURATION + 1):
            signers.append(Private.generate())

        private_keys = []

        block_number = 1
        genesis_hash = dag.genesis_block().get_hash()
        prev_hash = genesis_hash
        signer_index = 0
        for i in Epoch.get_round_range(1, Round.PUBLIC):
            private = Private.generate()
            private_keys.append(private)

            signer = signers[signer_index]
            pubkey_tx = PublicKeyTransaction()
            pubkey_tx.generated_pubkey = Private.publickey(private)
            pubkey_tx.pubkey_index = signer_index
            pubkey_tx.signature = Private.sign(
                pubkey_tx.get_signing_hash(genesis_hash), signer)

            block = Block()
            block.timestamp = i * BLOCK_TIME
            block.prev_hashes = [prev_hash]
            block.system_txs = [pubkey_tx]
            signed_block = BlockFactory.sign_block(block, signer)
            dag.add_signed_block(i, signed_block)
            signer_index += 1
            prev_hash = block.get_hash()

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.COMMIT))

        public_keys = []
        for private in private_keys:
            public_keys.append(Private.publickey(private))

        randoms_list = []
        expected_random_pieces = []
        for i in Epoch.get_round_range(1, Round.SECRETSHARE):
            random_bytes = os.urandom(32)
            random_value = int.from_bytes(random_bytes, byteorder='big')
            split_random_tx = SplitRandomTransaction()
            splits = split_secret(random_bytes, 2, 3)
            encoded_splits = encode_splits(splits, public_keys)
            split_random_tx.pieces = encoded_splits
            split_random_tx.pubkey_index = 0
            expected_random_pieces.append(split_random_tx.pieces)
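            # NOTE: test-only dummy signature; it reuses the hash of the last pubkey_tx from the previous loop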
            split_random_tx.signature = Private.sign(pubkey_tx.get_hash(),
                                                     dummy_private)
            block = Block()
            block.timestamp = i * BLOCK_TIME
            block.prev_hashes = [prev_hash]
            block.system_txs = [split_random_tx]
            signed_block = BlockFactory.sign_block(block, dummy_private)
            dag.add_signed_block(i, signed_block)
            randoms_list.append(random_value)
            prev_hash = block.get_hash()

        expected_seed = sum_random(randoms_list)

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.REVEAL))

        signer_index = 0
        private_key_index = 0
        raw_private_keys = []
        for i in Epoch.get_round_range(1, Round.PRIVATE):
            private_key_tx = PrivateKeyTransaction()
            private_key_tx.key = Keys.to_bytes(private_keys[private_key_index])
            raw_private_keys.append(private_key_tx.key)
            signer = signers[signer_index]
            block = Block()
            block.system_txs = [private_key_tx]
            block.prev_hashes = [prev_hash]
            block.timestamp = block_number * BLOCK_TIME
            signed_block = BlockFactory.sign_block(block, signer)
            dag.add_signed_block(i, signed_block)
            signer_index += 1
            private_key_index += 1
            prev_hash = block.get_hash()

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.FINAL))

        top_block_hash = dag.get_top_blocks_hashes()[0]

        random_splits = epoch.get_random_splits_for_epoch(top_block_hash)
        self.assertEqual(expected_random_pieces, random_splits)

        restored_randoms = []
        for i in range(0, len(random_splits)):
            random = decode_random(random_splits[i],
                                   Keys.list_from_bytes(raw_private_keys))
            restored_randoms.append(random)

        self.assertEqual(randoms_list, restored_randoms)

        seed = epoch.extract_shared_random(top_block_hash)
        self.assertEqual(expected_seed, seed)
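
For reference, a standalone sketch of the split/encode/decode round trip exercised above, using the same helpers (split_secret, encode_splits, decode_random) and key utilities; the 2-of-3 threshold mirrors the test, and imports are assumed to match the surrounding examples.

def secret_share_round_trip_sketch():
    # three receivers, threshold of two, as in the test above
    receiver_privates = [Private.generate() for _ in range(3)]
    receiver_publics = [Private.publickey(p) for p in receiver_privates]

    random_bytes = os.urandom(32)
    pieces = encode_splits(split_secret(random_bytes, 2, 3), receiver_publics)

    raw_privates = [Keys.to_bytes(p) for p in receiver_privates]
    restored = decode_random(pieces, Keys.list_from_bytes(raw_privates))
    assert restored == int.from_bytes(random_bytes, byteorder='big')
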
    def test_commit_reveal(self):
        dag = Dag(0)
        epoch = Epoch(dag)

        private = Private.generate()

        prev_hash = ChainGenerator.fill_with_dummies(
            dag,
            dag.genesis_block().get_hash(),
            Epoch.get_round_range(1, Round.PUBLIC))

        randoms_list = []
        for i in Epoch.get_round_range(1, Round.COMMIT):
            random_value = int.from_bytes(os.urandom(32), byteorder='big')
            randoms_list.append(random_value)

        expected_seed = sum_random(randoms_list)

        reveals = []

        epoch_hash = dag.genesis_block().get_hash()

        for i in Epoch.get_round_range(1, Round.COMMIT):
            rand = randoms_list.pop()
            random_bytes = rand.to_bytes(32, byteorder='big')
            commit, reveal = TestEpoch.create_dummy_commit_reveal(
                random_bytes, epoch_hash)
            commit_block = BlockFactory.create_block_with_timestamp(
                [prev_hash], i * BLOCK_TIME)
            commit_block.system_txs = [commit]
            signed_block = BlockFactory.sign_block(commit_block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = commit_block.get_hash()
            reveals.append(reveal)

            revealing_key = Keys.from_bytes(reveal.key)
            encrypted_bytes = Public.encrypt(random_bytes,
                                             Private.publickey(revealing_key))
            decrypted_bytes = Private.decrypt(encrypted_bytes, revealing_key)
            # TODO check if encryption decryption can work million times in a row
            self.assertEqual(decrypted_bytes, random_bytes)

            revealed_value = Private.decrypt(commit.rand, revealing_key)
            self.assertEqual(revealed_value, random_bytes)

        # self.assertEqual(len(reveals), ROUND_DURATION)

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.SECRETSHARE))

        for i in Epoch.get_round_range(1, Round.REVEAL):
            reveal_block = BlockFactory.create_block_with_timestamp(
                [prev_hash], i * BLOCK_TIME)
            reveal_block.system_txs = [reveals.pop()]
            signed_block = BlockFactory.sign_block(reveal_block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = reveal_block.get_hash()

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.PRIVATE))

        prev_hash = ChainGenerator.fill_with_dummies(
            dag, prev_hash, Epoch.get_round_range(1, Round.FINAL))

        seed = epoch.reveal_commited_random(prev_hash)
        self.assertEqual(expected_seed, seed)
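
The commit/reveal pair above ultimately relies on a plain asymmetric round trip: commit by publishing the value encrypted to a throwaway key, reveal by publishing that key. A minimal sketch of just that step, using only the Public/Private helpers already exercised in the test:

def commit_reveal_encryption_sketch():
    revealing_key = Private.generate()
    committed_value = os.urandom(32)

    # commit: publish the value encrypted to the revealing key's public half
    commit_blob = Public.encrypt(committed_value, Private.publickey(revealing_key))
    # reveal: publish the key so anyone can decrypt and check the commitment
    assert Private.decrypt(commit_blob, revealing_key) == committed_value
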
    def test_release_stake(self):
        # base initialization
        dag = Dag(0)
        epoch = Epoch(dag)
        permissions = Permissions(epoch)
        node_private = Private.generate()

        initial_validators = Validators.read_genesis_validators_from_file()

        genesis_hash = dag.genesis_block().get_hash()
        prev_hash = genesis_hash
        for i in range(1, 9):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                         BLOCK_TIME * 9)

        # create new node for stake hold
        new_node_private = Private.generate()
        new_node_public = Private.publickey(new_node_private)

        # create transaction for stake hold for new node
        tx_hold = StakeHoldTransaction()
        tx_hold.amount = 2000
        tx_hold.pubkey = Keys.to_bytes(new_node_public)
        tx_hold.signature = Private.sign(tx_hold.get_hash(), new_node_private)

        # append signed stake hold transaction
        block.system_txs.append(tx_hold)

        # sign block by one of validators
        signed_block = BlockFactory.sign_block(block, node_private)
        # add signed block to DAG
        dag.add_signed_block(9, signed_block)

        resulting_validators = permissions.get_validators(block.get_hash())
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)
        self.assertIn(new_node_public, pub_keys)

        # add blocks for new epoch
        for i in range(10, 18):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        # create stake release transaction for new stakeholder
        tx_release = StakeReleaseTransaction()
        tx_release.pubkey = Keys.to_bytes(new_node_public)
        tx_release.signature = Private.sign(tx_release.get_hash(),
                                            new_node_private)

        # append signed stake release transaction
        block.system_txs.append(tx_release)

        # sign block by one of validators
        signed_block = BlockFactory.sign_block(block, node_private)
        # add signed block to DAG
        dag.add_signed_block(19, signed_block)

        # verify that the new stakeholder is now NOT in the validators list
        # (after the stake release transaction signed by the holder)
        resulting_validators = permissions.get_validators(block.get_hash())
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)
        self.assertNotIn(new_node_public, pub_keys)
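
The stake hold and release transactions built by hand above are the same ones the node code earlier in this file produces through TransactionFactory; a brief hedged sketch of that factory path (the amount of 1000 mirrors broadcast_stakehold_transaction):

def stake_lifecycle_via_factory_sketch():
    node_private = Private.generate()
    hold_tx = TransactionFactory.create_stake_hold_transaction(1000, node_private)
    release_tx = TransactionFactory.create_stake_release_transaction(node_private)
    return hold_tx, release_tx
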
Example #22
0
    def test_confirmations_calculation(self):
        dag = Dag(0)
        private = Private.generate()
        prev_hash = dag.genesis_block().get_hash()
        for i in range(1, 9):
            if i == 5 or i == 7: continue
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        prev_hash = dag.blocks_by_number[1][0].get_hash()
        for i in range(2, 5):
            if i == 3: continue
            block = BlockFactory.create_block_with_timestamp(
                [prev_hash], BLOCK_TIME * i + 1)
            signed_block = BlockFactory.sign_block(block, private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        immutability = Immutability(dag)

        # first branch check
        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[8][0].get_hash())
        self.assertEqual(confirmations, 0)

        skipped_block = SkippedBlock(dag.blocks_by_number[8][0].get_hash(),
                                     backstep_count=1)
        confirmations = immutability.calculate_skipped_block_confirmations(
            skipped_block)
        self.assertEqual(confirmations, 1)

        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[6][0].get_hash())
        self.assertEqual(confirmations, 1)

        skipped_block = SkippedBlock(dag.blocks_by_number[6][0].get_hash(),
                                     backstep_count=1)
        confirmations = immutability.calculate_skipped_block_confirmations(
            skipped_block)
        self.assertEqual(confirmations, 2)

        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[4][0].get_hash())
        self.assertEqual(confirmations, 2)

        # second branch check
        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[4][1].get_hash())
        self.assertEqual(confirmations, 0)

        skipped_block = SkippedBlock(dag.blocks_by_number[4][1].get_hash(),
                                     backstep_count=1)
        confirmations = immutability.calculate_skipped_block_confirmations(
            skipped_block)
        self.assertEqual(confirmations, 1)

        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[2][1].get_hash())
        self.assertEqual(confirmations, 1)

        # common ancestor
        # four existing blocks in the following five slots
        confirmations = immutability.calculate_confirmations(
            dag.blocks_by_number[1][0].get_hash())
        self.assertEqual(confirmations, 4)
    def test_remove_from_validators_by_penalty_gossip(self):
        # base initialization
        dag = Dag(0)
        epoch = Epoch(dag)
        permissions = Permissions(epoch)
        node_private = Private.generate()

        initial_validators = Validators.read_genesis_validators_from_file()

        genesis_hash = dag.genesis_block().get_hash()
        prev_hash = genesis_hash
        for i in range(1, 9):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        # take the public key of one of the genesis validators (a freshly generated private key stands in for its real key)
        genesis_validator_private = Private.generate()
        genesis_validator_public = initial_validators[9].public_key

        # put both a positive and a negative gossip from the same node into block 10
        block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                         BLOCK_TIME * 10)

        gossip_negative_tx = NegativeGossipTransaction()
        gossip_negative_tx.pubkey = genesis_validator_public
        gossip_negative_tx.timestamp = Time.get_current_time()
        gossip_negative_tx.number_of_block = 5
        gossip_negative_tx.signature = Private.sign(
            gossip_negative_tx.get_hash(), genesis_validator_private)
        # add the negative gossip to the block
        block.system_txs.append(gossip_negative_tx)

        gossip_positive_tx = PositiveGossipTransaction()
        gossip_positive_tx.pubkey = genesis_validator_public
        gossip_positive_tx.timestamp = Time.get_current_time()
        gossip_positive_tx.block_hash = dag.blocks_by_number[5][0].get_hash()
        gossip_positive_tx.signature = Private.sign(
            gossip_positive_tx.get_hash(), genesis_validator_private)
        # add a positive gossip for the same block number 5
        block.system_txs.append(gossip_positive_tx)

        signed_block = BlockFactory.sign_block(block,
                                               genesis_validator_private)
        dag.add_signed_block(10, signed_block)
        prev_hash = block.get_hash()
        # --------------------------------------------------

        # put a penalty gossip into block 11
        block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                         BLOCK_TIME * 11)
        penalty_gossip_tx = PenaltyGossipTransaction()
        penalty_gossip_tx.timestamp = Time.get_current_time()
        penalty_gossip_tx.conflicts = [
            gossip_positive_tx.get_hash(),
            gossip_negative_tx.get_hash()
        ]
        # sign the penalty gossip with the genesis validator's key
        penalty_gossip_tx.signature = Private.sign(
            penalty_gossip_tx.get_hash(), genesis_validator_private)
        block.system_txs.append(penalty_gossip_tx)

        signed_block = BlockFactory.sign_block(block,
                                               genesis_validator_private)
        dag.add_signed_block(11, signed_block)
        prev_hash = block.get_hash()
        # --------------------------------------------------

        # verify that the genesis node is still in the validators list
        current_epoch_hash = epoch.get_epoch_hashes()
        # for now we do NOT need to recalculate validators (the epoch hash here is still the genesis hash)
        resulting_validators = permissions.get_validators(
            current_epoch_hash.get(prev_hash))
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)
        self.assertIn(genesis_validator_public, pub_keys)

        # produce blocks until the end of the epoch
        from chain.params import ROUND_DURATION
        for i in range(12, (ROUND_DURATION * 6 + 4)):
            block = BlockFactory.create_block_with_timestamp([prev_hash],
                                                             BLOCK_TIME * i)
            signed_block = BlockFactory.sign_block(block, node_private)
            dag.add_signed_block(i, signed_block)
            prev_hash = block.get_hash()

        # check for new epoch
        self.assertTrue(epoch.is_new_epoch_upcoming(i))
        self.assertTrue(epoch.current_epoch == 2)

        # recalculate validators for last block hash
        resulting_validators = permissions.get_validators(prev_hash)
        pub_keys = []
        for validator in resulting_validators:
            pub_keys.append(validator.public_key)

        self.assertNotIn(genesis_validator_public, pub_keys)