Example #1
    def verify_spent_reward(self, block: Block) -> None:
        """ Verify that the reward being spent is old enough (has enoughs blocks after it on the best chain).

        We only consider the blocks on the best chain up to the tx's timestamp.
        """
        assert self.storage is not None
        if self._height_cache:
            # get_best_block_tips is a costly method because there are many orphan blocks in our blockchain.
            # This method is called for each input that spends a block and, since we have many transactions
            # that consolidate block rewards, this is a common case.
            # This cache cuts roughly 90% of the verification time for those transactions.
            best_height = self._height_cache
        else:
            # using the timestamp, we get the block immediately before this transaction in the blockchain
            tips = self.storage.get_best_block_tips(self.timestamp - 1)
            assert len(tips) > 0
            tip = self.storage.get_transaction(tips[0])
            assert tip is not None
            assert self.timestamp > tip.timestamp
            best_height = tip.get_metadata().height
            self._height_cache = best_height
        spent_height = block.get_metadata().height
        spend_blocks = best_height - spent_height
        if spend_blocks < settings.REWARD_SPEND_MIN_BLOCKS:
            raise RewardLocked(
                f'Reward needs {settings.REWARD_SPEND_MIN_BLOCKS} blocks to be spent, {spend_blocks} '
                'not enough')
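
As a minimal illustration of the rule above, the check reduces to comparing the number of best-chain blocks mined on top of the reward block against `settings.REWARD_SPEND_MIN_BLOCKS`. The sketch below is self-contained; the threshold value and the heights are made-up for illustration, not real network settings.

# Minimal sketch of the reward-lock rule from verify_spent_reward above.
# REWARD_SPEND_MIN_BLOCKS and the heights below are illustrative values only.
REWARD_SPEND_MIN_BLOCKS = 300


class RewardLocked(Exception):
    pass


def check_reward_lock(best_height: int, spent_height: int) -> None:
    blocks_after_reward = best_height - spent_height
    if blocks_after_reward < REWARD_SPEND_MIN_BLOCKS:
        raise RewardLocked(f'Reward needs {REWARD_SPEND_MIN_BLOCKS} blocks to be spent, '
                           f'{blocks_after_reward} not enough')


check_reward_lock(best_height=1550, spent_height=1000)    # ok: 550 blocks on top of the reward
# check_reward_lock(best_height=1250, spent_height=1000)  # would raise RewardLocked: only 250 blocks
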
Example #2
    def add_voided_by(self,
                      block: Block,
                      voided_hash: Optional[bytes] = None) -> bool:
        """ Add a new hash in its `meta.voided_by`. If `voided_hash` is None, it includes
        the block's own hash.
        """
        assert block.storage is not None
        assert block.hash is not None

        storage = block.storage

        if voided_hash is None:
            voided_hash = block.hash
        assert voided_hash is not None

        meta = block.get_metadata()
        if not meta.voided_by:
            meta.voided_by = set()
        if voided_hash in meta.voided_by:
            return False

        self.log.debug('add_voided_by',
                       block=block.hash_hex,
                       voided_hash=voided_hash.hex())

        meta.voided_by.add(voided_hash)
        storage.save_transaction(block, only_metadata=True)

        spent_by: Iterable[bytes] = chain(*meta.spent_outputs.values())
        for tx_hash in spent_by:
            tx = storage.get_transaction(tx_hash)
            assert isinstance(tx, Transaction)
            self.consensus.transaction_algorithm.add_voided_by(tx, voided_hash)
        return True
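
The interesting part of `add_voided_by` is the cascade: voiding a block also voids the transactions that spend its outputs, and the transaction algorithm propagates that further. Below is a toy, storage-free sketch of that propagation pattern; the dict-based "storage" and field names are stand-ins for illustration, not the real metadata or consensus classes.

# Toy sketch of the voiding cascade behind add_voided_by.
toy_storage = {
    'block1': {'voided_by': set(), 'spent_by': ['tx1']},
    'tx1': {'voided_by': set(), 'spent_by': ['tx2']},
    'tx2': {'voided_by': set(), 'spent_by': []},
}


def toy_add_voided_by(tx_id: str, voided_hash: str) -> bool:
    meta = toy_storage[tx_id]
    if voided_hash in meta['voided_by']:
        return False  # already voided by this hash; stop the cascade here
    meta['voided_by'].add(voided_hash)
    for child_id in meta['spent_by']:
        toy_add_voided_by(child_id, voided_hash)
    return True


toy_add_voided_by('block1', 'block1')
assert toy_storage['tx2']['voided_by'] == {'block1'}
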
Example #3
    def update_voided_by_from_parents(self, block: Block) -> bool:
        """Update block's metadata voided_by from parents.
        Return True if the block is voided and False otherwise."""
        assert block.storage is not None
        voided_by: Set[bytes] = self.union_voided_by_from_parents(block)
        if voided_by:
            meta = block.get_metadata()
            if meta.voided_by:
                meta.voided_by.update(voided_by)
            else:
                meta.voided_by = voided_by.copy()
            block.storage.save_transaction(block, only_metadata=True)
            block.storage.del_from_indexes(block, relax_assert=True)
            return True
        return False
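
`union_voided_by_from_parents` is not shown here; a hedged sketch of its plausible core is below: the union of the parents' `voided_by` sets. The real method may apply extra filtering, so this is only the basic shape, with a plain list standing in for `block.get_parents()`.

# Hedged sketch of a union over the parents' voided_by metadata.
from typing import Iterable, Optional, Set


def toy_union_voided_by(parents_voided_by: Iterable[Optional[Set[bytes]]]) -> Set[bytes]:
    result: Set[bytes] = set()
    for voided_by in parents_voided_by:
        if voided_by:
            result.update(voided_by)
    return result


assert toy_union_voided_by([{b'a'}, None, {b'a', b'b'}]) == {b'a', b'b'}
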
Example #4
    def verify_spent_reward(self, block: Block) -> None:
        """ Verify that the reward being spent is old enough (has enoughs blocks after it on the best chain).

        We only consider the blocks on the best chain up to the tx's timestamp.
        """
        assert self.storage is not None
        # using the timestamp, we get the block immediately before this transaction in the blockchain
        tips = self.storage.get_best_block_tips(self.timestamp - 1)
        assert len(tips) > 0
        tip = self.storage.get_transaction(tips[0])
        assert tip is not None
        assert self.timestamp > tip.timestamp
        best_height = tip.get_metadata().height
        spent_height = block.get_metadata().height
        spend_blocks = best_height - spent_height
        if spend_blocks < settings.REWARD_SPEND_MIN_BLOCKS:
            raise RewardLocked(
                f'Reward needs {settings.REWARD_SPEND_MIN_BLOCKS} blocks to be spent, {spend_blocks} '
                'not enough')
Example #5
    def calculate_score(self,
                        block: Block,
                        *,
                        mark_as_best_chain: bool = False) -> float:
        """ Calculate block's score, which is the accumulated work of the verified transactions and blocks.

        :param mark_as_best_chain: If `True`, the transactions will have their `meta.first_block`
                                   point to the blocks of the chain.
        """
        assert block.storage is not None
        if block.is_genesis:
            if mark_as_best_chain:
                meta = block.get_metadata()
                meta.score = block.weight
                block.storage.save_transaction(block, only_metadata=True)
            return block.weight

        parent = self._find_first_parent_in_best_chain(block)
        newest_timestamp = parent.timestamp

        used: Set[bytes] = set()
        return self._score_block_dfs(block, used, mark_as_best_chain,
                                     newest_timestamp)
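
`calculate_score` accumulates work, not raw weight: a weight `w` stands for roughly `2**w` expected hashes, so scores are combined with a log-sum rather than a plain sum. Below is a hedged sketch of how such a `sum_weights` helper can be written; the real helper lives elsewhere in the codebase and may differ in numerical details.

# Hedged sketch: combining two log2-scale weights, i.e. log2(2**a + 2**b).
from math import log2


def sum_weights(a: float, b: float) -> float:
    hi, lo = (a, b) if a >= b else (b, a)
    return hi + log2(1.0 + 2.0 ** (lo - hi))  # stable even for large weights


# Two blocks of weight 60 represent twice the work of one, i.e. a score of 61.
assert abs(sum_weights(60.0, 60.0) - 61.0) < 1e-9
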
Example #6
class BaseTransactionStorageTest(unittest.TestCase):
    __test__ = False

    def setUp(self, tx_storage, reactor=None):
        from hathor.manager import HathorManager

        if not reactor:
            self.reactor = Clock()
        else:
            self.reactor = reactor
        self.reactor.advance(time.time())
        self.tx_storage = tx_storage
        assert tx_storage.first_timestamp > 0

        tx_storage._manually_initialize()

        self.genesis = self.tx_storage.get_all_genesis()
        self.genesis_blocks = [tx for tx in self.genesis if tx.is_block]
        self.genesis_txs = [tx for tx in self.genesis if not tx.is_block]

        self.tmpdir = tempfile.mkdtemp()
        wallet = Wallet(directory=self.tmpdir)
        wallet.unlock(b'teste')
        self.manager = HathorManager(self.reactor,
                                     tx_storage=self.tx_storage,
                                     wallet=wallet)

        self.tx_storage.indexes.enable_address_index(self.manager.pubsub)
        self.tx_storage.indexes.enable_tokens_index()

        block_parents = [
            tx.hash for tx in chain(self.genesis_blocks, self.genesis_txs)
        ]
        output = TxOutput(200, P2PKH.create_output_script(BURN_ADDRESS))
        self.block = Block(timestamp=MIN_TIMESTAMP,
                           weight=12,
                           outputs=[output],
                           parents=block_parents,
                           nonce=100781,
                           storage=tx_storage)
        self.block.resolve()
        self.block.verify()
        self.block.get_metadata().validation = ValidationState.FULL

        tx_parents = [tx.hash for tx in self.genesis_txs]
        tx_input = TxInput(
            tx_id=self.genesis_blocks[0].hash,
            index=0,
            data=bytes.fromhex(
                '46304402203470cb9818c9eb842b0c433b7e2b8aded0a51f5903e971649e870763d0266a'
                'd2022049b48e09e718c4b66a0f3178ef92e4d60ee333d2d0e25af8868acf5acbb35aaa583'
                '056301006072a8648ce3d020106052b8104000a034200042ce7b94cba00b654d4308f8840'
                '7345cacb1f1032fb5ac80407b74d56ed82fb36467cb7048f79b90b1cf721de57e942c5748'
                '620e78362cf2d908e9057ac235a63'))

        self.tx = Transaction(
            timestamp=MIN_TIMESTAMP + 2,
            weight=10,
            nonce=932049,
            inputs=[tx_input],
            outputs=[output],
            tokens=[
                bytes.fromhex(
                    '0023be91834c973d6a6ddd1a0ae411807b7c8ef2a015afb5177ee64b666ce602'
                )
            ],
            parents=tx_parents,
            storage=tx_storage)
        self.tx.resolve()
        self.tx.get_metadata().validation = ValidationState.FULL

        # Disable weakref to test the internal methods. Otherwise, most methods return objects from weakref.
        self.tx_storage._disable_weakref()

        self.tx_storage.enable_lock()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def test_genesis_ref(self):
        # Enable weakref for this test only.
        self.tx_storage._enable_weakref()

        genesis_set = set(self.tx_storage.get_all_genesis())
        for tx in genesis_set:
            tx2 = self.tx_storage.get_transaction(tx.hash)
            self.assertTrue(tx is tx2)

        from hathor.transaction.genesis import _get_genesis_transactions_unsafe
        genesis_from_settings = _get_genesis_transactions_unsafe(None)
        for tx in genesis_from_settings:
            tx2 = self.tx_storage.get_transaction(tx.hash)
            self.assertTrue(tx is not tx2)
            for tx3 in genesis_set:
                self.assertTrue(tx is not tx3)
                if tx2 == tx3:
                    self.assertTrue(tx2 is tx3)

    def test_genesis(self):
        self.assertEqual(1, len(self.genesis_blocks))
        self.assertEqual(2, len(self.genesis_txs))
        for tx in self.genesis:
            tx.verify()

        for tx in self.genesis:
            tx2 = self.tx_storage.get_transaction(tx.hash)
            self.assertEqual(tx, tx2)
            self.assertTrue(self.tx_storage.transaction_exists(tx.hash))

    def test_get_empty_merklee_tree(self):
        # We use `first_timestamp - 1` to ensure that the merkle tree will be empty.
        self.tx_storage.get_merkle_tree(self.tx_storage.first_timestamp - 1)

    def test_first_timestamp(self):
        self.assertEqual(self.tx_storage.first_timestamp,
                         min(x.timestamp for x in self.genesis))

    def test_storage_basic(self):
        self.assertEqual(1, self.tx_storage.get_block_count())
        self.assertEqual(2, self.tx_storage.get_tx_count())
        self.assertEqual(3, self.tx_storage.get_count_tx_blocks())

        block_parents_hash = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(1, len(block_parents_hash))
        self.assertEqual(block_parents_hash, [self.genesis_blocks[0].hash])

        tx_parents_hash = [x.data for x in self.tx_storage.get_tx_tips()]
        self.assertEqual(2, len(tx_parents_hash))
        self.assertEqual(set(tx_parents_hash),
                         {self.genesis_txs[0].hash, self.genesis_txs[1].hash})

    def test_storage_basic_v2(self):
        self.assertEqual(1, self.tx_storage.get_block_count())
        self.assertEqual(2, self.tx_storage.get_tx_count())
        self.assertEqual(3, self.tx_storage.get_count_tx_blocks())

        block_parents_hash = self.tx_storage.get_best_block_tips()
        self.assertEqual(1, len(block_parents_hash))
        self.assertEqual(block_parents_hash, [self.genesis_blocks[0].hash])

        tx_parents_hash = self.manager.get_new_tx_parents()
        self.assertEqual(2, len(tx_parents_hash))
        self.assertEqual(set(tx_parents_hash),
                         {self.genesis_txs[0].hash, self.genesis_txs[1].hash})

    def validate_save(self, obj):
        self.tx_storage.save_transaction(obj, add_to_indexes=True)

        loaded_obj1 = self.tx_storage.get_transaction(obj.hash)

        self.assertTrue(self.tx_storage.transaction_exists(obj.hash))

        self.assertEqual(obj, loaded_obj1)
        self.assertEqual(len(obj.get_funds_struct()),
                         len(loaded_obj1.get_funds_struct()))
        self.assertEqual(bytes(obj), bytes(loaded_obj1))
        self.assertEqual(obj.to_json(), loaded_obj1.to_json())
        self.assertEqual(obj.is_block, loaded_obj1.is_block)

        # Testing add and remove from cache
        if self.tx_storage.with_index:
            if obj.is_block:
                self.assertTrue(obj.hash in self.tx_storage.indexes.block_tips.
                                tx_last_interval)
            else:
                self.assertTrue(obj.hash in self.tx_storage.indexes.tx_tips.
                                tx_last_interval)

        self.tx_storage.del_from_indexes(obj)

        if self.tx_storage.with_index:
            if obj.is_block:
                self.assertFalse(obj.hash in self.tx_storage.indexes.
                                 block_tips.tx_last_interval)
            else:
                self.assertFalse(obj.hash in self.tx_storage.indexes.tx_tips.
                                 tx_last_interval)

        self.tx_storage.add_to_indexes(obj)
        if self.tx_storage.with_index:
            if obj.is_block:
                self.assertTrue(obj.hash in self.tx_storage.indexes.block_tips.
                                tx_last_interval)
            else:
                self.assertTrue(obj.hash in self.tx_storage.indexes.tx_tips.
                                tx_last_interval)

    def test_save_block(self):
        self.validate_save(self.block)

    def test_save_tx(self):
        self.validate_save(self.tx)

    def test_save_token_creation_tx(self):
        tx = create_tokens(self.manager, propagate=False)
        tx.get_metadata().validation = ValidationState.FULL
        self.validate_save(tx)

    def _validate_not_in_index(self, tx, index):
        tips = index.tips_index[tx.timestamp]
        self.assertNotIn(tx.hash, [x.data for x in tips])
        self.assertNotIn(tx.hash, index.tips_index.tx_last_interval)

        self.assertIsNone(index.txs_index.find_tx_index(tx))

    def _test_remove_tx_or_block(self, tx):
        self.validate_save(tx)

        self.tx_storage.remove_transaction(tx)
        with self.assertRaises(TransactionDoesNotExist):
            self.tx_storage.get_transaction(tx.hash)

        if hasattr(self.tx_storage, 'all_index'):
            self._validate_not_in_index(tx, self.tx_storage.all_index)

        if tx.is_block:
            if hasattr(self.tx_storage, 'block_index'):
                self._validate_not_in_index(tx, self.tx_storage.block_index)
        else:
            if hasattr(self.tx_storage, 'tx_index'):
                self._validate_not_in_index(tx, self.tx_storage.tx_index)

        # Check wallet index.
        addresses_index = self.tx_storage.indexes.addresses
        addresses = tx.get_related_addresses()
        for address in addresses:
            self.assertNotIn(tx.hash,
                             addresses_index.get_from_address(address))

        # TODO Check self.tx_storage.tokens_index

        # Try to remove twice. It is supposed to do nothing.
        self.tx_storage.remove_transaction(tx)

    def test_remove_tx(self):
        self._test_remove_tx_or_block(self.tx)

    def test_remove_block(self):
        self._test_remove_tx_or_block(self.block)

    def test_shared_memory(self):
        # Enable weakref for this test only.
        self.tx_storage._enable_weakref()

        self.validate_save(self.block)
        self.validate_save(self.tx)

        for tx in [self.tx, self.block]:
            # just making sure: if it is genesis, the test is wrong
            self.assertFalse(tx.is_genesis)

            # load transactions twice
            tx1 = self.tx_storage.get_transaction(tx.hash)
            tx2 = self.tx_storage.get_transaction(tx.hash)

            # naturally they should be equal, and this time the objects themselves should be identical too
            self.assertTrue(tx1 == tx2)
            self.assertTrue(tx1 is tx2)

            meta1 = tx1.get_metadata()
            meta2 = tx2.get_metadata()

            # and naturally the metadata too
            self.assertTrue(meta1 == meta2)
            self.assertTrue(meta1 is meta2)

    def test_get_wrong_tx(self):
        hex_error = bytes.fromhex(
            '00001c5c0b69d13b05534c94a69b2c8272294e6b0c536660a3ac264820677024')
        with self.assertRaises(TransactionDoesNotExist):
            self.tx_storage.get_transaction(hex_error)

    def test_save_metadata(self):
        # Saving genesis metadata
        self.tx_storage.save_transaction(self.genesis_txs[0],
                                         only_metadata=True)

        tx = self.block
        # First we save to the storage
        self.tx_storage.save_transaction(tx)

        metadata = tx.get_metadata()
        metadata.spent_outputs[1].append(self.genesis_blocks[0].hash)
        random_tx = bytes.fromhex(
            '0000222e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0f2222')
        metadata.children.append(random_tx)

        self.tx_storage.save_transaction(tx, only_metadata=True)
        tx2 = self.tx_storage.get_transaction(tx.hash)
        metadata2 = tx2.get_metadata()
        self.assertEqual(metadata, metadata2)

        total = 0
        for tx in self.tx_storage.get_all_transactions():
            total += 1

        self.assertEqual(total, 4)

    def test_storage_new_blocks(self):
        tip_blocks = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(tip_blocks, [self.genesis_blocks[0].hash])

        block1 = self._add_new_block()
        tip_blocks = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(tip_blocks, [block1.hash])

        block2 = self._add_new_block()
        tip_blocks = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(tip_blocks, [block2.hash])

        # Block3 has the same parents as block2.
        block3 = self._add_new_block(parents=block2.parents)
        tip_blocks = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(set(tip_blocks), {block2.hash, block3.hash})

        # Re-generate caches to test topological sort.
        self.tx_storage._manually_initialize()
        tip_blocks = [x.data for x in self.tx_storage.get_block_tips()]
        self.assertEqual(set(tip_blocks), {block2.hash, block3.hash})

    def test_token_list(self):
        tx = self.tx
        self.validate_save(tx)
        # 2 token uids
        tx.tokens.append(
            bytes.fromhex(
                '00001c5c0b69d13b05534c94a69b2c8272294e6b0c536660a3ac264820677024'
            ))
        tx.resolve()
        self.validate_save(tx)
        # no tokens
        tx.tokens = []
        tx.resolve()
        self.validate_save(tx)

    def _add_new_block(self, parents=None):
        block = self.manager.generate_mining_block()
        block.data = b'Testing, testing, 1, 2, 3... testing, testing...'
        if parents is not None:
            block.parents = parents
        block.weight = 10
        self.assertTrue(block.resolve())
        block.verify()
        self.manager.propagate_tx(block, fails_silently=False)
        self.reactor.advance(5)
        return block

    def test_topological_sort(self):
        _set_test_mode(TestMode.TEST_ALL_WEIGHT)
        _total = 0
        blocks = add_new_blocks(self.manager, 1, advance_clock=1)
        _total += len(blocks)
        blocks = add_blocks_unlock_reward(self.manager)
        _total += len(blocks)
        add_new_transactions(self.manager, 1, advance_clock=1)

        total = 0
        for tx in self.tx_storage._topological_sort():
            total += 1

        # added blocks + genesis txs + added tx
        self.assertEqual(total, _total + 3 + 1)

    def test_get_best_block_weight(self):
        block = self._add_new_block()
        weight = self.tx_storage.get_weight_best_block()
        self.assertEqual(block.weight, weight)

    @inlineCallbacks
    def test_concurrent_access(self):
        self.tx_storage.save_transaction(self.tx)
        self.tx_storage._enable_weakref()

        def handle_error(err):
            self.fail(
                'Error resolving concurrent access deferred. {}'.format(err))

        deferreds = []
        for i in range(5):
            d = deferToThread(self.tx_storage.get_transaction, self.tx.hash)
            d.addErrback(handle_error)
            deferreds.append(d)

        self.reactor.advance(3)
        yield gatherResults(deferreds)
        self.tx_storage._disable_weakref()

    def test_full_verification_attribute(self):
        self.assertFalse(self.tx_storage.is_running_full_verification())
        self.tx_storage.start_full_verification()
        self.assertTrue(self.tx_storage.is_running_full_verification())
        self.tx_storage.finish_full_verification()
        self.assertFalse(self.tx_storage.is_running_full_verification())

    def test_key_value_attribute(self):
        attr = 'test'
        val = 'a'

        # Try to get a key that does not exist
        self.assertIsNone(self.tx_storage.get_value(attr))

        # Try to remove this key that does not exist
        self.tx_storage.remove_value(attr)

        # Add the key/value
        self.tx_storage.add_value(attr, val)

        # Get correct value
        self.assertEqual(self.tx_storage.get_value(attr), val)

        # Remove the key
        self.tx_storage.remove_value(attr)

        # Key should not exist again
        self.assertIsNone(self.tx_storage.get_value(attr))
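
`BaseTransactionStorageTest` sets `__test__ = False`, so the base class itself is not collected; a concrete storage backend is exercised by subclassing it and passing the backend to `setUp`. A hedged sketch of such a subclass follows; the backend class name and import path are assumptions for illustration and may not match the actual storage module layout.

# Hedged sketch of a concrete storage test subclass.
from hathor.transaction.storage import TransactionMemoryStorage


class TransactionMemoryStorageTest(BaseTransactionStorageTest):
    __test__ = True  # re-enable collection for the concrete backend

    def setUp(self):
        super().setUp(TransactionMemoryStorage())
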
Example #7
    def _make_block_template(self,
                             parent_block: Block,
                             parent_txs: 'ParentTxs',
                             current_timestamp: int,
                             with_weight_decay: bool = False) -> BlockTemplate:
        """ Further implementation of making block template, used by make_block_template and make_custom_block_template
        """
        assert parent_block.hash is not None
        # the absolute minimum would be the previous timestamp + 1
        timestamp_abs_min = parent_block.timestamp + 1
        # and absolute maximum limited by max time between blocks
        if not parent_block.is_genesis:
            timestamp_abs_max = parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1
        else:
            timestamp_abs_max = 0xffffffff
        assert timestamp_abs_max > timestamp_abs_min
        # actual minimum depends on the timestamps of the parent txs
        # it has to be at least the max timestamp of parents + 1
        timestamp_min = max(timestamp_abs_min, parent_txs.max_timestamp + 1)
        assert timestamp_min <= timestamp_abs_max
        # when we have weight decay, the max timestamp will be when the next decay happens
        if with_weight_decay and settings.WEIGHT_DECAY_ENABLED:
            # we either have passed the first decay or not, the range will vary depending on that
            if timestamp_min > timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
                timestamp_max_decay = timestamp_min + settings.WEIGHT_DECAY_WINDOW_SIZE
            else:
                timestamp_max_decay = timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
            timestamp_max = min(timestamp_abs_max, timestamp_max_decay)
        else:
            timestamp_max = timestamp_abs_max
        timestamp = min(max(current_timestamp, timestamp_min), timestamp_max)
        weight = daa.calculate_next_weight(parent_block, timestamp)
        parent_block_metadata = parent_block.get_metadata()
        height = parent_block_metadata.height + 1
        parents = [parent_block.hash] + parent_txs.must_include
        parents_any = parent_txs.can_include
        # simplify representation when you only have one to choose from
        if len(parents) + len(parents_any) == 3:
            parents.extend(sorted(parents_any))
            parents_any = []
        assert len(parents) + len(
            parents_any) >= 3, 'There should be enough parents to choose from'
        assert 1 <= len(parents) <= 3, 'Impossible number of parents'
        if __debug__ and len(parents) == 3:
            assert len(
                parents_any
            ) == 0, 'Extra parents to choose from that cannot be chosen'
        return BlockTemplate(
            versions={
                TxVersion.REGULAR_BLOCK.value,
                TxVersion.MERGE_MINED_BLOCK.value
            },
            reward=daa.get_tokens_issued_per_block(height),
            weight=weight,
            timestamp_now=current_timestamp,
            timestamp_min=timestamp_min,
            timestamp_max=timestamp_max,
            parents=parents,
            parents_any=parents_any,
            height=height,
            score=sum_weights(parent_block_metadata.score, weight),
        )
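
Two easy-to-miss details in the code above: the template timestamp is clamped into `[timestamp_min, timestamp_max]`, and when exactly three parents exist in total the optional ones are folded into the mandatory list. The toy sketch below replays both with made-up hash values.

# Toy sketch of the timestamp clamping and the parent-simplification step.
def clamp_timestamp(current: int, ts_min: int, ts_max: int) -> int:
    return min(max(current, ts_min), ts_max)


assert clamp_timestamp(100, 150, 200) == 150  # too early: pushed up to the minimum
assert clamp_timestamp(250, 150, 200) == 200  # too late: pulled back to the maximum

parents = [b'parent-block-hash']            # block parent plus must-include txs
parents_any = [b'tx-b-hash', b'tx-a-hash']  # optional tx parents
if len(parents) + len(parents_any) == 3:
    parents.extend(sorted(parents_any))
    parents_any = []
assert parents == [b'parent-block-hash', b'tx-a-hash', b'tx-b-hash'] and parents_any == []
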
Example #8
    def update_voided_info(self, block: Block) -> None:
        """ This method is called only once when a new block arrives.

        The blockchain part of the DAG is a tree with the genesis block as the root.
        I'll say that a block A is connected to a block B when A verifies B, i.e., B is a parent of A.

        A chain is a sequence of connected blocks starting in a leaf and ending in the root, i.e., any path from a leaf
        to the root is a chain. Given a chain, its head is a leaf in the tree, and its tail is the sub-chain without
        the head.

        The best chain is a chain that has the highest score of all chains.

        The score of a block is calculated as the sum of the weights of all transactions and blocks both directly and
        indirectly verified by the block. The score of a chain is defined as the score of its head.

        The side chains are the chains whose scores are smaller than the best chain's.
        The heads of the side chains are always voided blocks.

        There are two possible states for the block chain:
        (i)  It has a single best chain, i.e., one chain has the highest score
        (ii) It has multiple best chains, i.e., two or more chains have the same score (and this score is the highest
             among the chains)

        When there are multiple best chains, I'll call them best chain candidates.

        The arrived block can be connected in four possible ways:
        (i)   To the head of a best chain
        (ii)  To the tail of the best chain
        (iii) To the head of a side chain
        (iv)  To the tail of a side chain

        Thus, there are eight cases to be handled when a new block arrives, which are:
        (i)    Single best chain, connected to the head of the best chain
        (ii)   Single best chain, connected to the tail of the best chain
        (iii)  Single best chain, connected to the head of a side chain
        (iv)   Single best chain, connected to the tail of a side chain
        (v)    Multiple best chains, connected to the head of a best chain
        (vi)   Multiple best chains, connected to the tail of a best chain
        (vii)  Multiple best chains, connected to the head of a side chain
        (viii) Multiple best chains, connected to the tail of a side chain

        Case (i) is trivial because the single best chain will remain as the best chain. So, just calculate the new
        score and that's it.

        Case (v) is also trivial. As there are multiple best chains and the new block is connected to the head of one
        of them, this will be the new winner. So, the blockchain state will change to a single best chain again.

        In the other cases, we must calculate the score and compare with the best score.

        When there are multiple best chains, all their heads will be voided.
        """
        assert block.weight > 0, 'This algorithm assumes that block\'s weight is always greater than zero'
        if not block.parents:
            assert block.is_genesis is True
            self.update_score_and_mark_as_the_best_chain(block)
            return

        assert block.storage is not None
        assert block.hash is not None

        storage = block.storage
        assert storage.indexes is not None

        # Union of voided_by of parents
        voided_by: Set[bytes] = self.union_voided_by_from_parents(block)

        # Update accumulated weight of the transactions voiding us.
        assert block.hash not in voided_by
        for h in voided_by:
            tx = storage.get_transaction(h)
            tx_meta = tx.get_metadata()
            tx_meta.accumulated_weight = sum_weights(
                tx_meta.accumulated_weight, block.weight)
            storage.save_transaction(tx, only_metadata=True)

        # Check conflicts of the transactions voiding us.
        for h in voided_by:
            tx = storage.get_transaction(h)
            if not tx.is_block:
                assert isinstance(tx, Transaction)
                self.consensus.transaction_algorithm.check_conflicts(tx)

        parent = block.get_block_parent()
        parent_meta = parent.get_metadata()
        assert block.hash in parent_meta.children

        # This method is called after the metadata of the parent is updated.
        # So, if the parent has only one child, it must be the current block.
        is_connected_to_the_head = bool(len(parent_meta.children) == 1)
        is_connected_to_the_best_chain = bool(not parent_meta.voided_by)

        if is_connected_to_the_head and is_connected_to_the_best_chain:
            # Case (i): Single best chain, connected to the head of the best chain
            self.update_score_and_mark_as_the_best_chain_if_possible(block)
            # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`,
            # we need to check that block is not voided.
            meta = block.get_metadata()
            if not meta.voided_by:
                storage.indexes.height.add_new(meta.height, block.hash,
                                               block.timestamp)
                storage.update_best_block_tips_cache([block.hash])
            # The following assert must be true, but it is only checked when slow asserts are enabled, for performance reasons.
            if settings.SLOW_ASSERTS:
                assert len(storage.get_best_block_tips(skip_cache=True)) == 1
        else:
            # Resolve all other cases, but (i).
            log = self.log.new(block=block.hash_hex)
            log.debug(
                'this block is not the head of the bestchain',
                is_connected_to_the_head=is_connected_to_the_head,
                is_connected_to_the_best_chain=is_connected_to_the_best_chain)

            # First, void this block.
            self.mark_as_voided(block, skip_remove_first_block_markers=True)

            # Get the score of the best chains.
            # We need to void this block first, because otherwise it would always be one of the heads.
            heads = [
                cast(Block, storage.get_transaction(h))
                for h in storage.get_best_block_tips()
            ]
            best_score = None
            for head in heads:
                head_meta = head.get_metadata(force_reload=True)
                if best_score is None:
                    best_score = head_meta.score
                else:
                    # All heads must have the same score.
                    assert abs(best_score - head_meta.score) < 1e-10
            assert isinstance(best_score, (int, float))

            # Calculate the score.
            # We cannot calculate score before getting the heads.
            score = self.calculate_score(block)

            # Finally, check who the winner is.
            if score <= best_score - settings.WEIGHT_TOL:
                # Just update voided_by from parents.
                self.update_voided_by_from_parents(block)

            else:
                # Either everyone has the same score or there is a winner.

                valid_heads = []
                for head in heads:
                    meta = head.get_metadata()
                    if not meta.voided_by:
                        valid_heads.append(head)

                # We must have at most one valid head.
                # Either we have a single best chain or all chains have already been voided.
                assert len(
                    valid_heads
                ) <= 1, 'We must never have more than one valid head'

                # Add voided_by to all heads.
                self.add_voided_by_to_multiple_chains(block, heads)

                if score >= best_score + settings.WEIGHT_TOL:
                    # We have a new winner candidate.
                    self.update_score_and_mark_as_the_best_chain_if_possible(
                        block)
                    # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`,
                    # we need to check that block is not voided.
                    meta = block.get_metadata()
                    if not meta.voided_by:
                        self.log.debug('index new winner block',
                                       height=meta.height,
                                       block=block.hash_hex)
                        # We update the height cache index with the new winner chain
                        storage.indexes.height.update_new_chain(
                            meta.height, block)
                        storage.update_best_block_tips_cache([block.hash])
                else:
                    storage.update_best_block_tips_cache(
                        [not_none(blk.hash) for blk in heads])
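
The decision at the end of `update_voided_info` is a three-way comparison against the best score, with `settings.WEIGHT_TOL` acting as a tolerance band: clearly below, within the band (a tie), or clearly above (a new winner candidate). The toy sketch below replays that classification; the tolerance value is illustrative, not the real network setting.

# Toy sketch of the score comparison in update_voided_info.
WEIGHT_TOL = 1e-10


def classify(score: float, best_score: float) -> str:
    if score <= best_score - WEIGHT_TOL:
        return 'loser'       # keep only the voided_by inherited from the parents
    if score >= best_score + WEIGHT_TOL:
        return 'new winner'  # try to mark this chain as the best chain
    return 'tie'             # every best chain candidate head stays voided


assert classify(69.9, 70.0) == 'loser'
assert classify(70.0, 70.0) == 'tie'
assert classify(70.1, 70.0) == 'new winner'
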
Example #9
    def _test_deferred_methods(self):
        # Testing without cloning
        self.cache_storage._clone_if_needed = False

        block_parents = [tx.hash for tx in self.genesis]
        output = TxOutput(
            200, bytes.fromhex('1e393a5ce2ff1c98d4ff6892f2175100f2dad049'))
        obj = Block(timestamp=MIN_TIMESTAMP,
                    weight=12,
                    outputs=[output],
                    parents=block_parents,
                    nonce=100781,
                    storage=self.cache_storage)
        obj.resolve()

        self.cache_storage.save_transaction_deferred(obj)

        loaded_obj1 = yield self.cache_storage.get_transaction_deferred(
            obj.hash)

        metadata_obj1_def = yield self.cache_storage.get_metadata_deferred(
            obj.hash)
        metadata_obj1 = obj.get_metadata()
        self.assertEqual(metadata_obj1_def, metadata_obj1)
        metadata_error = yield self.cache_storage.get_metadata_deferred(
            bytes.fromhex(
                '0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e'
            ))
        self.assertIsNone(metadata_error)

        self.cache_storage._flush_to_storage(
            self.cache_storage.dirty_txs.copy())
        self.cache_storage.cache = collections.OrderedDict()
        loaded_obj2 = yield self.cache_storage.get_transaction_deferred(
            obj.hash)

        self.assertEqual(loaded_obj1, loaded_obj2)

        self.assertTrue(
            (yield self.cache_storage.transaction_exists_deferred(obj.hash)))
        self.assertFalse((yield self.cache_storage.transaction_exists_deferred(
            '0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e')
                          ))

        self.assertFalse(
            self.cache_storage.transaction_exists(
                '0001569c85fffa5782c3979e7d68dce1d8d84772505a53ddd76d636585f3977e'
            ))

        self.assertEqual(obj, loaded_obj1)
        self.assertEqual(obj.is_block, loaded_obj1.is_block)

        count = yield self.cache_storage.get_count_tx_blocks_deferred()
        self.assertEqual(count, 4)

        all_transactions = yield self.cache_storage.get_all_transactions_deferred(
        )
        total = 0
        for tx in all_transactions:
            total += 1
        self.assertEqual(total, 4)
Example #10
    def calculate_next_weight(self, parent_block: Block,
                              timestamp: int) -> float:
        """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm.

        The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22).

        The weight must not be less than `self.min_block_weight`.
        """
        # In test mode we don't validate the block difficulty
        if self.test_mode & TestMode.TEST_BLOCK_WEIGHT:
            return 1.0

        root = parent_block
        N = min(2 * settings.BLOCK_DIFFICULTY_N_BLOCKS,
                parent_block.get_metadata().height - 1)
        K = N // 2
        T = self.avg_time_between_blocks
        S = 5
        if N < 10:
            return self.min_block_weight

        blocks: List[Block] = []
        while len(blocks) < N + 1:
            blocks.append(root)
            root = root.get_block_parent()
            assert isinstance(root, Block)
            assert root is not None

        # TODO: revise if this assertion can be safely removed
        assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp)
        blocks = list(reversed(blocks))

        assert len(blocks) == N + 1
        solvetimes, weights = zip(
            *((block.timestamp - prev_block.timestamp, block.weight)
              for prev_block, block in hathor.util.iwindows(blocks, 2)))
        assert len(solvetimes) == len(
            weights
        ) == N, f'got {len(solvetimes)}, {len(weights)} expected {N}'

        sum_solvetimes = 0.0
        logsum_weights = 0.0

        prefix_sum_solvetimes = [0]
        for st in solvetimes:
            prefix_sum_solvetimes.append(prefix_sum_solvetimes[-1] + st)

        # Loop through the most recent blocks; the last entry is the most recently solved block.
        for i in range(K, N):
            solvetime = solvetimes[i]
            weight = weights[i]
            x = (prefix_sum_solvetimes[i + 1] -
                 prefix_sum_solvetimes[i - K]) / K
            ki = K * (x - T)**2 / (2 * T * T)
            ki = max(1, ki / S)
            sum_solvetimes += ki * solvetime
            logsum_weights = sum_weights(logsum_weights, log(ki, 2) + weight)

        weight = logsum_weights - log(sum_solvetimes, 2) + log(T, 2)

        # Apply weight decay
        weight -= self.get_weight_decay_amount(timestamp -
                                               parent_block.timestamp)

        # Apply minimum weight
        if weight < self.min_block_weight:
            weight = self.min_block_weight

        return weight
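
Unrolling the loop gives a compact closed form for the pre-decay weight: `logsum_weights` accumulates the log2 of the k-weighted work and `sum_solvetimes` accumulates the k-weighted solve times. Reconstructed from the code above (not quoted from RFC 22):

    W = \log_2\Big(1 + \sum_{i=K}^{N-1} k_i\, 2^{w_i}\Big) - \log_2\Big(\sum_{i=K}^{N-1} k_i\, s_i\Big) + \log_2 T,
    \qquad k_i = \max\Big(1,\ \frac{K\,(x_i - T)^2}{2\,T^2\,S}\Big),
    \qquad x_i = \frac{1}{K}\sum_{j=i-K}^{i} s_j

where s_i and w_i are the solve time and weight of the i-th block in the window. The stray 1 inside the first logarithm comes from `logsum_weights` starting at `0.0` (i.e. log2 of 1) and is negligible for realistic weights; the result is then reduced by the weight-decay amount and floored at `self.min_block_weight`.
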