Example #1
class Node(threading.Thread):
    def __init__(self, config=Config()):
        super().__init__()

        self.chain = config.chain
        self.data_dir = config.data_dir
        self.data_dir.mkdir(exist_ok=True, parents=True)

        self.terminate_flag = threading.Event()
        self.logger = Logger(self.data_dir / "history.log", config.debug)

        self.index = BlockIndex(self.data_dir, self.chain, self.logger)
        self.chainstate = Chainstate(self.data_dir, self.logger)
        self.block_db = BlockDB(self.data_dir, self.logger)
        self.mempool = Mempool()

        self.status = NodeStatus.Starting

        self.download_window = []

        if config.p2p_port:
            self.p2p_port = config.p2p_port
        else:
            self.p2p_port = None
        peer_db = PeerDB(self.chain, self.data_dir)
        self.p2p_manager = P2pManager(self, self.p2p_port, peer_db)

        if config.rpc_port:
            self.rpc_port = config.rpc_port
        else:
            self.rpc_port = None
        self.rpc_manager = RpcManager(self, self.rpc_port)
Example #2
def test_simple_init(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    index.add_headers(generate_random_header_chain(2000, RegTest().genesis.hash))
    index.db.close()
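    # Reopening the index from the same directory must rebuild the same in-memory state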
    new_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    assert index.header_dict == new_index.header_dict
    assert index.header_index == new_index.header_index
    assert index.active_chain == new_index.active_chain
    assert index.block_candidates == new_index.block_candidates
Example #3
def test_long_init(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    length = 10  # reduced from 2000 for test runtime
    chain = generate_random_header_chain(2000 * length, RegTest().genesis.hash)
    for x in range(length):
        index.add_headers(chain[x * 2000 : (x + 1) * 2000])
    index.db.close()
    new_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    assert index.header_dict == new_index.header_dict
    assert index.header_index == new_index.header_index
    assert index.active_chain == new_index.active_chain
    assert index.block_candidates == new_index.block_candidates
Example #4
def test_init_with_fork(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    fork = generate_random_header_chain(5, chain[-10].hash)
    index.add_headers(chain)
    index.add_headers(fork)
    index.db.close()
    new_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    assert index.header_dict == new_index.header_dict
    assert index.header_index == new_index.header_index
    assert index.active_chain == new_index.active_chain
    assert sorted(index.block_candidates) == sorted(new_index.block_candidates)
Example #5
def test_long_init(tmp_path):
    chainstate = Chainstate(tmp_path, Logger(debug=True))
    chain = generate_random_chain(20000, RegTest().genesis.hash)
    for block in chain:
        chainstate.add_block(block)
    chainstate.finalize()
    chainstate_dict = dict(chainstate.db)
    chainstate.close()
    new_chainstate = Chainstate(tmp_path, Logger(debug=True))
    new_chainstate_dict = dict(new_chainstate.db)
    new_chainstate.close()
    assert chainstate_dict == new_chainstate_dict
Example #6
def test_block_candidates_3(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    fork = generate_random_header_chain(200, chain[-10 - 1].hash)
    index.add_headers(chain)
    index.add_headers(fork)
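    # Mark every main-chain header as already in the active chain; only the fork headers remain to download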
    for x in chain:
        block_info = index.get_block_info(x.hash)
        block_info.status = BlockStatus.in_active_chain
        index.insert_block_info(block_info)
    index.db.close()
    new_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    assert new_index.get_download_candidates() == [x.hash for x in fork]
Example #7
def test_generate_block_candidates_2(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    fork = generate_random_header_chain(200, chain[-10 - 1].hash)
    index.add_headers(chain)
    index.add_headers(fork)
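    # Mark the fork headers invalid so only the 2000 main-chain headers remain block candidates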
    for x in fork:
        block_info = index.get_block_info(x.hash)
        block_info.status = BlockStatus.invalid
        index.insert_block_info(block_info)
    index.db.close()
    new_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    assert len(new_index.block_candidates) == 2000
Example #8
def test_add_headers_fork(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    fork = generate_random_header_chain(200, chain[-10 - 1].hash)
    index.add_headers(chain)
    index.add_headers(fork)
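    # The fork branches at the 1990th header, so the expected count is 1990 + 200 fork headers + genesis = 2191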
    assert len(index.header_index) == 2190 + 1
Example #9
def test_blocks(tmp_path):
    chain = generate_random_chain(2000, RegTest().genesis.hash)
    for x in range(10):
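        # Use a separate database directory per iteration and check that every block round-trips intact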
        block_db = BlockDB(tmp_path / f"{x}", Logger(debug=True))
        for block in chain:
            block_db.add_block(block)
            stored_block = block_db.get_block(block.header.hash)
            assert stored_block == block
Example #10
def test_add_old_header(tmp_path):
    block_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    block_index.add_headers(chain)
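    # Re-adding a header that is already indexed is rejected and leaves the index unchanged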
    assert not block_index.add_headers([chain[10]])
    assert len(block_index.header_dict) == 2000 + 1
    assert len(block_index.header_index) == 2000 + 1
    assert len(block_index.block_candidates) == 2000
Example #11
def test_block_locators_4(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(2000, RegTest().genesis.hash)
    index.add_headers(chain[:1000])
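    # chain[-1] is unknown to the index, so the locator falls back to the genesis hash; an all-zero stop hash requests everything stored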
    headers = index.get_headers_from_locators(
        [chain[-1].hash, RegTest().genesis.hash], "00" * 32
    )
    assert headers == chain[:1000]
Example #12
def test_add_headers_long(tmp_path):
    block_index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    length = 50  # reduced from 2000 for test runtime
    chain = generate_random_header_chain(2000 * length, RegTest().genesis.hash)
    for x in range(length):
        block_index.add_headers(chain[x * 2000 : (x + 1) * 2000])
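    # Every header plus the genesis header is indexed; the genesis block itself is never a download candidate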
    assert len(block_index.header_dict) == 2000 * length + 1
    assert len(block_index.header_index) == 2000 * length + 1
    assert len(block_index.block_candidates) == 2000 * length
Example #13
def test_rev_patch(tmp_path):
    chainstate = Chainstate(tmp_path, Logger(debug=True))
    chain = generate_random_chain(20000, RegTest().genesis.hash)
    rev_patches = []
    for block in chain:
        _, rev_patch = chainstate.add_block(block)
        rev_patches.append(rev_patch)
    rev_patches.reverse()
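    # Applying the undo patches in reverse block order must roll the chainstate back to its initial empty state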
    for rev_patch in rev_patches:
        chainstate.apply_rev_block(rev_patch)
    assert chainstate.updated_utxo_set == {}
Example #14
def test_block_candidates_2(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(1024, RegTest().genesis.hash)
    index.add_headers(chain)
    assert index.get_download_candidates() == [x.hash for x in chain]
Example #15
def test_init(tmp_path):
    Chainstate(tmp_path, Logger(debug=True))
Example #16
def test_empty_init(tmp_path):
    BlockIndex(tmp_path, RegTest(), Logger(debug=True))
Example #17
def test_block_locators(tmp_path):
    index = BlockIndex(tmp_path, RegTest(), Logger(debug=True))
    chain = generate_random_header_chain(24, RegTest().genesis.hash)
    index.add_headers(chain)
    locators = index.get_block_locator_hashes()
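    # Locators cover the last 10 block hashes, then exponentially spaced hashes, then genesis: 10 + 3 + 1 = 14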
    assert len(locators) == 14
Example #18
def test_init(tmp_path):
    BlockDB(tmp_path, Logger(debug=True))
Example #19
class Node(threading.Thread):
    def __init__(self, config=Config()):
        super().__init__()

        self.chain = config.chain
        self.data_dir = config.data_dir
        self.data_dir.mkdir(exist_ok=True, parents=True)

        self.terminate_flag = threading.Event()
        self.logger = Logger(self.data_dir / "history.log", config.debug)

        self.index = BlockIndex(self.data_dir, self.chain, self.logger)
        self.chainstate = Chainstate(self.data_dir, self.logger)
        self.block_db = BlockDB(self.data_dir, self.logger)
        self.mempool = Mempool()

        self.status = NodeStatus.Starting

        self.download_window = []

        if config.p2p_port:
            self.p2p_port = config.p2p_port
        else:
            self.p2p_port = None
        peer_db = PeerDB(self.chain, self.data_dir)
        self.p2p_manager = P2pManager(self, self.p2p_port, peer_db)

        if config.rpc_port:
            self.rpc_port = config.rpc_port
        else:
            self.rpc_port = None
        self.rpc_manager = RpcManager(self, self.rpc_port)

    def run(self):

        self.logger.info("Starting main loop")

        if self.p2p_port:
            self.p2p_manager.start()
        if self.rpc_port:
            self.rpc_manager.start()
        self.status = NodeStatus.SyncingHeaders
        while not self.terminate_flag.is_set():
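            # Handle queued messages by priority: P2P handshakes first, then RPC, then regular P2P traffic; otherwise idle briefly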
            if len(self.p2p_manager.handshake_messages):
                handle_p2p_handshake(self)
            elif len(self.rpc_manager.messages):
                handle_rpc(self)
            elif len(self.p2p_manager.messages):
                handle_p2p(self)
            else:
                time.sleep(0.0001)
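            # Drive block download and chain validation each iteration; an unhandled exception is logged and shuts the node down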
            try:
                block_download(self)
                update_chain(self)
            except Exception:
                self.logger.exception("Exception occurred")
                break
        self.p2p_manager.stop()
        self.rpc_manager.stop()

        self.index.close()
        self.chainstate.close()
        self.block_db.close()

        self.logger.info("Stopping node")
        self.logger.close()

    def stop(self):
        self.terminate_flag.set()