def test_propagation_error(self):
    """Blocks violating consensus rules must be rejected by propagate_tx."""
    _set_test_mode(TestMode.DISABLED)
    manager = self.create_peer('testnet', unlock_wallet=True)

    # 1. re-propagating the genesis block must fail
    genesis_block = self.genesis_blocks[0]
    genesis_block.storage = manager.tx_storage
    self.assertFalse(manager.propagate_tx(genesis_block))

    # 2. a block whose weight (1) is below the required difficulty must fail
    low_weight_block = manager.generate_mining_block()
    low_weight_block.weight = 1
    low_weight_block.resolve()
    self.assertFalse(manager.propagate_tx(low_weight_block))

    # 3. a block minting the wrong amount of tokens must fail
    bad_reward_block = manager.generate_mining_block()
    bad_reward_block.outputs = [TxOutput(1, bad_reward_block.outputs[0].script)]
    bad_reward_block.resolve()
    self.assertFalse(manager.propagate_tx(bad_reward_block))

    # 4. a block timestamped too far in the future must fail
    future_block = manager.generate_mining_block()
    future_block.timestamp = int(self.clock.seconds()) + settings.MAX_FUTURE_TIMESTAMP_ALLOWED + 100
    future_block.resolve(update_time=False)
    self.assertFalse(manager.propagate_tx(future_block))
def test_daa_sanity(self):
    # sanity test the DAA: shorter solvetimes must raise the next block's
    # weight, longer solvetimes must lower it
    _set_test_mode(TestMode.DISABLED)
    manager = self.create_peer('testnet', tx_storage=self.tx_storage)
    N = settings.BLOCK_DIFFICULTY_N_BLOCKS
    T = settings.AVG_TIME_BETWEEN_BLOCKS
    manager.avg_time_between_blocks = T
    # stabilize weight on 2 and lower the minimum to 1, so it can vary around 2
    manager.min_block_weight = 2
    add_new_blocks(manager, N * 2, advance_clock=T)
    manager.min_block_weight = 1
    for i in range(N):
        # decreasing solvetime should increase weight
        base_weight = manager.generate_mining_block().weight
        # insert one faster-than-average block at position i of the window
        add_new_blocks(manager, i, advance_clock=T)
        add_new_blocks(manager, 1, advance_clock=T * 0.9)
        add_new_blocks(manager, N - i, advance_clock=T)
        new_weight = manager.generate_mining_block().weight
        self.assertGreater(new_weight, base_weight)
        # flush the window with average-time blocks before the next check
        add_new_blocks(manager, N, advance_clock=T)
        # increasing solvetime should decrease weight
        base_weight = manager.generate_mining_block().weight
        # insert one slower-than-average block at position i of the window
        add_new_blocks(manager, i, advance_clock=T)
        add_new_blocks(manager, 1, advance_clock=T * 1.1)
        add_new_blocks(manager, N - i, advance_clock=T)
        new_weight = manager.generate_mining_block().weight
        self.assertLess(new_weight, base_weight)
def setUp(self):
    """Build and start a HathorManager wired to the test clock."""
    super().setUp()
    self.reactor = self.clock
    manager = HathorManager(self.reactor, **self._manager_kwargs())
    self.manager = manager
    manager.allow_mining_without_peers()
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    manager.start()
def test_tx_propagate(self):
    """Build a tx through the create_tx API, sign it locally and propagate it."""
    # disable test_mode so the weight is not 1
    _set_test_mode(TestMode.DISABLED)
    src_tx = self.unspent_tx
    output_address = 'HNXsVtRUmwDCtpcCJUrH4QiHo9kUKx199A'
    request_body = {
        'inputs': [{'tx_id': src_tx.hash_hex, 'index': 1}],
        'outputs': [{'address': output_address, 'value': 100}],
    }
    resp = (yield self.web.post('create_tx', request_body)).json_value()
    self.assertEqual(resp['success'], True)
    data = resp['data']
    hex_data = resp['hex_data']

    # the returned hex must deserialize to a tx matching the returned json
    # (hash/nonce are absent before the pow is solved)
    orig_tx = Transaction.create_from_struct(bytes.fromhex(hex_data))
    tx = orig_tx.clone()
    tx_data = tx.to_json()
    del tx_data['hash']
    del tx_data['nonce']
    self.assertEqual(data, tx_data)

    # sign the single input with the wallet that owns the spent output
    data_to_sign = tx.get_sighash_all()
    private_key = self.manager.wallet.get_private_key(self.unspent_address)
    public_key_bytes, signature_bytes = self.manager.wallet.get_input_aux_data(data_to_sign, private_key)
    tx.inputs[0].data = P2PKH.create_input_data(public_key_bytes, signature_bytes)

    # XXX: tx.resolve is a bit CPU intensive, but not so much as to make this test disabled by default
    tx.resolve(False)
    self.assertTrue(self.manager.propagate_tx(tx))
def setUp(self):
    """Common fixture: all-weights test mode, deterministic clock, fresh RNG."""
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    self.tmpdirs = []
    self.log = logger.new()
    self.clock = Clock()
    self.clock.advance(time.time())
    self.reset_peer_id_pool()
    self.rng = Random()
def test_topological_sort_dfs(self):
    """A DFS topological sort rooted at a fresh tx must visit exactly 5 vertices."""
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    add_new_blocks(self.manager, 11, advance_clock=1)
    tx = add_new_transactions(self.manager, 1, advance_clock=1)[0]
    total = sum(1 for _ in self.cache_storage._topological_sort_dfs(root=tx, visited=dict()))
    self.assertEqual(total, 5)
def create_peer(self, network, unlock_wallet=True):
    """Create a test peer backed by a freshly-generated, unlocked HD wallet."""
    wallet = HDWallet(gap_limit=2)
    wallet._manually_initialize()
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    manager = super().create_peer(network, wallet=wallet)
    manager.avg_time_between_blocks = 64
    # Don't use it anywhere else. It is unsafe to generate mnemonic words like this.
    # It should be used only for testing purposes.
    entropy = bytes(random.randint(0, 255) for _ in range(32))
    words = Mnemonic('english').to_mnemonic(entropy)
    wallet.unlock(words=words, tx_storage=manager.tx_storage)
    return manager
def test_topological_sort(self):
    """A full topological sort must yield every stored vertex exactly once."""
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    expected = 0
    expected += len(add_new_blocks(self.manager, 1, advance_clock=1))
    expected += len(add_blocks_unlock_reward(self.manager))
    add_new_transactions(self.manager, 1, advance_clock=1)
    total = sum(1 for _ in self.tx_storage._topological_sort())
    # added blocks + genesis txs + added tx
    self.assertEqual(total, expected + 3 + 1)
def test_daa_weight_decay_amount(self):
    """get_weight_decay_amount: zero before the activation distance, then one
    extra WEIGHT_DECAY_AMOUNT per completed WEIGHT_DECAY_WINDOW_SIZE window."""
    _set_test_mode(TestMode.DISABLED)
    step = settings.WEIGHT_DECAY_AMOUNT
    activate = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
    window = settings.WEIGHT_DECAY_WINDOW_SIZE

    # no decay at all below the activation distance
    for distance in range(0, activate, 10):
        self.assertEqual(get_weight_decay_amount(distance), 0)
    self.assertAlmostEqual(get_weight_decay_amount(activate - 1), 0)

    # from activation on, each full window adds one decay step
    distance = activate
    for multiplier in range(1, 11):
        for _ in range(window):
            self.assertAlmostEqual(get_weight_decay_amount(distance), multiplier * step)
            distance += 1
    # right past the 10th window the 11th step applies
    self.assertAlmostEqual(get_weight_decay_amount(distance), 11 * step)
def test_genesis_weight(self):
    """Genesis weights: always 1 in test mode, stored values otherwise."""
    genesis = self.storage.get_all_genesis()
    genesis_block = next(tx for tx in genesis if tx.is_block)
    genesis_tx = next(tx for tx in genesis if not tx.is_block)

    # Validate the block and tx weight
    # in test mode weight is always 1
    _set_test_mode(TestMode.TEST_ALL_WEIGHT)
    self.assertEqual(calculate_block_difficulty(genesis_block), 1)
    self.assertEqual(minimum_tx_weight(genesis_tx), 1)

    # with test mode off, the stored genesis weights must be reproduced
    _set_test_mode(TestMode.DISABLED)
    self.assertEqual(calculate_block_difficulty(genesis_block), genesis_block.weight)
    self.assertEqual(minimum_tx_weight(genesis_tx), genesis_tx.weight)
def _test_daa_weight_decay_blocks(self):
    # Check that the mining weight decays in steps of WEIGHT_DECAY_AMOUNT once
    # the time since the last block exceeds WEIGHT_DECAY_ACTIVATE_DISTANCE,
    # stepping again after each WEIGHT_DECAY_WINDOW_SIZE seconds.
    _set_test_mode(TestMode.DISABLED)
    manager = self.create_peer('testnet', tx_storage=self.tx_storage)
    amount = settings.WEIGHT_DECAY_AMOUNT
    from hathor import daa
    daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS
    # keep the stabilized weight comfortably above the (later lowered) minimum
    # so the decay steps are observable
    daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT
    add_new_blocks(manager, 2 * settings.BLOCK_DIFFICULTY_N_BLOCKS,
                   advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS)
    daa.MIN_BLOCK_WEIGHT = 1
    base_weight = manager.generate_mining_block().weight
    self.assertGreater(base_weight, daa.MIN_BLOCK_WEIGHT)
    add_new_blocks(manager, 20, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS)
    dt = settings.AVG_TIME_BETWEEN_BLOCKS  # the latest call to add_new_blocks will advance the clock
    while dt < settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
        # before the decay activates, the weight is unchanged
        weight = manager.generate_mining_block().weight
        self.assertAlmostEqual(weight, base_weight)
        manager.reactor.advance(1)
        dt += 1
    dt = 0
    while dt < settings.WEIGHT_DECAY_WINDOW_SIZE:
        # first decay window: weight dropped by one decay amount
        weight = manager.generate_mining_block().weight
        self.assertAlmostEqual(weight, base_weight - amount)
        manager.reactor.advance(1)
        dt += 1
    dt = 0
    while dt < settings.WEIGHT_DECAY_WINDOW_SIZE:
        # second decay window: weight dropped by two decay amounts
        weight = manager.generate_mining_block().weight
        self.assertAlmostEqual(weight, base_weight - 2 * amount)
        manager.reactor.advance(1)
        dt += 1
    manager.reactor.advance(1)
    # one second past the second window the weight equals the minimum
    # (presumably clamped there by the DAA — behavior asserted, not shown here)
    weight = manager.generate_mining_block().weight
    self.assertAlmostEqual(weight, daa.MIN_BLOCK_WEIGHT)
def test_tx_weight(self):
    """Sending a tx whose weight is forced to 1 must be rejected."""
    _set_test_mode(TestMode.DISABLED)
    add_new_blocks(self.manager, 3, advance_clock=1)
    add_blocks_unlock_reward(self.manager)
    self.reactor.advance(3)

    # Unlocking wallet
    self.manager.wallet.unlock(b"MYPASS")

    payload = {
        "outputs": [{"address": self.get_address(0), "value": 505}],
        "inputs": [],
        "weight": 1,
    }
    response = yield self.web.post("wallet/send_tokens", {'data': payload})
    data = response.json_value()
    self.assertFalse(data['success'])
def _apply_patches(cls):
    """ Applies global patches on modules that aren't easy/possible to configure otherwise.

    Patches:

    - disable pow verification
    - set DAA test-mode to DISABLED (will actually run the pow function, that won't actually verify the pow)
    - override AVG_TIME_BETWEEN_BLOCKS to 64
    """
    from hathor.transaction import BaseTransaction

    def verify_pow(self: BaseTransaction) -> None:
        # no-op replacement: only checks that a hash exists, skips the pow check
        assert self.hash is not None

    # save the original so a counterpart un-patch step can restore it
    # (the restore itself is not visible in this chunk)
    cls._original_verify_pow = BaseTransaction.verify_pow
    BaseTransaction.verify_pow = verify_pow

    _set_test_mode(TestMode.DISABLED)

    from hathor import daa
    cls._original_avg_time_between_blocks = daa.AVG_TIME_BETWEEN_BLOCKS
    daa.AVG_TIME_BETWEEN_BLOCKS = 64
def prepare(self, args: Namespace) -> None:
    """Set up storage, wallet, networking and the HathorManager from CLI args.

    Exits the process (sys.exit) on invalid or conflicting argument
    combinations; on success, starts the manager and registers resources.
    """
    import hathor
    from hathor.cli.util import check_or_exit
    from hathor.conf import HathorSettings
    from hathor.conf.get_settings import get_settings_module
    from hathor.daa import TestMode, _set_test_mode
    from hathor.manager import HathorManager
    from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery
    from hathor.p2p.peer_id import PeerId
    from hathor.p2p.utils import discover_hostname
    from hathor.transaction import genesis
    from hathor.transaction.storage import (
        TransactionCacheStorage,
        TransactionCompactStorage,
        TransactionMemoryStorage,
        TransactionRocksDBStorage,
        TransactionStorage,
    )
    from hathor.wallet import HDWallet, Wallet

    settings = HathorSettings()
    settings_module = get_settings_module()  # only used for logging its location
    self.log = logger.new()

    from setproctitle import setproctitle
    setproctitle('{}hathor-core'.format(args.procname_prefix))

    # raise the recursion limit; default cap is 5000 when not given on the CLI
    if args.recursion_limit:
        sys.setrecursionlimit(args.recursion_limit)
    else:
        sys.setrecursionlimit(5000)

    # `resource` is POSIX-only; skip the fd-limit check where it's unavailable
    try:
        import resource
    except ModuleNotFoundError:
        pass
    else:
        (nofile_soft, _) = resource.getrlimit(resource.RLIMIT_NOFILE)
        if nofile_soft < 256:
            print('Maximum number of open file descriptors is too low. Minimum required is 256.')
            sys.exit(-2)

    # load a persisted peer id when --peer is given, otherwise generate one
    if not args.peer:
        peer_id = PeerId()
    else:
        data = json.load(open(args.peer, 'r'))
        peer_id = PeerId.create_from_json(data)

    python = f'{platform.python_version()}-{platform.python_implementation()}'

    self.check_unsafe_arguments(args)

    self.log.info(
        'hathor-core v{hathor}',
        hathor=hathor.__version__,
        pid=os.getpid(),
        genesis=genesis.GENESIS_HASH.hex()[:7],
        my_peer_id=str(peer_id.id),
        python=python,
        platform=platform.platform(),
        settings=settings_module.__file__,
    )

    def create_wallet():
        # Build either an HD (mnemonic) wallet or a keypair wallet from args.
        if args.wallet == 'hd':
            kwargs = {
                'words': args.words,
            }
            if args.passphrase:
                wallet_passphrase = getpass.getpass(prompt='HD Wallet passphrase:')
                kwargs['passphrase'] = wallet_passphrase.encode()
            if args.data:
                kwargs['directory'] = args.data
            return HDWallet(**kwargs)
        elif args.wallet == 'keypair':
            print('Using KeyPairWallet')
            if args.data:
                wallet = Wallet(directory=args.data)
            else:
                wallet = Wallet()
            wallet.flush_to_disk_interval = 5  # seconds
            if args.unlock_wallet:
                # NOTE(review): this span appears scrubbed/corrupted in the source
                # ("******"); it presumably read the password, unlocked the wallet,
                # returned it, and raised ValueError for an unknown wallet type.
                # Confirm against the upstream file before relying on it.
                wallet_passwd = getpass.getpass(prompt='Wallet password:'******'Invalid type for wallet')

    # select the tx storage backend: memory, json/compact, or rocksdb (default)
    tx_storage: TransactionStorage
    if args.memory_storage:
        check_or_exit(not args.data, '--data should not be used with --memory-storage')
        # if using MemoryStorage, no need to have cache
        tx_storage = TransactionMemoryStorage()
        assert not args.x_rocksdb_indexes, 'RocksDB indexes require RocksDB data'
        self.log.info('with storage', storage_class=type(tx_storage).__name__)
    elif args.json_storage:
        check_or_exit(args.data, '--data is expected')
        assert not args.x_rocksdb_indexes, 'RocksDB indexes require RocksDB data'
        tx_storage = TransactionCompactStorage(path=args.data, with_index=(not args.cache))
    else:
        check_or_exit(args.data, '--data is expected')
        if args.rocksdb_storage:
            self.log.warn('--rocksdb-storage is now implied, no need to specify it')
        cache_capacity = args.rocksdb_cache
        use_memory_indexes = not args.x_rocksdb_indexes
        tx_storage = TransactionRocksDBStorage(path=args.data, with_index=(not args.cache),
                                               cache_capacity=cache_capacity,
                                               use_memory_indexes=use_memory_indexes)
        self.log.info('with storage', storage_class=type(tx_storage).__name__, path=args.data)
    if args.cache:
        check_or_exit(not args.memory_storage, '--cache should not be used with --memory-storage')
        # wrap the chosen backend with a write-back cache layer
        tx_storage = TransactionCacheStorage(tx_storage, reactor)
        if args.cache_size:
            tx_storage.capacity = args.cache_size
        if args.cache_interval:
            tx_storage.interval = args.cache_interval
        self.log.info('with cache', capacity=tx_storage.capacity, interval=tx_storage.interval)
    self.tx_storage = tx_storage
    self.log.info('with indexes', indexes_class=type(tx_storage.indexes).__name__)

    if args.wallet:
        self.wallet = create_wallet()
        self.log.info('with wallet', wallet=self.wallet, path=args.data)
    else:
        self.wallet = None

    if args.hostname and args.auto_hostname:
        print('You cannot use --hostname and --auto-hostname together.')
        sys.exit(-1)

    if not args.auto_hostname:
        hostname = args.hostname
    else:
        print('Trying to discover your hostname...')
        hostname = discover_hostname()
        if not hostname:
            print('Aborting because we could not discover your hostname.')
            print('Try again or run without --auto-hostname.')
            sys.exit(-1)
        print('Hostname discovered and set to {}'.format(hostname))

    network = settings.NETWORK_NAME
    # sync-v1 is on unless --x-sync-v2-only; sync-v2 is on for v2-only or bridge mode
    enable_sync_v1 = not args.x_sync_v2_only
    enable_sync_v2 = args.x_sync_v2_only or args.x_sync_bridge

    self.manager = HathorManager(
        reactor,
        peer_id=peer_id,
        network=network,
        hostname=hostname,
        tx_storage=self.tx_storage,
        wallet=self.wallet,
        wallet_index=args.wallet_index,
        stratum_port=args.stratum,
        ssl=True,
        checkpoints=settings.CHECKPOINTS,
        enable_sync_v1=enable_sync_v1,
        enable_sync_v2=enable_sync_v2,
        soft_voided_tx_ids=set(settings.SOFT_VOIDED_TX_IDS),
    )

    if args.allow_mining_without_peers:
        self.manager.allow_mining_without_peers()

    if args.x_localhost_only:
        self.manager.connections.localhost_only = True

    # peer discovery: DNS hosts from settings plus any given on the CLI
    dns_hosts = []
    if settings.BOOTSTRAP_DNS:
        dns_hosts.extend(settings.BOOTSTRAP_DNS)
    if args.dns:
        dns_hosts.extend(args.dns)
    if dns_hosts:
        self.manager.add_peer_discovery(DNSPeerDiscovery(dns_hosts))
    if args.bootstrap:
        self.manager.add_peer_discovery(BootstrapPeerDiscovery(args.bootstrap))

    if args.test_mode_tx_weight:
        _set_test_mode(TestMode.TEST_TX_WEIGHT)
        if self.wallet:
            self.wallet.test_mode = True

    if args.x_full_verification:
        self.manager._full_verification = True
    if args.x_fast_init_beta:
        self.log.warn('--x-fast-init-beta is now the default, no need to specify it')

    for description in args.listen:
        self.manager.add_listen_address(description)

    self.start_manager(args)
    self.register_resources(args)