async def test_trinity_sync_from_trinity(unused_tcp_port_factory, prepopulated_datadir):
    """
    Boot a serving node from a prepopulated data dir, then boot a second node
    pointed at it via ``--preferred-node`` and assert they sync with each other.
    """
    serving_port = unused_tcp_port_factory()
    listen_ip = '0.0.0.0'
    nodekey = load_nodekey(get_nodekey_path(prepopulated_datadir / 'mainnet'))
    serving_enode = (
        f'enode://{remove_0x_prefix(nodekey.public_key.to_hex())}@{listen_ip}:{serving_port}'
    )
    serving_command = amend_command_for_unused_port(
        ('trinity', '--trinity-root-dir', str(prepopulated_datadir), '--disable-discovery'),
        serving_port,
    )
    async with AsyncProcessRunner.run(serving_command, timeout_sec=120) as serving_runner:
        # Wait until the serving node advertises its enode before connecting.
        assert await contains_all(serving_runner.stderr, {serving_enode})

        syncing_port = unused_tcp_port_factory()
        syncing_command = amend_command_for_unused_port(
            ('trinity', '--disable-discovery', '--preferred-node', serving_enode),
            syncing_port,
        )
        async with AsyncProcessRunner.run(syncing_command, timeout_sec=120) as syncing_runner:
            assert await contains_all(syncing_runner.stderr, {
                "Adding ETHPeer",
                "Imported 192 headers",
                "Caught up to skeleton peer",
            })
            # A weak assertion to try and ensure our nodes are actually talking to each other.
            assert await contains_all(serving_runner.stderr, {
                "Adding ETHPeer",
            })
async def run_command_and_detect_errors(command, time):
    """
    Run the given ``command`` for up to ``time`` seconds and throw an
    Exception in case any unresolved Exceptions are detected in the stderr
    output of the command (delegated to ``scan_for_errors``).
    """
    # NOTE: the ``time`` parameter shadows the stdlib ``time`` module inside
    # this function's scope; it is a plain seconds value here.
    async with AsyncProcessRunner.run(command, timeout_sec=time) as runner:
        await scan_for_errors(runner.stderr)
async def test_does_not_throw_errors_with_metrics_reporting_enabled(
        unused_tcp_port):
    """
    Start trinity with metrics reporting enabled and assert that peer metrics
    recorded after startup eventually become queryable from local InfluxDB.
    """
    # Use a timezone-aware UTC timestamp; ``datetime.utcnow()`` is deprecated
    # and returns a naive value. The 'Z'-suffixed format string is unchanged.
    init_time = datetime.datetime.now(
        datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
    enabled_metrics_command = ('trinity',
                               '--enable-metrics',
                               '--metrics-host=test',
                               '--metrics-influx-server=localhost',
                               '--metrics-influx-password=trinity')
    command = amend_command_for_unused_port(enabled_metrics_command, unused_tcp_port)
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, {
            'MetricsService',
            'Reporting metrics to localhost',
        })
        encoded_init_time = urllib.parse.quote(f"'{init_time}'")
        start_time = time.monotonic()
        # Poll InfluxDB until the peer-counter series appears, giving up
        # after 30 seconds.
        while (time.monotonic() - start_time) < 30:
            response = requests.get(
                'http://localhost:8086/query?db=trinity&epoch=ns&q=select+%2a'
                '+from+%22trinity.p2p%2fpeers.counter%22'
                f'+WHERE+time+%3E%3D+{encoded_init_time}',
                # A per-request timeout so a wedged InfluxDB cannot hang the
                # test past the polling window.
                timeout=10)
            json_response = response.json()
            if 'series' in json_response['results'][0]:
                break
            # Back off briefly instead of hammering the database in a tight
            # busy-loop.
            time.sleep(0.5)
        else:
            raise Exception('Influxdb request timeout')
        assert len(json_response['results'][0]['series'][0]['values']) > 0
async def test_expected_logs_for_light_mode(command):
    """Light-mode trinity should log its core services starting up."""
    expected_messages = {
        "Started DB server process",
        "Component started: Sync / PeerPool",
        "IPC started at",
    }
    async with AsyncProcessRunner.run(command, timeout_sec=40) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_directory_generation(command, tmpdir):
    """Passing --network-dir should make generation target that directory."""
    network_dir = tmpdir / "testnet"
    network_dir.mkdir()
    full_command = command + (f"--network-dir={network_dir}",)
    async with AsyncProcessRunner.run(full_command, timeout_sec=30) as runner:
        assert await contains_all(runner.stderr, {
            "Network generation completed",
        })
async def test_expected_logs_with_disabled_txpool(command):
    """Light mode must report that the transaction pool is unsupported."""
    # Since this short-circuits on success, we can set the timeout high.
    # We only hit the timeout if the test fails.
    expected_messages = {
        "Started DB server process",
        "Component started: Sync / PeerPool",
        "Transaction pool does not support light mode",
    }
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_expected_logs_with_disabled_txpool(command, unused_tcp_port):
    """Trinity should boot all components even with the txpool disabled."""
    amended_command = amend_command_for_unused_port(command, unused_tcp_port)
    # Since this short-circuits on success, we can set the timeout high.
    # We only hit the timeout if the test fails.
    expected_messages = {
        "Started DB server process",
        "Starting components",
        "Components started",
    }
    async with AsyncProcessRunner.run(amended_command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_block_import(file, info, success):
    """Import a fixture RLP block file and check for the expected log lines."""
    import_file = (
        pathlib.Path(__file__).parent / 'fixtures' / 'mainnet_blocks' / file
    )
    assert import_file.exists()
    import_command = ('trinity', 'import', str(import_file))
    async with AsyncProcessRunner.run(import_command, timeout_sec=40) as runner:
        assert await contains_all(runner.stderr, info)
async def test_expected_logs_for_full_mode_with_txpool_disabled(command):
    """Full mode with the txpool disabled must still run server and IPC."""
    # Since this short-circuits on success, we can set the timeout high.
    # We only hit the timeout if the test fails.
    expected_messages = {
        "Started DB server process",
        "Component started: Sync / PeerPool",
        "Transaction pool disabled",
        "Running server",
        "IPC started at",
    }
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_expected_logs_for_full_mode(command, unused_tcp_port):
    """Full mode must start the txpool, the server, and the IPC endpoint."""
    amended_command = amend_command_for_unused_port(command, unused_tcp_port)
    # Since this short-circuits on success, we can set the timeout high.
    # We only hit the timeout if the test fails.
    expected_messages = {
        "Started DB server process",
        "Component started: Sync / PeerPool",
        "Running Tx Pool",
        "Running server",
        "IPC started at",
    }
    async with AsyncProcessRunner.run(amended_command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_block_export(block_number, info, success):
    """Export a block to a temp file, checking the log and file creation."""
    with tempfile.TemporaryDirectory() as export_dir:
        export_file = pathlib.Path(export_dir) / 'export.rlp'
        assert not export_file.exists()
        export_command = ('trinity', 'export', str(export_file), str(block_number))
        async with AsyncProcessRunner.run(export_command, timeout_sec=40) as runner:
            assert await contains_all(runner.stderr, {info})
        # A successful export must create the file; a failed one must not.
        if success:
            assert export_file.exists()
        else:
            assert not export_file.exists()
async def test_logger_configuration(command,
                                    expected_stderr_logs,
                                    unexpected_stderr_logs,
                                    expected_file_logs,
                                    unexpected_file_logs,
                                    unused_tcp_port):
    """
    Launch trinity and verify which log lines show up on stderr versus in the
    rotating log file, according to the configured logging setup.
    """
    command = amend_command_for_unused_port(command, unused_tcp_port)

    def contains_substring(iterable, substring):
        # True if any collected line contains ``substring``.
        return any(substring in x for x in iterable)

    # Saw occasional (<25%, >5%) failures in CI at 30s because of slow machines or bad luck
    async with AsyncProcessRunner.run(command, timeout_sec=45) as runner:
        stderr_logs = []
        # Collect logs up to the point when the sync begins so that we have enough logs
        # for assertions. We keep reading for 3 more seconds after the marker
        # line ("DiscoveryService") first appears, then stop.
        marker_seen_at = 0
        async for line in runner.stderr:
            if marker_seen_at != 0 and time.time() - marker_seen_at > 3:
                break
            if "DiscoveryService" in line:
                marker_seen_at = time.time()
            # Every line is collected regardless of whether it is the marker
            # (the original duplicated this append in both if/else branches).
            stderr_logs.append(line)

        for log in expected_stderr_logs:
            if not contains_substring(stderr_logs, log):
                raise AssertionError(
                    f"Log should contain `{log}` but does not")

        for log in unexpected_stderr_logs:
            if contains_substring(stderr_logs, log):
                raise AssertionError(
                    f"Log should not contain `{log}` but does")

        log_dir = TrinityConfig(app_identifier="eth1", network_id=1).log_dir
        # Pick the most recently created file in the log directory.
        log_file_path = max(log_dir.glob('*'), key=os.path.getctime)
        with open(log_file_path) as log_file:
            file_content = log_file.read()

            for log in expected_file_logs:
                if log not in file_content:
                    raise AssertionError(
                        f"Logfile should contain `{log}` but does not")

            for log in unexpected_file_logs:
                if log in file_content:
                    raise AssertionError(
                        f"Logfile should not contain `{log}` but does")
async def test_web3_commands_via_attached_console(command,
                                                  expected_network_id,
                                                  expected_genesis_hash,
                                                  xdg_trinity_root,
                                                  unused_tcp_port):
    """Start trinity, attach an interactive console and run basic web3 calls."""
    # Substitute the per-test XDG root into the command template.
    command = tuple(
        fragment.replace('{trinity_root_path}', str(xdg_trinity_root))
        for fragment in command
    )
    # The test mostly fails because the JSON-RPC requests time out. We slim down
    # services to make the application less busy and improve the overall answer rate.
    command += ('--sync-mode=none', '--disable-discovery', '--disable-upnp')
    command = amend_command_for_unused_port(command, unused_tcp_port)
    # The attach sub-command reuses the same flags minus the program name.
    attach_cmd = list(command[1:] + ('attach',))
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, {
            "Started DB server process",
            "Starting components",
            "Components started",
            "IPC started at",
            # Ensure we do not start making requests before Trinity is ready.
            # Waiting for the json-rpc-api event bus to connect to other endpoints
            # seems to be late enough in the process for this to be the case.
            "EventBus Endpoint bjson-rpc-api connecting to other Endpoints",
        })
        console = pexpect.spawn(
            'trinity', attach_cmd, logfile=sys.stdout, encoding="utf-8")
        try:
            console.expect("An instance of Web3 connected to the running chain")
            console.sendline("w3.net.version")
            console.expect(f"'{expected_network_id}'")
            console.sendline("w3")
            console.expect("web3.main.Web3")
            console.sendline("w3.eth.getBlock(0).number")
            console.expect(str(GENESIS_BLOCK_NUMBER))
            console.sendline("w3.eth.getBlock(0).hash")
            console.expect(expected_genesis_hash)
        except pexpect.TIMEOUT:
            raise Exception("Trinity attach timeout")
        finally:
            console.close()
async def test_web3_commands_via_attached_console(command,
                                                  expected_network_id,
                                                  expected_genesis_hash,
                                                  xdg_trinity_root,
                                                  unused_tcp_port):
    """Start trinity, attach an interactive console and run basic web3 calls."""
    # Substitute the per-test XDG root into the command template.
    command = tuple(
        fragment.replace('{trinity_root_path}', str(xdg_trinity_root))
        for fragment in command
    )
    # use a random port each time, in case a previous run went awry and left behind a
    # trinity instance
    command += (f'--port={unused_tcp_port}',)
    # The attach sub-command reuses the same flags minus the program name.
    attach_cmd = list(command[1:] + ('attach',))
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, {
            "Started DB server process",
            "Component started: Sync / PeerPool",
            "IPC started at",
            "Component started: JSON-RPC API",
            # Ensure we do not start making requests before Trinity is ready.
            # Waiting for the json-rpc-api event bus to connect to other endpoints
            # seems to be late enough in the process for this to be the case.
            "EventBus Endpoint bjson-rpc-api connecting to other Endpoints",
        })
        console = pexpect.spawn(
            'trinity', attach_cmd, logfile=sys.stdout, encoding="utf-8")
        try:
            console.expect("An instance of Web3 connected to the running chain")
            console.sendline("w3.net.version")
            console.expect(f"'{expected_network_id}'")
            console.sendline("w3")
            console.expect("web3.main.Web3")
            console.sendline("w3.eth.getBlock(0).number")
            console.expect(str(GENESIS_BLOCK_NUMBER))
            console.sendline("w3.eth.getBlock(0).hash")
            console.expect(expected_genesis_hash)
        except pexpect.TIMEOUT:
            raise Exception("Trinity attach timeout")
        finally:
            console.close()
async def run_then_shutdown_and_yield_output():
    # This test spins up Trinity, waits until it has started syncing, sends a SIGINT and then
    # tries to scan the entire shutdown process for errors. It needs a little bit more time.
    async with AsyncProcessRunner.run(command, timeout_sec=50) as runner:
        # Somewhat arbitrary but we wait until the syncer starts before we trigger the shutdown.
        # At this point, most of the internals should be set up, leaving us with more room for
        # failure which is what we are looking for in this test.
        shutdown_initiated = False
        async for line in runner.stderr:
            if "BeamSyncService" in line:
                shutdown_initiated = True
                runner.kill(signal.SIGINT)
            # We are only interested in the output that is created
            # after we initiate the shutdown
            if shutdown_initiated:
                yield line
async def test_missing_genesis_time_arg(command):
    """Omitting both genesis timing flags must produce an argparse error."""
    expected_error = {
        "one of the arguments --genesis-delay --genesis-time is required",
    }
    async with AsyncProcessRunner.run(command, timeout_sec=30) as runner:
        assert await contains_all(runner.stderr, expected_error)
async def test_web3_commands_via_attached_console(command,
                                                  expected_network_id,
                                                  expected_genesis_hash,
                                                  expected_chain_id,
                                                  xdg_trinity_root,
                                                  unused_tcp_port):
    """
    Start trinity, attach an interactive console via ``trinity ... attach``
    and verify basic web3 queries plus the ``admin_nodeInfo`` API output.
    """
    # Substitute the per-test XDG root into the command template.
    command = tuple(
        fragment.replace('{trinity_root_path}', str(xdg_trinity_root))
        for fragment in command
    )
    # The test mostly fails because the JSON-RPC requests time out. We slim down
    # services to make the application less busy and improve the overall answer rate.
    command += ('--sync-mode=none', '--disable-discovery', '--disable-upnp')
    command = amend_command_for_unused_port(command, unused_tcp_port)
    # The attach sub-command reuses the same flags minus the program name.
    attach_cmd = list(command[1:] + ('attach',))
    async with AsyncProcessRunner.run(command, timeout_sec=120) as runner:
        assert await contains_all(runner.stderr, {
            "Started DB server process",
            "Starting components",
            "Components started",
            "IPC started at",
            # Ensure we do not start making requests before Trinity is ready.
            # Waiting for the JSON-RPC API to be announced seems to be
            # late enough in the process for this to be the case.
            "New EventBus Endpoint connected bjson-rpc-api",
        })
        attached_trinity = pexpect.spawn(
            'trinity', attach_cmd, logfile=sys.stdout, encoding="utf-8")
        try:
            attached_trinity.expect_exact("An instance of Web3 connected to the running chain")
            attached_trinity.sendline("w3.net.version")
            attached_trinity.expect_exact(f"'{expected_network_id}'")
            attached_trinity.sendline("w3")
            attached_trinity.expect_exact("web3.main.Web3")
            attached_trinity.sendline("w3.eth.getBlock(0).number")
            attached_trinity.expect_exact(str(GENESIS_BLOCK_NUMBER))
            attached_trinity.sendline("w3.eth.getBlock(0).hash")
            attached_trinity.expect_exact(expected_genesis_hash)
            # The following verifies the admin_nodeInfo API but doesn't check the exact return
            # value of every property. Some values are non deterministic such as the current head
            # which might vary depending on how fast the node starts syncing.
            attached_trinity.sendline("w3.geth.admin.node_info()")
            attached_trinity.expect_exact("'enode': 'enode://")
            attached_trinity.expect_exact("'ip': '::'")
            attached_trinity.expect_exact("'listenAddr': '[::]")
            attached_trinity.expect_exact("'name': 'Trinity/")
            attached_trinity.expect_exact("'ports': AttributeDict({")
            attached_trinity.expect_exact("'protocols': AttributeDict({'eth': AttributeDict({'version': 'eth/65'")  # noqa: E501
            attached_trinity.expect_exact("'difficulty': ")
            attached_trinity.expect_exact(f"'genesis': '{expected_genesis_hash}'")
            attached_trinity.expect_exact("'head': '0x")
            attached_trinity.expect_exact(f"'network': {expected_network_id}")
            attached_trinity.expect_exact("'config': AttributeDict({")
            attached_trinity.expect_exact(f"'chainId': {expected_chain_id}")
        except pexpect.TIMEOUT:
            raise Exception("Trinity attach timeout")
        finally:
            attached_trinity.close()
async def test_directory_generation(command, tmpdir):
    """The beacon node must log its validator and receive-server starting."""
    expected_messages = {"Validator", "BCCReceiveServer"}
    async with AsyncProcessRunner.run(command, timeout_sec=30) as runner:
        assert await contains_all(runner.stderr, expected_messages)
async def test_async_process_runner(command):
    """
    Sanity-check AsyncProcessRunner itself: the one-second timeout must fire
    without the context manager swallowing control flow.
    """
    async with AsyncProcessRunner.run(command, timeout_sec=1) as runner:
        # The keyword never appears in the output, so contains_all comes back
        # False once the timeout expires.
        assert not await contains_all(runner.stderr, {"Inexistent keyword"})
        return
    # Guard: only reachable if the context manager suppressed the early return
    # or an exception, which would indicate a broken AsyncProcessRunner.
    raise AssertionError(
        "Unreachable: AsyncProcessRunner skipped the return statement")
async def test_component_boot(command):
    """A booted component must report that its server is running."""
    expected_messages = {
        "Running server",
    }
    async with AsyncProcessRunner.run(command, timeout_sec=30) as runner:
        assert await contains_all(runner.stderr, expected_messages)