def test_crawler_start_blockchain_db_already_present(new_influx_db, get_agent):
    mock_influxdb_client = new_influx_db.return_value
    mock_influxdb_client.get_list_database.return_value = [{'name': 'db1'},
                                                           {'name': f'{Crawler.INFLUX_DB_NAME}'},
                                                           {'name': 'db3'}]

    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler(dont_set_teacher=True)
    try:
        crawler.start()
        assert crawler.is_running
        mock_influxdb_client.close.assert_not_called()

        # ensure table existence check was run
        mock_influxdb_client.get_list_database.assert_called_once()

        # db not created since it is already present
        mock_influxdb_client.create_database.assert_not_called()
        mock_influxdb_client.create_retention_policy.assert_not_called()
    finally:
        crawler.stop()

    mock_influxdb_client.close.assert_called_once()
    assert not crawler.is_running

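# For reference, a minimal sketch of what the MockContractAgency double used in
# these tests amounts to (an assumption - the real helper lives in the project's
# test utilities and may handle additional agent types):
class _SketchMockContractAgency:
    """Hands back the mocked StakingEscrowAgent no matter which agent class is requested."""

    def __init__(self, staking_agent):
        self.staking_agent = staking_agent

    def get_agent(self, agent_class, *args, **kwargs):
        # assumption: only the StakingEscrowAgent is requested by the code under test
        return self.staking_agent
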
def test_crawler_learn_about_teacher(new_influx_db, get_agent, tempfile_path):
    mock_influxdb_client = new_influx_db.return_value

    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler(node_db_filepath=tempfile_path)
    node_db_client = CrawlerStorageClient(db_filepath=tempfile_path)
    try:
        crawler.start()
        assert crawler.is_running

        # learn about teacher
        crawler.learn_from_teacher_node()

        current_teacher_checksum = node_db_client.get_current_teacher_checksum()
        assert current_teacher_checksum is not None

        known_nodes = node_db_client.get_known_nodes_metadata()
        assert len(known_nodes) > 0
        assert current_teacher_checksum in known_nodes
    finally:
        crawler.stop()

    mock_influxdb_client.close.assert_called_once()
    assert not crawler.is_running

def test_crawler_init(get_agent):
    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler()

    # crawler not yet started
    assert not crawler.is_running

def test_crawler_start_no_influx_db_connection(get_agent):
    staking_agent = MagicMock(spec=StakingEscrowAgent, autospec=True)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler()
    try:
        with pytest.raises(ConnectionError):
            crawler.start()
    finally:
        crawler.stop()

def test_crawler_init(get_agent, get_economics):
    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    token_economics = StandardTokenEconomics()
    get_economics.return_value = token_economics

    crawler = create_crawler(dont_set_teacher=True)

    # crawler not yet started
    assert not crawler.is_running

def test_crawler_start_no_influx_db_connection(get_agent, get_economics):
    staking_agent = MagicMock(spec=StakingEscrowAgent, autospec=True)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    token_economics = StandardTokenEconomics()
    get_economics.return_value = token_economics

    crawler = create_crawler(dont_set_teacher=True)
    try:
        with pytest.raises(ConnectionError):
            crawler.start()
    finally:
        crawler.stop()

def test_monitor_dashboard_run(init_interface, get_agent, new_blockchain_db_client, click_runner):
    # mock BlockchainInterfaceFactory
    init_interface.return_value = MagicMock()

    # mock StakingEscrowAgent and ContractAgency
    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    dashboard_args = ('dashboard', '--dry-run')
    result = click_runner.invoke(monitor_cli, dashboard_args, catch_exceptions=False)
    assert MONITOR_BANNER.format(DASHBOARD) in result.output
    assert result.exit_code == 0

def test_monitor_crawl_run(init_interface, get_agent, new_crawler_node_storage, click_runner):
    # mock BlockchainInterfaceFactory
    init_interface.return_value = MagicMock()

    # mock StakingEscrowAgent and ContractAgency
    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawl_args = ('crawl', '--dry-run')
    result = click_runner.invoke(monitor_cli, crawl_args, catch_exceptions=False)
    assert MONITOR_BANNER.format(CRAWLER) in result.output
    assert result.exit_code == 0

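# The click_runner argument used by the CLI tests above is presumably a conftest
# fixture along these lines (a sketch of an assumed fixture; the real one may
# configure the runner differently):
#
#     @pytest.fixture(scope='module')
#     def click_runner():
#         from click.testing import CliRunner
#         return CliRunner()
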
def test_crawler_stop_before_start(new_influx_db, get_agent):
    mock_influxdb_client = new_influx_db.return_value

    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler()

    crawler.stop()
    new_influx_db.assert_not_called()  # db only initialized when crawler is started
    mock_influxdb_client.close.assert_not_called()  # just to be sure
    assert not crawler.is_running

def test_crawler_start_then_stop(new_influx_db, get_agent):
    mock_influxdb_client = new_influx_db.return_value

    staking_agent = MagicMock(spec=StakingEscrowAgent)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    crawler = create_crawler(dont_set_teacher=True)
    try:
        crawler.start()
        assert crawler.is_running
        mock_influxdb_client.close.assert_not_called()
    finally:
        crawler.stop()

    mock_influxdb_client.close.assert_called_once()
    assert not crawler.is_running

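# The new_influx_db / get_agent parameters in the crawler tests are presumably
# injected by mock.patch decorators roughly like the following (a sketch - the
# exact patch targets are assumptions and depend on where the Crawler imports
# InfluxDBClient and ContractAgency):
#
#     @patch('monitor.crawler.ContractAgency.get_agent', autospec=True)
#     @patch('monitor.crawler.InfluxDBClient', autospec=True)
#     def test_crawler_start_then_stop(new_influx_db, get_agent):
#         ...
#
# Note that decorators apply bottom-up, so the innermost patch (InfluxDBClient)
# maps to the first test argument.
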
def test_crawler_learn_about_nodes(new_influx_db, get_agent, get_economics, tempfile_path):
    mock_influxdb_client = new_influx_db.return_value
    mock_influxdb_client.write_points.return_value = True

    # TODO: issue with use of `agent.blockchain` means spec=StakingEscrowAgent can't be used in MagicMock;
    #  otherwise we get: AttributeError: Mock object has no attribute 'blockchain'
    staking_agent = MagicMock(autospec=True)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    token_economics = StandardTokenEconomics()
    get_economics.return_value = token_economics

    crawler = create_crawler(node_db_filepath=tempfile_path)
    node_db_client = CrawlerStorageClient(db_filepath=tempfile_path)
    try:
        crawler.start()
        assert crawler.is_running

        for i in range(0, 5):
            random_node = create_random_mock_node(generate_certificate=True)
            crawler.remember_node(node=random_node, record_fleet_state=True)
            known_nodes = node_db_client.get_known_nodes_metadata()
            assert len(known_nodes) > i
            assert random_node.checksum_address in known_nodes

            previous_states = node_db_client.get_previous_states_metadata()
            assert len(previous_states) > i

            # configure staking agent for blockchain calls
            tokens = NU(int(15000 + i * 2500), 'NU').to_nunits()
            current_period = datetime_to_period(maya.now(), token_economics.seconds_per_period)
            initial_period = current_period - i
            terminal_period = current_period + (i + 50)
            last_active_period = current_period - i
            staking_agent.get_worker_from_staker.side_effect = \
                lambda staker_address: crawler.node_storage.get(federated_only=False,
                                                                checksum_address=staker_address).worker_address

            configure_mock_staking_agent(staking_agent=staking_agent,
                                         tokens=tokens,
                                         current_period=current_period,
                                         initial_period=initial_period,
                                         terminal_period=terminal_period,
                                         last_active_period=last_active_period)

            # run crawler callable
            crawler._learn_about_nodes()

            # ensure data written to influx table
            mock_influxdb_client.write_points.assert_called_once()

            # expected db row added
            write_points_call_args_list = mock_influxdb_client.write_points.call_args_list
            influx_db_line_protocol_statement = str(write_points_call_args_list[0][0])

            expected_arguments = [f'staker_address={random_node.checksum_address}',
                                  f'worker_address="{random_node.worker_address}"',
                                  f'stake={float(NU.from_nunits(tokens).to_tokens())}',
                                  f'locked_stake={float(NU.from_nunits(tokens).to_tokens())}',
                                  f'current_period={current_period}i',
                                  f'last_confirmed_period={last_active_period}i',
                                  f'work_orders={len(random_node.work_orders())}i']
            for arg in expected_arguments:
                assert arg in influx_db_line_protocol_statement, \
                    f"{arg} in {influx_db_line_protocol_statement} for iteration {i}"

            mock_influxdb_client.reset_mock()
    finally:
        crawler.stop()

    mock_influxdb_client.close.assert_called_once()
    assert not crawler.is_running

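# A minimal sketch of what configure_mock_staking_agent presumably does: it pins
# return values on the MagicMock staking agent so the crawler's blockchain reads
# are deterministic. The agent method names below are assumptions about the
# StakingEscrowAgent API and may not match the real helper exactly.
def _sketch_configure_mock_staking_agent(staking_agent, tokens, current_period,
                                         initial_period, terminal_period, last_active_period):
    staking_agent.owned_tokens.return_value = tokens        # total stake (assumed method name)
    staking_agent.get_locked_tokens.return_value = tokens   # locked stake (assumed method name)
    staking_agent.get_current_period.return_value = current_period
    staking_agent.get_last_active_period.return_value = last_active_period
    # assumed shape: a single sub-stake spanning initial_period..terminal_period
    staking_agent.get_all_stakes.return_value = [(initial_period, terminal_period, tokens)]
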
def test_dashboard_render(new_blockchain_db_client, get_agent, tempfile_path, dash_duo):
    ############## SETUP ################
    current_period = 18622

    # create node metadata
    nodes_list, last_confirmed_period_dict = create_nodes(num_nodes=5, current_period=current_period)

    # create states
    states_list = create_states(num_states=3)

    # write node, teacher (first item in node list), and state data to storage
    node_storage = CrawlerNodeStorage(storage_filepath=tempfile_path)
    store_node_db_data(node_storage, nodes=nodes_list, states=states_list)

    # Setup StakingEscrowAgent and ContractAgency
    partitioned_stakers = (25, 5, 10)  # confirmed, pending, inactive
    global_locked_tokens = NU(1000000, 'NU').to_nunits()
    staking_agent = create_mocked_staker_agent(partitioned_stakers=partitioned_stakers,
                                               current_period=current_period,
                                               global_locked_tokens=global_locked_tokens,
                                               last_confirmed_period_dict=last_confirmed_period_dict,
                                               nodes_list=nodes_list)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    # Setup Blockchain DB Client
    historical_staked_tokens, historical_stakers, historical_work_orders = \
        create_blockchain_db_historical_data(days_in_past=5)
    mocked_blockchain_db_client = new_blockchain_db_client.return_value
    configure_mocked_blockchain_db_client(mocked_db_client=mocked_blockchain_db_client,
                                          historical_tokens=historical_staked_tokens,
                                          historical_stakers=historical_stakers,
                                          historical_work_orders=historical_work_orders)

    ############## RUN ################
    server = Flask("monitor-dashboard")
    dashboard = monitor.dashboard.Dashboard(flask_server=server,
                                            route_url='/',
                                            registry=None,
                                            network='goerli',
                                            influx_host='localhost',
                                            influx_port=8086,
                                            node_storage_db_filepath=tempfile_path)
    dash_duo.start_server(dashboard.dash_app)

    # check version
    assert dash_duo.wait_for_element_by_id('version').text == f'v{nucypher.__version__}'

    # check current period
    assert dash_duo.wait_for_element_by_id('current-period-value').text == str(current_period)

    # check domain
    assert dash_duo.wait_for_element_by_id('domain-value').text == 'goerli'

    # check active ursulas
    confirmed, pending, inactive = partitioned_stakers
    assert dash_duo.wait_for_element_by_id('active-ursulas-value').text == \
        f"{confirmed}/{confirmed + pending + inactive}"

    # check staked tokens
    assert dash_duo.wait_for_element_by_id('staked-tokens-value').text == str(NU.from_nunits(global_locked_tokens))

    #
    # check dash components/tables - keeping it simple by just checking text
    # TODO there might be an easier way to test this
    #

    # staker breakdown
    pie_chart_text = dash_duo.wait_for_element_by_id('staker-breakdown-graph').text
    for num in partitioned_stakers:
        assert str(num) in pie_chart_text

    # historical and future graphs are difficult to test - values aren't available through the selenium WebElement.
    # The previous assertions (below) were incorrect: the `text` property only included the y-axis labels (not the
    # plotted values) - it just so happened that the test used the same values for both.
    # -> Simply ensure that these graphs are loaded for now
    historical_stakers = dash_duo.wait_for_element_by_id('prev-stakers-graph').text
    historical_work_orders = dash_duo.wait_for_element_by_id('prev-orders-graph').text
    future_locked_stake = dash_duo.wait_for_element_by_id('locked-graph').text

    # check previous states
    state_table = dash_duo.wait_for_element_by_id('state-table')
    for state in states_list:
        verify_state_data_in_table(state, state_table)

    # check nodes
    node_table = dash_duo.wait_for_element_by_id('node-table')
    for node in nodes_list:
        verify_node_data_in_table(node=node,
                                  last_confirmed_period=last_confirmed_period_dict[node.checksum_address],
                                  current_period=current_period,
                                  node_table=node_table)

    #
    # test refresh/update buttons
    #

    # add a node and update page - ensure new node is displayed
    new_node = create_random_mock_node(generate_certificate=False)
    last_confirmed_period_dict[new_node.checksum_address] = current_period
    nodes_list.append(new_node)  # add new node to list
    node_storage.store_node_metadata(new_node)

    dash_duo.find_element("#node-update-button").click()
    node_table_updated = dash_duo.wait_for_element_by_id('node-table')

    # check for all nodes including new node
    assert new_node in nodes_list, "ensure new node is in the list to check"
    for node in nodes_list:
        verify_node_data_in_table(node=node,
                                  last_confirmed_period=last_confirmed_period_dict[node.checksum_address],
                                  current_period=current_period,
                                  node_table=node_table_updated)

    # add a state and update page - ensure new state is displayed
    new_state = create_random_mock_state()
    states_list.append(new_state)  # add state to list
    node_storage.store_state_metadata(new_state)

    dash_duo.find_element("#state-update-button").click()
    state_table_updated = dash_duo.wait_for_element_by_id('state-table')

    # check for all states including new state
    assert new_state in states_list, "ensure new state is in the list to check"
    for state in states_list:
        verify_state_data_in_table(state, state_table_updated)