Example 1
def test_node_client_get_node_metadata(tempfile_path):
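    """Store several mock nodes, then verify that the storage client returns them all, keyed by staker address."""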
    # Add some node data
    node_storage = CrawlerNodeStorage(storage_filepath=tempfile_path)
    node_1 = create_random_mock_node()
    node_2 = create_random_mock_node()
    node_3 = create_random_mock_node()
    node_4 = create_random_mock_node()
    node_5 = create_random_mock_node()

    node_list = [node_1, node_2, node_3, node_4, node_5]
    for node in node_list:
        node_storage.store_node_metadata(node=node)

    node_db_client = CrawlerStorageClient(db_filepath=tempfile_path)
    result = node_db_client.get_known_nodes_metadata()

    # result is sorted by staker address
    node_list.sort(key=lambda x: x.checksum_address)
    assert len(result) == len(node_list)

    # "result" of form {staker_address -> {column_name -> column_value}}
    for idx, key in enumerate(result):
        node_info = result[key]

        expected_row = convert_node_to_db_row(node_list[idx])
        for info_idx, column in enumerate(CrawlerNodeStorage.NODE_DB_SCHEMA):
            assert node_info[column[0]] == expected_row[info_idx], \
                f"{column[0]} should match"
Example 2
def create_nodes(num_nodes: int, current_period: int):
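    """Create `num_nodes` mock nodes and a dict mapping each checksum address
    to a randomized last-confirmed period (0 means never confirmed)."""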
    nodes_list = []
    base_active_period = current_period + 1
    last_confirmed_period_dict = dict()
    for i in range(0, num_nodes):
        node = create_random_mock_node(generate_certificate=False)
        nodes_list.append(node)

        last_confirmed_period = base_active_period - random.randrange(0, 3)
        # roughly 10% of the time, flag the node as never confirmed
        if random.random() > 0.9:
            last_confirmed_period = 0
        last_confirmed_period_dict[node.checksum_address] = last_confirmed_period

    return nodes_list, last_confirmed_period_dict
Example 3
def test_storage_db_clear_not_metadata():
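    """Clearing only certificates should leave the node, state, and teacher DB tables untouched."""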
    node_storage = CrawlerNodeStorage(storage_filepath=IN_MEMORY_FILEPATH)

    # store some data
    node = create_random_mock_node()
    node_storage.store_node_metadata(node=node)

    state = create_specific_mock_state()
    node_storage.store_state_metadata(state=state)

    teacher_checksum = '0x123456789'
    node_storage.store_current_teacher(teacher_checksum)

    verify_all_db_tables(node_storage.db_conn, expect_empty=False)

    # only clear certificates data
    node_storage.clear(metadata=False, certificates=True)

    # db tables should not have been cleared
    verify_all_db_tables(node_storage.db_conn, expect_empty=False)
Example 4
def test_storage_db_clear_only_metadata_not_certificates(sqlite_connection):
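    """Clearing only metadata should empty the DB tables while leaving certificates untouched."""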
    node_storage = CrawlerNodeStorage(storage_filepath=IN_MEMORY_FILEPATH)

    # store some data
    node = create_random_mock_node()
    node_storage.store_node_metadata(node=node)

    state = create_specific_mock_state()
    node_storage.store_state_metadata(state=FleetSensor.abridged_state_details(state))

    teacher_checksum = '0x123456789'
    node_storage.store_current_teacher(teacher_checksum)

    verify_all_db_tables(sqlite_connection, expect_empty=False)

    # clear metadata tables
    node_storage.clear(metadata=True, certificates=False)

    # db tables should have been cleared
    verify_all_db_tables(sqlite_connection, expect_empty=True)
Example 5
def test_storage_db_clear():
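    """A full clear() should empty all DB tables."""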
    node_storage = CrawlerNodeStorage(storage_filepath=IN_MEMORY_FILEPATH)
    verify_all_db_tables_exist(node_storage.db_conn)

    # store some data
    node = create_random_mock_node()
    node_storage.store_node_metadata(node=node)

    state = create_specific_mock_state()
    node_storage.store_state_metadata(
        state=FleetStateTracker.abridged_state_details(state))

    teacher_checksum = '0x123456789'
    node_storage.store_current_teacher(teacher_checksum)

    verify_all_db_tables(node_storage.db_conn, expect_empty=False)

    # clear tables
    node_storage.clear()

    # db tables should have been cleared
    verify_all_db_tables(node_storage.db_conn, expect_empty=True)
Example 6
def test_crawler_learn_about_nodes(new_influx_db, get_agent, get_economics,
                                   tempfile_path):
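    """Run the crawler's _learn_about_nodes() callable against a mocked staking agent and
    verify that node metadata, fleet states, and InfluxDB points are recorded for each
    newly remembered node."""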
    mock_influxdb_client = new_influx_db.return_value
    mock_influxdb_client.write_points.return_value = True

    # TODO: the crawler's use of `agent.blockchain` prevents passing spec=StakingEscrowAgent to MagicMock;
    #       doing so raises: AttributeError: Mock object has no attribute 'blockchain'
    staking_agent = MagicMock(autospec=True)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    token_economics = StandardTokenEconomics()
    get_economics.return_value = token_economics

    crawler = create_crawler(node_db_filepath=tempfile_path)
    node_db_client = CrawlerStorageClient(db_filepath=tempfile_path)
    try:
        crawler.start()
        assert crawler.is_running

        for i in range(0, 5):
            random_node = create_random_mock_node(generate_certificate=True)
            crawler.remember_node(node=random_node, record_fleet_state=True)
            known_nodes = node_db_client.get_known_nodes_metadata()
            assert len(known_nodes) > i
            assert random_node.checksum_address in known_nodes

            previous_states = node_db_client.get_previous_states_metadata()
            assert len(previous_states) > i

            # configure staking agent for blockchain calls
            tokens = NU(int(15000 + i * 2500), 'NU').to_nunits()
            current_period = datetime_to_period(
                maya.now(), token_economics.seconds_per_period)
            initial_period = current_period - i
            terminal_period = current_period + (i + 50)
            last_active_period = current_period - i
            staking_agent.get_worker_from_staker.side_effect = \
                lambda staker_address: crawler.node_storage.get(federated_only=False,
                                                                checksum_address=staker_address).worker_address

            configure_mock_staking_agent(staking_agent=staking_agent,
                                         tokens=tokens,
                                         current_period=current_period,
                                         initial_period=initial_period,
                                         terminal_period=terminal_period,
                                         last_active_period=last_active_period)

            # run crawler callable
            crawler._learn_about_nodes()

            # ensure data written to influx table
            mock_influxdb_client.write_points.assert_called_once()

            # expected db row added
            write_points_call_args_list = mock_influxdb_client.write_points.call_args_list
            influx_db_line_protocol_statement = str(
                write_points_call_args_list[0][0])

            expected_arguments = [
                f'staker_address={random_node.checksum_address}',
                f'worker_address="{random_node.worker_address}"',
                f'stake={float(NU.from_nunits(tokens).to_tokens())}',
                f'locked_stake={float(NU.from_nunits(tokens).to_tokens())}',
                f'current_period={current_period}i',
                f'last_confirmed_period={last_active_period}i',
                f'work_orders={len(random_node.work_orders())}i'
            ]
            for arg in expected_arguments:
                assert arg in influx_db_line_protocol_statement, \
                    f"{arg} in {influx_db_line_protocol_statement} for iteration {i}"

            mock_influxdb_client.reset_mock()
    finally:
        crawler.stop()

    mock_influxdb_client.close.assert_called_once()
    assert not crawler.is_running
Example 7
def test_dashboard_render(new_blockchain_db_client, get_agent, tempfile_path,
                          dash_duo):
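    """Render the monitor dashboard against mocked storage, agents, and blockchain DB data,
    verify the displayed values, and check that the node/state update buttons pick up newly
    stored rows."""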
    ############## SETUP ################
    current_period = 18622

    # create node metadata
    nodes_list, last_confirmed_period_dict = create_nodes(
        num_nodes=5, current_period=current_period)

    # create states
    states_list = create_states(num_states=3)

    # write node, teacher (first item in node list), and state data to storage
    node_storage = CrawlerNodeStorage(storage_filepath=tempfile_path)
    store_node_db_data(node_storage, nodes=nodes_list, states=states_list)

    # Setup StakingEscrowAgent and ContractAgency
    partitioned_stakers = (25, 5, 10)  # confirmed, pending, inactive
    global_locked_tokens = NU(1000000, 'NU').to_nunits()
    staking_agent = create_mocked_staker_agent(
        partitioned_stakers=partitioned_stakers,
        current_period=current_period,
        global_locked_tokens=global_locked_tokens,
        last_confirmed_period_dict=last_confirmed_period_dict,
        nodes_list=nodes_list)
    contract_agency = MockContractAgency(staking_agent=staking_agent)
    get_agent.side_effect = contract_agency.get_agent

    # Setup Blockchain DB Client
    historical_staked_tokens, historical_stakers, historical_work_orders = \
        create_blockchain_db_historical_data(days_in_past=5)
    mocked_blockchain_db_client = new_blockchain_db_client.return_value
    configure_mocked_blockchain_db_client(
        mocked_db_client=mocked_blockchain_db_client,
        historical_tokens=historical_staked_tokens,
        historical_stakers=historical_stakers,
        historical_work_orders=historical_work_orders)

    ############## RUN ################
    server = Flask("monitor-dashboard")
    dashboard = monitor.dashboard.Dashboard(
        flask_server=server,
        route_url='/',
        registry=None,
        network='goerli',
        influx_host='localhost',
        influx_port=8086,
        node_storage_db_filepath=tempfile_path)
    dash_duo.start_server(dashboard.dash_app)

    # check version
    assert dash_duo.wait_for_element_by_id(
        'version').text == f'v{nucypher.__version__}'

    # check current period
    assert dash_duo.wait_for_element_by_id('current-period-value').text == str(
        current_period)

    # check domain
    assert dash_duo.wait_for_element_by_id('domain-value').text == 'goerli'

    # check active ursulas
    confirmed, pending, inactive = partitioned_stakers
    assert dash_duo.wait_for_element_by_id('active-ursulas-value').text == \
           f"{confirmed}/{confirmed + pending + inactive}"

    # check staked tokens
    assert dash_duo.wait_for_element_by_id('staked-tokens-value').text == str(
        NU.from_nunits(global_locked_tokens))

    #
    # check dash components/tables - keeping it simple by simply checking text
    # TODO there might be an easier way to test this
    #

    # staker breakdown
    pie_chart_text = dash_duo.wait_for_element_by_id(
        'staker-breakdown-graph').text
    for num in partitioned_stakers:
        assert str(num) in pie_chart_text

    # historical and future graphs are difficult to test - values aren't available through the Selenium WebElement
    # The previous assertions (below) were incorrect - the `text` property only included the y-axis labels (not values);
    # it just so happened that those labels matched the expected values
    # -> simply ensure that these graphs are loaded for now
    historical_stakers = dash_duo.wait_for_element_by_id(
        'prev-stakers-graph').text
    historical_work_orders = dash_duo.wait_for_element_by_id(
        'prev-orders-graph').text
    future_locked_stake = dash_duo.wait_for_element_by_id('locked-graph').text

    # check previous states
    state_table = dash_duo.wait_for_element_by_id('state-table')
    for state in states_list:
        verify_state_data_in_table(state, state_table)

    # check nodes
    node_table = dash_duo.wait_for_element_by_id('node-table')
    for node in nodes_list:
        verify_node_data_in_table(
            node=node,
            last_confirmed_period=last_confirmed_period_dict[
                node.checksum_address],
            current_period=current_period,
            node_table=node_table)

    #
    # test refresh/update buttons
    #

    # add a node and update page - ensure new node is displayed
    new_node = create_random_mock_node(generate_certificate=False)
    last_confirmed_period_dict[new_node.checksum_address] = current_period
    nodes_list.append(new_node)  # add new node to list
    node_storage.store_node_metadata(new_node)

    dash_duo.find_element("#node-update-button").click()

    node_table_updated = dash_duo.wait_for_element_by_id('node-table')
    # check for all nodes including new node
    assert new_node in nodes_list, "ensure new node in list to check"
    for node in nodes_list:
        verify_node_data_in_table(
            node=node,
            last_confirmed_period=last_confirmed_period_dict[
                node.checksum_address],
            current_period=current_period,
            node_table=node_table_updated)

    # add a state and update page - ensure new state is displayed
    new_state = create_random_mock_state()
    states_list.append(new_state)  # add state to list
    node_storage.store_state_metadata(new_state)

    dash_duo.find_element("#state-update-button").click()
    state_table_updated = dash_duo.wait_for_element_by_id('state-table')
    # check for all states including new state
    assert new_state in states_list, "ensure new state in list to check"
    for state in states_list:
        verify_state_data_in_table(state, state_table_updated)