Example #1
0
def get_node(nodes_so_far: List[NodeConfig],
             polkadot_api_data_wrapper: PolkadotApiWrapper,
             web_sockets_connected_to_api: List) -> Optional[NodeConfig]:
    # Get node's name
    node_names_so_far = [n.node_name for n in nodes_so_far]
    while True:
        node_name = input('Unique node name:\n')
        if node_name in node_names_so_far:
            print('Node name must be unique.')
        else:
            break

    # Get node's WS url
    while True:
        ws_url = input('Node\'s WS url (typically ws://NODE_IP:9944):\n')
        if ws_url in web_sockets_connected_to_api:
            print('Testing connection with node {}'.format(ws_url))
            try:
                polkadot_api_data_wrapper.ping_node(ws_url)
                print('Success.')
                break
            except Exception:
                if not yn_prompt('Failed to connect with node {}. Do you want '
                                 'to try again? (Y/n)\n'.format(ws_url)):
                    return None
        else:
            if not yn_prompt(
                    'The API Server is not connected with the node at web '
                    'socket {}. Please make sure that the node was added in '
                    'the API\'s config. Do you want to try again? '
                    '(Y/n)\n'.format(ws_url)):
                return None

    # Ask if node is a validator
    node_is_validator = yn_prompt('Is this node a validator? (Y/n)\n')

    # Ask if node is an archive node.
    # Note: if the node is a validator, it must also be an archive node.
    #       The question is still asked in case this changes in a future
    #       update.
    node_is_archive_node = yn_prompt('Is this node an archive node? (Y/n)\n')

    # Get validator's stash account address.
    if node_is_validator:
        while True:
            stash_account_address = input('Please enter the validator\'s stash '
                                          'account address:\n')
            if not stash_account_address.strip():
                if not yn_prompt('You cannot leave the stash_account_address '
                                 'field empty for a validator. Do you want to '
                                 'try again? (Y/n)\n'):
                    return None
            else:
                break
    else:
        stash_account_address = ''

    # Return node
    return NodeConfig(node_name, ws_url, node_is_validator,
                      node_is_archive_node, True, True, stash_account_address)
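
# Note: get_node leans on a yn_prompt helper that is not shown in this
# example. Below is a minimal, illustrative sketch of such a helper, assuming
# (from the '(Y/n)' convention above) that an empty answer counts as yes; the
# repository's actual implementation may differ.
def yn_prompt(prompt: str) -> bool:
    # Keep asking until the answer is recognisable as yes or no.
    while True:
        answer = input(prompt).strip().lower()
        if answer in ('', 'y', 'yes'):
            return True
        if answer in ('n', 'no'):
            return False
        print('Please answer Y or n.')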
Example #2
0
    def setUp(self) -> None:
        self.logger = logging.getLogger('dummy')
        self.wrapper = PolkadotApiWrapper(self.logger, self.api_endpoint)

        self.counter_channel = CounterChannel(self.logger)
        self.channel_set = ChannelSet([self.counter_channel], TestInternalConf)

        self.params = {'websocket': self.ws_url}
Example #3
0
    def __init__(self,
                 monitor_name: str,
                 channels: ChannelSet,
                 logger: logging.Logger,
                 node_monitor_max_catch_up_blocks: int,
                 redis: Optional[RedisApi],
                 node: Node,
                 archive_alerts_disabled: bool,
                 data_sources: List[Node],
                 polkadot_api_endpoint: str,
                 internal_conf: InternalConfig = InternalConf):
        super().__init__(monitor_name, channels, logger, redis, internal_conf)

        self._node = node
        self._data_wrapper = PolkadotApiWrapper(logger, polkadot_api_endpoint)
        self._node_monitor_max_catch_up_blocks = \
            node_monitor_max_catch_up_blocks

        self._redis_alive_key = \
            self._internal_conf.redis_node_monitor_alive_key_prefix + \
            self._monitor_name
        self._redis_alive_key_timeout = \
            self._internal_conf.redis_node_monitor_alive_key_timeout
        self._redis_last_height_key_timeout = \
            self._internal_conf.redis_node_monitor_last_height_key_timeout
        self._redis_session_index_key = \
            self._internal_conf.redis_node_monitor_session_index_key_prefix \
            + self._monitor_name
        self._redis_last_height_checked_key = \
            self._internal_conf. \
                redis_node_monitor_last_height_checked_key_prefix \
            + self._monitor_name

        # The data sources for indirect monitoring are all nodes from the same
        # chain which have been set as a data source in the config.
        self._indirect_monitoring_data_sources = data_sources

        # The data sources for archive monitoring are all archive nodes from
        # the same chain that have been set as a data source in the config.
        self._archive_monitoring_data_sources = [
            node for node in data_sources if node.is_archive_node
        ]
        self.last_data_source_used = None
        self._last_height_checked = NONE
        self._session_index = None
        self._monitor_is_catching_up = False
        self._indirect_monitoring_disabled = len(data_sources) == 0
        self._no_live_archive_node_alert_sent = False
        self._archive_alerts_disabled = archive_alerts_disabled

        self.load_state()
Example #4
0
    def __init__(self, monitor_name: str, blockchain: Blockchain,
                 channels: ChannelSet, logger: logging.Logger,
                 redis: Optional[RedisApi], data_sources: List[Node],
                 polkadot_api_endpoint: str,
                 internal_conf: InternalConfig = InternalConf):
        super().__init__(monitor_name, channels, logger, redis, internal_conf)

        self._blockchain = blockchain
        self.data_sources = data_sources
        self._data_wrapper = PolkadotApiWrapper(logger, polkadot_api_endpoint)

        self.last_data_source_used = None

        self._redis_alive_key_timeout = \
            self._internal_conf.redis_blockchain_monitor_alive_key_timeout
Example #5
0
def setup_api(cp: ConfigParser) -> None:
    print('==== Polkadot API Server')
    print(
        'The Polkadot API Server is used by the alerter to get data from the '
        'nodes. It is important that before running both the alerter and '
        'this setup, the Polkadot API Server is set up and running.')

    if is_already_set_up(cp, 'api') and \
            not yn_prompt('The Polkadot API Server is already set up. Do you '
                          'wish to replace the current config? (Y/n)\n'):
        return

    reset_section('api', cp)
    cp['api']['polkadot_api_endpoint'] = ''

    while True:
        print('You will now be asked to input the API Server\'s address\n'
              'If you will be running PANIC using Docker, do not use '
              'localhost, instead use the full IP address (local or external) '
              'of the machine that the API container will be running on.\n'
              'You should also set the port to 3000. Otherwise, you must run '
              'the API Docker using -p <port>:3000.')
        polkadot_api_endpoint = input(
            'Please insert the API Server\'s address:'
            ' (default: http://localhost:3000)\n')
        polkadot_api_endpoint = 'http://localhost:3000' if \
            polkadot_api_endpoint == '' else polkadot_api_endpoint
        polkadot_api = PolkadotApiWrapper(DUMMY_LOGGER, polkadot_api_endpoint)
        print('Testing connection with endpoint {}'.format(
            polkadot_api_endpoint))
        try:
            polkadot_api.ping_api()
            print('Success.')
            break
        except Exception:
            if not yn_prompt('Failed to connect to endpoint. Do '
                             'you want to try again? (Y/n)\n'):
                return None

    cp['api']['polkadot_api_endpoint'] = polkadot_api_endpoint
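
# Note: setup_api only mutates the ConfigParser passed to it; persisting the
# result is up to the caller. A rough usage sketch follows, with a purely
# illustrative config path (the real file name may differ).
if __name__ == '__main__':
    from configparser import ConfigParser

    cp = ConfigParser()
    cp.read('config/user_config_main.ini')  # illustrative path
    setup_api(cp)
    with open('config/user_config_main.ini', 'w') as config_file:
        cp.write(config_file)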
Example #6
0
class TestPolkadotApi(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        cls.ws_url = 'the_ws'
        cls.api_endpoint = 'the_api'
        cls.block_hash = 'the_block_hash'
        cls.acc_addr = 'the_account_address'
        cls.validator_id = "the_validator_id"
        cls.block_no = 1
        cls.referendum_index = 2
        cls.session_index = 3
        cls.auth_index = 4

    def setUp(self) -> None:
        self.logger = logging.getLogger('dummy')
        self.wrapper = PolkadotApiWrapper(self.logger, self.api_endpoint)

        self.counter_channel = CounterChannel(self.logger)
        self.channel_set = ChannelSet([self.counter_channel], TestInternalConf)

        self.params = {'websocket': self.ws_url}

    def test_api_endpoint_returns_api_endpoint(self):
        self.assertEqual(self.api_endpoint, self.wrapper.api_endpoint)

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_block_hash(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getBlockHash'
        self.params['block_number'] = self.block_no
        api_call = 'chain/getBlockHash'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_block_hash(self.ws_url,
                                                    self.block_no))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_finalized_head(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getFinalizedHead'
        api_call = 'chain/getFinalizedHead'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_finalized_head(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_header(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getHeader'
        self.params['hash'] = self.block_hash
        api_call = 'chain/getHeader'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_header(self.ws_url, self.block_hash))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_system_chain(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/system/chain'
        api_call = 'system/chain'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_system_chain(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_system_health(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/system/health'
        api_call = 'system/health'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_system_health(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_council_members(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/council/members'
        api_call = 'council/members'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_council_members(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_council_proposal_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/council/proposalCount'
        api_call = 'council/proposalCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_council_proposal_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_public_proposal_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/publicPropCount'
        api_call = 'democracy/publicPropCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_public_proposal_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_referendum_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/referendumCount'
        api_call = 'democracy/referendumCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_referendum_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_referendum_info_of(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/referendumInfoOf'
        api_call = 'democracy/referendumInfoOf'
        self.params['referendum_index'] = self.referendum_index
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_referendum_info_of(self.ws_url,
                                                self.referendum_index))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_authored_block(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/imOnline/authoredBlocks'
        self.params['validator_id'] = self.validator_id
        self.params['session_index'] = self.session_index
        api_call = 'imOnline/authoredBlocks'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_authored_blocks(self.ws_url, self.session_index,
                                             self.validator_id))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_received_heartbeats(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/imOnline/receivedHeartbeats'
        self.params['session_index'] = self.session_index
        self.params['auth_index'] = self.auth_index
        api_call = 'imOnline/receivedHeartbeats'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_received_heartbeats(self.ws_url,
                                                 self.session_index,
                                                 self.auth_index))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_current_index(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/currentIndex'
        api_call = 'session/currentIndex'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_current_index(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_disabled_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/disabledValidators'
        api_call = 'session/disabledValidators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_disabled_validators(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_session_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/validators'
        api_call = 'session/validators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_session_validators(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_derive_staking_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/derive/staking/validators'
        api_call = 'staking/validators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_derive_staking_validators(
            self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_eras_stakers(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/staking/erasStakers'
        self.params['account_id'] = self.acc_addr
        api_call = 'staking/erasStakers'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_eras_stakers(self.ws_url, self.acc_addr))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_events(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/system/events'
        self.params['block_hash'] = self.block_hash
        api_call = 'system/events'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_events(self.ws_url, self.block_hash))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_slash_amount(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/custom/getSlashAmount'
        self.params['block_hash'] = self.block_hash
        self.params['account_address'] = self.acc_addr
        api_call = 'custom/getSlashAmount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_slash_amount(self.ws_url, self.block_hash,
                                          self.acc_addr))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_web_sockets_connected_to_an_api(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/getConnectionsList'
        api_call = ''
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_web_sockets_connected_to_an_api())

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_ping_api(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/pingApi'
        self.params = {}
        api_call = ''
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.ping_api())

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_ping_node(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/pingNode'
        api_call = ''
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.ping_node(self.ws_url))

    def test_set_api_as_down_produces_alert_if_api_not_down(self):
        # By default, API not down

        # First time round, error alert is produced
        self.counter_channel.reset()
        self.wrapper.set_api_as_down("", self.channel_set)
        self.assertEqual(1, self.counter_channel.error_count)

        # Second time round, API is already down, so no alerts
        self.counter_channel.reset()
        self.wrapper.set_api_as_down("", self.channel_set)
        self.assertTrue(self.counter_channel.no_alerts())

    def test_set_api_as_up_produces_alert_if_api_is_down(self):
        # By default, API not down

        # First time round, no alert is produced
        self.counter_channel.reset()
        self.wrapper.set_api_as_up("", self.channel_set)
        self.assertTrue(self.counter_channel.no_alerts())

        # In between, we set the api as down
        self.wrapper.set_api_as_down("", self.channel_set)

        # API is now down, so info alert is produced
        self.counter_channel.reset()
        self.wrapper.set_api_as_up("", self.channel_set)
        self.assertEqual(1, self.counter_channel.info_count)
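
# Note: every test above follows the same pattern: patch the low-level JSON
# fetching function and install a side_effect built by api_mock_generator.
# Below is a rough, self-contained illustration of that pattern; get_json and
# Wrapper are stand-ins, not the repository's code.
import unittest
from unittest.mock import patch


def get_json(endpoint: str, params: dict) -> dict:
    raise ConnectionError('would hit the network in real code')


class Wrapper:
    def __init__(self, api: str) -> None:
        self.api = api

    def get_system_health(self, ws: str) -> dict:
        return get_json(self.api + '/api/rpc/system/health', {'websocket': ws})


class TestWrapper(unittest.TestCase):
    @patch(__name__ + '.get_json')
    def test_get_system_health(self, mock):
        # The side_effect verifies the wrapper built the expected request and
        # returns canned, truthy data.
        def fake(endpoint, params):
            assert endpoint == 'the_api/api/rpc/system/health'
            assert params == {'websocket': 'the_ws'}
            return {'isSyncing': False, 'peers': 5}

        mock.side_effect = fake
        self.assertTrue(Wrapper('the_api').get_system_health('the_ws'))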
Example #7
0
class TestPolkadotApi(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        cls.ws_url = 'the_ws'
        cls.api_endpoint = 'the_api'
        cls.block_hash = 'the_block_hash'
        cls.acc_addr = 'the_account_address'
        cls.validator_id = "the_validator_id"
        cls.block_no = 1
        cls.referendum_index = 2
        cls.session_index = 3
        cls.auth_index = 4

    def setUp(self) -> None:
        self.logger = logging.getLogger('dummy')
        self.wrapper = PolkadotApiWrapper(self.logger, self.api_endpoint)

        self.max_time = 15
        self.max_time_less = self.max_time - 10
        self.max_time_more = self.max_time + 2

        self.counter_channel = CounterChannel(self.logger)
        self.channel_set = ChannelSet([self.counter_channel], TestInternalConf)

        self.params = {'websocket': self.ws_url}

    def test_api_endpoint_returns_api_endpoint(self):
        self.assertEqual(self.api_endpoint, self.wrapper.api_endpoint)

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_block_hash(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getBlockHash'
        self.params['block_number'] = self.block_no
        api_call = 'chain/getBlockHash'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_block_hash(self.ws_url, self.block_no))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_finalized_head(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getFinalizedHead'
        api_call = 'chain/getFinalizedHead'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_finalized_head(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_header(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/chain/getHeader'
        self.params['hash'] = self.block_hash
        api_call = 'chain/getHeader'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_header(self.ws_url, self.block_hash))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_system_chain(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/system/chain'
        api_call = 'system/chain'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_system_chain(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_system_health(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/rpc/system/health'
        api_call = 'system/health'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_system_health(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_council_members(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/council/members'
        api_call = 'council/members'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_council_members(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_council_proposal_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/council/proposalCount'
        api_call = 'council/proposalCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_council_proposal_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_public_proposal_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/publicPropCount'
        api_call = 'democracy/publicPropCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_public_proposal_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_referendum_count(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/referendumCount'
        api_call = 'democracy/referendumCount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_referendum_count(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_referendum_info_of(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/democracy/referendumInfoOf'
        api_call = 'democracy/referendumInfoOf'
        self.params['referendum_index'] = self.referendum_index
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_referendum_info_of(
            self.ws_url, self.referendum_index))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_authored_block(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/imOnline/authoredBlocks'
        self.params['validator_id'] = self.validator_id
        self.params['session_index'] = self.session_index
        api_call = 'imOnline/authoredBlocks'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_authored_blocks(
            self.ws_url, self.session_index, self.validator_id))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_received_heartbeats(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/imOnline/receivedHeartbeats'
        self.params['session_index'] = self.session_index
        self.params['auth_index'] = self.auth_index
        api_call = 'imOnline/receivedHeartbeats'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_received_heartbeats(
            self.ws_url, self.session_index, self.auth_index))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_current_index(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/currentIndex'
        api_call = 'session/currentIndex'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_current_index(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_disabled_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/disabledValidators'
        api_call = 'session/disabledValidators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_disabled_validators(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_session_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/session/validators'
        api_call = 'session/validators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_session_validators(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_derive_staking_validators(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/derive/staking/validators'
        api_call = 'staking/validators'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_derive_staking_validators(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_eras_stakers(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/staking/erasStakers'
        self.params['account_id'] = self.acc_addr
        api_call = 'staking/erasStakers'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_eras_stakers(self.ws_url, self.acc_addr))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_active_era(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/staking/activeEra'
        api_call = 'staking/activeEra'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(
            self.wrapper.get_active_era(self.ws_url))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_events(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/query/system/events'
        self.params['block_hash'] = self.block_hash
        api_call = 'system/events'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_events(self.ws_url, self.block_hash))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_slash_amount(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/custom/getSlashAmount'
        self.params['block_hash'] = self.block_hash
        self.params['account_address'] = self.acc_addr
        api_call = 'custom/getSlashAmount'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_slash_amount(
            self.ws_url, self.block_hash, self.acc_addr))

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_get_web_sockets_connected_to_an_api(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/getConnectionsList'
        api_call = ''
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.get_web_sockets_connected_to_an_api())

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_ping_api(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/pingApi'
        self.params = {}
        api_call = ''
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.ping_api())

    @patch(GET_POLKADOT_JSON_FUNCTION)
    def test_ping_node(self, mock):
        # Set up mock
        endpoint = self.api_endpoint + '/api/pingNode'
        api_call = 'pingNode'
        mock.side_effect = api_mock_generator(endpoint, self.params, api_call)

        self.assertTrue(self.wrapper.ping_node(self.ws_url))

    def test_api_not_down_by_default(self) -> None:
        self.assertFalse(self.wrapper.is_api_down)

    def test_set_api_as_down_produces_warning_alert_if_api_not_down(
            self) -> None:
        # Test for validator monitors
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(1, self.counter_channel.warning_count)
        self.assertIsInstance(self.counter_channel.latest_alert,
                              ApiIsDownAlert)

        # Set API up again, and clear state
        self.wrapper.set_api_as_up("", self.channel_set)
        self.counter_channel.reset()

        # Test for non-validator monitors
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertEqual(1, self.counter_channel.warning_count)
        self.assertIsInstance(self.counter_channel.latest_alert,
                              ApiIsDownAlert)

    def test_set_api_as_down_produces_no_warning_alert_if_api_down(
            self) -> None:
        # Test for validator monitors
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.counter_channel.reset()
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(0, self.counter_channel.warning_count)

        # Set API up again, and clear state
        self.wrapper.set_api_as_up("", self.channel_set)
        self.counter_channel.reset()

        # Test for non-validator monitors
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.counter_channel.reset()
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertEqual(0, self.counter_channel.warning_count)

    def test_set_api_as_down_sets_api_down(self) -> None:
        # Test for validator monitors - API previously not down
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertTrue(self.wrapper.is_api_down)

        # Test for validator monitors - API previously down
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertTrue(self.wrapper.is_api_down)

        # Set API up and reset state
        self.counter_channel.reset()
        self.wrapper.set_api_as_up("", self.channel_set)

        # Test for non-validator monitors - API previously not down
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertTrue(self.wrapper.is_api_down)

        # Test for non-validator monitors - API previously down
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertTrue(self.wrapper.is_api_down)

    def test_set_api_as_down_raises_critical_alert_for_val_monitors_if_conditions_are_met(
            self) -> None:
        # Declare API as down and start timer.
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.counter_channel.reset()

        # Enough time passed and no critical alert sent yet, so a critical
        # alert is produced
        sleep(self.max_time_more)
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(1, self.counter_channel.critical_count)
        self.assertIsInstance(self.counter_channel.latest_alert,
                              ApiIsDownAlert)

        # Critical alert already sent, so no new critical alert is produced
        self.counter_channel.reset()
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(0, self.counter_channel.critical_count)

        # To reset state
        self.wrapper.set_api_as_up("", self.channel_set)
        self.counter_channel.reset()

        # Not enough time passed - no critical alert sent yet
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(0, self.counter_channel.critical_count)
        self.counter_channel.reset()

        # Not enough time passed - still no critical alert
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.assertEqual(0, self.counter_channel.critical_count)

    def test_set_api_as_down_raises_no_critical_alert_for_non_val_monitors(
            self) -> None:

        # Not enough time passed
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertEqual(0, self.counter_channel.critical_count)
        self.counter_channel.reset()

        # Enough time passed
        sleep(self.max_time_more)
        self.wrapper.set_api_as_down("", False, self.channel_set)
        self.assertEqual(0, self.counter_channel.critical_count)

    def test_set_api_as_up_produces_info_alert_if_api_is_down(self):
        self.wrapper.set_api_as_down("", True, self.channel_set)
        self.counter_channel.reset()
        self.wrapper.set_api_as_up("", self.channel_set)
        self.assertEqual(1, self.counter_channel.info_count)
        self.assertIsInstance(self.counter_channel.latest_alert,
                              ApiIsUpAgainAlert)

    def test_set_api_as_up_produces_no_alert_if_api_is_up(self):
        self.wrapper.set_api_as_up("", self.channel_set)
        self.assertTrue(self.counter_channel.no_alerts())
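
# Note: the timed tests above exercise behaviour where the wrapper escalates
# to a critical alert for validator monitors only once the API has been down
# for longer than a configured interval (max_time in this test's internal
# config), and does so at most once per downtime. A rough, hypothetical sketch
# of that timing logic, with illustrative names only, is given below.
from datetime import datetime, timedelta


class DownTimer:
    def __init__(self, max_time_seconds: int) -> None:
        self._max = timedelta(seconds=max_time_seconds)
        self._down_since = None
        self._escalated = False

    def mark_down(self) -> bool:
        # Returns True exactly once, when the downtime first exceeds the limit.
        now = datetime.now()
        if self._down_since is None:
            self._down_since = now
            return False
        if not self._escalated and now - self._down_since >= self._max:
            self._escalated = True
            return True
        return False

    def mark_up(self) -> None:
        self._down_since = None
        self._escalated = False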
Example #8
0
    logger_redis = create_logger(InternalConf.redis_log_file, 'redis',
                                 InternalConf.logging_level)
    logger_mongo = create_logger(InternalConf.mongo_log_file, 'mongo',
                                 InternalConf.logging_level)
    logger_general = create_logger(InternalConf.general_log_file,
                                   'general',
                                   InternalConf.logging_level,
                                   rotating=True)
    logger_commands_telegram = create_logger(
        InternalConf.telegram_commands_general_log_file,
        'commands_telegram',
        InternalConf.logging_level,
        rotating=True)
    log_file_alerts = InternalConf.alerts_log_file
    polkadot_api_data_wrapper = \
        PolkadotApiWrapper(logger_general, UserConf.polkadot_api_endpoint)

    # Redis initialisation
    if UserConf.redis_enabled:
        REDIS = RedisApi(logger_redis,
                         InternalConf.redis_database,
                         UserConf.redis_host,
                         UserConf.redis_port,
                         password=UserConf.redis_password,
                         namespace=UserConf.unique_alerter_identifier)
    else:
        REDIS = None

    # Mongo DB initialisation
    if UserConf.mongo_enabled:
        MONGO = MongoApi(logger_mongo,
Example #9
0
class NodeMonitor(Monitor):
    def __init__(self,
                 monitor_name: str,
                 channels: ChannelSet,
                 logger: logging.Logger,
                 node_monitor_max_catch_up_blocks: int,
                 redis: Optional[RedisApi],
                 node: Node,
                 archive_alerts_disabled: bool,
                 data_sources: List[Node],
                 polkadot_api_endpoint: str,
                 internal_conf: InternalConfig = InternalConf):
        super().__init__(monitor_name, channels, logger, redis, internal_conf)

        self._node = node
        self._data_wrapper = PolkadotApiWrapper(logger, polkadot_api_endpoint)
        self._node_monitor_max_catch_up_blocks = \
            node_monitor_max_catch_up_blocks

        self._redis_alive_key = \
            self._internal_conf.redis_node_monitor_alive_key_prefix + \
            self._monitor_name
        self._redis_alive_key_timeout = \
            self._internal_conf.redis_node_monitor_alive_key_timeout
        self._redis_last_height_key_timeout = \
            self._internal_conf.redis_node_monitor_last_height_key_timeout
        self._redis_session_index_key = \
            self._internal_conf.redis_node_monitor_session_index_key_prefix \
            + self._monitor_name
        self._redis_last_height_checked_key = \
            self._internal_conf. \
                redis_node_monitor_last_height_checked_key_prefix \
            + self._monitor_name

        # The data sources for indirect monitoring are all nodes from the same
        # chain which have been set as a data source in the config.
        self._indirect_monitoring_data_sources = data_sources

        # The data sources for archive monitoring are all archive nodes from
        # the same chain that have been set as a data source in the config.
        self._archive_monitoring_data_sources = [
            node for node in data_sources if node.is_archive_node
        ]
        self.last_data_source_used = None
        self._last_height_checked = NONE
        self._session_index = None
        self._monitor_is_catching_up = False
        self._indirect_monitoring_disabled = len(data_sources) == 0
        self._no_live_archive_node_alert_sent = False
        self._archive_alerts_disabled = archive_alerts_disabled

        self.load_state()

    def is_catching_up(self) -> bool:
        return self._monitor_is_catching_up

    @property
    def indirect_monitoring_disabled(self) -> bool:
        return self._indirect_monitoring_disabled

    @property
    def node(self) -> Node:
        return self._node

    @property
    def session_index(self) -> int:
        return self._session_index

    @property
    def last_height_checked(self) -> int:
        return self._last_height_checked

    @property
    def no_live_archive_node_alert_sent(self) -> bool:
        return self._no_live_archive_node_alert_sent

    @property
    def data_wrapper(self) -> PolkadotApiWrapper:
        return self._data_wrapper

    @property
    def indirect_monitoring_data_sources(self) -> List[Node]:
        return self._indirect_monitoring_data_sources

    @property
    def archive_monitoring_data_sources(self) -> List[Node]:
        return self._archive_monitoring_data_sources

    # The data_source_indirect property returns a node for indirect
    # monitoring. Since indirect monitoring does not require data from past
    # chain state, it may return a node which is not an archive node.
    @property
    def data_source_indirect(self) -> Node:
        nodes_connected_to_an_api = \
            self.data_wrapper.get_web_sockets_connected_to_an_api()
        # Get one of the nodes to use as data source
        for n in self._indirect_monitoring_data_sources:
            if n.ws_url in nodes_connected_to_an_api and not n.is_down:
                self.last_data_source_used = n
                self._data_wrapper.ping_node(n.ws_url)
                return n
        raise NoLiveNodeConnectedWithAnApiServerException()

    # The data_source_archive property returns a node for archive monitoring.
    # Since archive monitoring requires data from past chain state, it returns
    # only nodes which are archive nodes.
    @property
    def data_source_archive(self) -> Node:
        nodes_connected_to_an_api = \
            self.data_wrapper.get_web_sockets_connected_to_an_api()
        # Get one of the archive nodes to use as data source
        for n in self._archive_monitoring_data_sources:
            if n.ws_url in nodes_connected_to_an_api and not n.is_down:
                self.last_data_source_used = n
                self._data_wrapper.ping_node(n.ws_url)
                return n
        raise NoLiveArchiveNodeConnectedWithAnApiServerException()

    def load_state(self) -> None:
        # If Redis is enabled, load the session index, and last height checked
        # for slashing if any.
        if self.redis_enabled:
            self._session_index = self.redis.get_int(
                self._redis_session_index_key, None)
            self._last_height_checked = self.redis.get_int(
                self._redis_last_height_checked_key, NONE)

            self.logger.debug('Restored %s state: %s=%s, %s=%s',
                              self._monitor_name,
                              self._redis_session_index_key,
                              self._session_index,
                              self._redis_last_height_checked_key,
                              self._last_height_checked)

    def save_state(self) -> None:
        # If Redis is enabled, save the current time, indicating that the node
        # monitor was alive at this time, the current session index,
        # and the last height checked.
        if self.redis_enabled:
            self.logger.debug('Saving node monitor state: %s=%s, %s=%s',
                              self._monitor_name,
                              self._redis_session_index_key,
                              self._session_index,
                              self._redis_last_height_checked_key,
                              self._last_height_checked)

            # Set session index key
            self.redis.set(self._redis_session_index_key, self._session_index)

            # Set last height checked key
            key = self._redis_last_height_checked_key
            until = timedelta(seconds=self._redis_last_height_key_timeout)
            self.redis.set_for(key, self._last_height_checked, until)

            # Set alive key (to be able to query latest update from Telegram)
            key = self._redis_alive_key
            until = timedelta(seconds=self._redis_alive_key_timeout)
            self.redis.set_for(key, str(datetime.now()), until)

    def status(self) -> str:
        if self._node.is_validator:
            return self._node.status() + \
                   ', session_index={}, last_height_checked={}' \
                       .format(self._session_index, self._last_height_checked)
        else:
            return self._node.status()

    def monitor_direct(self) -> None:
        # Check if node is accessible
        self._logger.debug('Checking if %s is alive', self._node)
        self._data_wrapper.ping_node(self._node.ws_url)
        self._node.set_as_up(self.channels, self.logger)

        # Get system_health
        system_health = self.data_wrapper.get_system_health(self._node.ws_url)

        # Get finalized block header
        finalized_head = self.data_wrapper.get_finalized_head(self.node.ws_url)
        finalized_block_header = self.data_wrapper.get_header(
            self.node.ws_url, finalized_head)

        # Set is-syncing
        is_syncing = system_health['isSyncing']
        self._logger.debug('%s is syncing: %s', self._node, is_syncing)
        self._node.set_is_syncing(is_syncing, self.channels, self.logger)

        # Set number of peers
        no_of_peers = system_health['peers']
        self._logger.debug('%s no. of peers: %s', self._node, no_of_peers)
        self._node.set_no_of_peers(no_of_peers, self.channels, self.logger)

        # Update finalized block
        finalized_block_height = parse_int_from_string(
            str(finalized_block_header['number']))
        self._logger.debug('%s finalized_block_height: %s', self._node,
                           finalized_block_height)
        self._node.update_finalized_block_height(finalized_block_height,
                                                 self.logger, self.channels)

        # Set API as up
        self.data_wrapper.set_api_as_up(self.monitor_name, self.channels)

    def _check_for_slashing(self, height_to_check: int, archive_node: Node) \
            -> None:
        block_hash = self.data_wrapper.get_block_hash(archive_node.ws_url,
                                                      height_to_check)
        slash_amount = self.data_wrapper.get_slash_amount(
            archive_node.ws_url, block_hash, self.node.stash_account_address)

        if slash_amount > 0:
            scaled_slash_amount = round(scale_to_pico(slash_amount), 3)
            self.node.slash(scaled_slash_amount, self.channels, self.logger)

    def _check_for_new_session(self, new_session_index: int) -> None:

        self._logger.debug('%s session_index: %s', self._node,
                           new_session_index)

        if self._session_index is None:
            self._session_index = new_session_index
        elif self._session_index < new_session_index:
            self._session_index = new_session_index
            self._node.set_time_of_last_block(NONE, self.channels, self.logger)
            self._node.reset_no_of_blocks_authored(self.channels, self.logger)
            self._node.blocks_authored_alert_limiter.did_task()
            self._node.set_is_authoring(True, self.channels, self.logger)
            self._node.set_time_of_last_block_check_activity(
                NONE, self.channels, self.logger)

    def _monitor_archive_state(self) -> None:
        # Check for slashing.
        # The archive data source is saved locally so that the same node's
        # finalized height is used throughout this round, avoiding situations
        # where last_height_to_check < finalized_block_height.
        archive_node = self.data_source_archive
        last_height_to_check = archive_node.finalized_block_height
        if self._last_height_checked == NONE:
            self._last_height_checked = last_height_to_check - 1
        height_to_check = self._last_height_checked + 1

        # If the data source node's finalized height does not exceed the
        # height already checked, there is no new block to check.
        if last_height_to_check < height_to_check:
            pass
        elif last_height_to_check - self._last_height_checked > \
                self._node_monitor_max_catch_up_blocks:
            height_to_check = last_height_to_check - \
                              self._node_monitor_max_catch_up_blocks
            self._check_for_slashing(height_to_check, archive_node)
            self._last_height_checked = height_to_check
        elif height_to_check <= last_height_to_check:
            self._check_for_slashing(height_to_check, archive_node)
            self._last_height_checked = height_to_check

        if last_height_to_check - self._last_height_checked > 2:
            self._monitor_is_catching_up = True
        else:
            self._monitor_is_catching_up = False

        # Unset, so that if in the next monitoring round an archive node is not
        # found, the operator is informed accordingly.
        if self._no_live_archive_node_alert_sent:
            self._no_live_archive_node_alert_sent = False
            self.channels.alert_info(
                FoundLiveArchiveNodeAgainAlert(self.monitor_name))

    def _monitor_indirect_validator(self) -> None:
        session_validators = self.data_wrapper.get_session_validators(
            self.data_source_indirect.ws_url)
        stakers_json = self.data_wrapper.get_stakers(
            self.data_source_indirect.ws_url, self._node.stash_account_address)
        council_members = self.data_wrapper.get_council_members(
            self.data_source_indirect.ws_url)
        elected_validators = self.data_wrapper.get_current_elected(
            self.data_source_indirect.ws_url)
        new_session_index = self.data_wrapper.get_current_index(
            self.data_source_indirect.ws_url)
        new_number_of_blocks_authored = self.data_wrapper. \
            get_authored_blocks(self.data_source_indirect.ws_url,
                                new_session_index,
                                self.node.stash_account_address)
        disabled_validators = self.data_wrapper.get_disabled_validators(
            self.data_source_indirect.ws_url)

        # Set active
        is_active = self._node.stash_account_address in session_validators
        self._logger.debug('%s active: %s', self._node, is_active)
        self.node.set_active(is_active, self.channels, self.logger)

        # Set auth_index
        if self._node.is_active:
            new_auth_index = session_validators.index(
                self.node.stash_account_address)
            self._logger.debug('%s auth_index: %s', self._node, new_auth_index)
            self._node.set_auth_index(new_auth_index, self.logger)

        # Set disabled
        is_disabled = self.node.auth_index in disabled_validators
        self._logger.debug('%s disabled: %s', self._node, is_disabled)
        self.node.set_disabled(is_disabled, new_session_index, self.channels,
                               self.logger)

        # Set elected
        is_elected = self._node.stash_account_address in elected_validators
        self._logger.debug('%s elected: %s', self._node, is_elected)
        self.node.set_elected(is_elected, self.channels, self.logger)

        # Set bonded_balance
        bonded_balance = parse_int_from_string(str(stakers_json['total']))
        self._logger.debug('%s bonded_balance: %s', self._node, bonded_balance)
        self._node.set_bonded_balance(bonded_balance, self.channels,
                                      self.logger)

        # Set council_member
        is_council_member = self._node.stash_account_address in council_members
        self._logger.debug('%s is council member: %s', self._node,
                           is_council_member)
        self.node.set_council_member(is_council_member, self.channels,
                                     self.logger)

        # Set session index
        self._check_for_new_session(new_session_index)

        # Set number of blocks authored
        self._logger.debug('%s number_of_blocks_authored: %s', self._node,
                           new_number_of_blocks_authored)
        self._node.set_no_of_blocks_authored(self.channels, self.logger,
                                             new_number_of_blocks_authored,
                                             self._session_index)

        if not self._archive_alerts_disabled:
            self._monitor_archive_state()

    def _monitor_indirect_full_node(self) -> None:
        # The session index is not needed for full nodes, and thus must be
        # given a dummy value since NoneType values cannot be saved in Redis.
        self._session_index = NONE

        # Set bonded balance
        balance = 0
        self._logger.debug('%s balance: %s', self._node, balance)
        self._node.set_bonded_balance(balance, self.channels, self.logger)

        # Set active
        self._logger.debug('%s is active: %s', self._node, False)
        self._node.set_active(False, self.channels, self.logger)

        # Set disabled
        self._logger.debug('%s is disabled: %s', self._node, False)
        self._node.set_disabled(False, self._session_index, self.channels,
                                self.logger)

        # Set elected
        self._logger.debug('%s is elected: %s', self._node, False)
        self._node.set_elected(False, self.channels, self.logger)

        # Set council_member
        self._logger.debug('%s is council member: %s', self._node, False)
        self._node.set_council_member(False, self.channels, self.logger)

    def monitor_indirect(self) -> None:
        if self._node.is_validator:
            self._monitor_indirect_validator()

            # Set API as up
            self.data_wrapper.set_api_as_up(self.monitor_name, self.channels)
        else:
            self._monitor_indirect_full_node()

    def monitor(self) -> None:
        # Monitor part of the node state by querying the node directly
        self.monitor_direct()
        # Monitor part of the node state by querying the node indirectly if
        # indirect monitoring is enabled.
        if not self.indirect_monitoring_disabled:
            self.monitor_indirect()

        # Output status
        self._logger.info('%s status: %s', self._node, self.status())
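
# Note: to make the clamping in _monitor_archive_state concrete, here is a
# small worked illustration with hypothetical numbers. With a catch-up limit
# of 500 blocks, a monitor that is 1000 blocks behind jumps ahead instead of
# checking every missed block.
if __name__ == '__main__':
    node_monitor_max_catch_up_blocks = 500
    last_height_checked = 1_000        # last height this monitor checked
    finalized_block_height = 2_000     # archive data source's finalized height

    height_to_check = last_height_checked + 1
    if finalized_block_height - last_height_checked > \
            node_monitor_max_catch_up_blocks:
        height_to_check = \
            finalized_block_height - node_monitor_max_catch_up_blocks

    print(height_to_check)  # 1500: heights 1001-1499 are skipped this round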
Example #10
0
def setup_nodes(cp: ConfigParser, api_endpoint: str) -> None:
    print('==== Nodes')
    print(
        'To produce alerts, the alerter needs something to monitor! The list '
        'of nodes to be included in the monitoring will now be set up. This '
        'includes validators, sentries, and whether these nodes can be used '
        'as data sources to monitor a node\'s state indirectly. You may '
        'include nodes from multiple substrate chains in any order. PANIC '
        'will group them up automatically. Node names must be unique!\n\n'
        'Note that you will be asked whether a node is an archive node or '
        'not. This is done because for archive monitoring (which includes '
        'detecting and alerting on slashing events), the alerter needs '
        'blockchain data from the past. You do not need any archive data '
        'source nodes to run PANIC, but for archive monitoring to be enabled '
        'for a chain you must have at least one for that chain.')

    # Check if list already set up
    if len(cp.sections()) > 0 and \
            not yn_prompt('The list of nodes is already set up. Do you wish to '
                          'clear this list? You will then be asked to set up a '
                          'new list of nodes, if you wish to do so (Y/n)\n'):
        return

    # Clear config and initialise new list
    cp.clear()
    nodes = []

    # Ask if they want to set it up
    if not yn_prompt('Do you wish to set up the list of nodes? (Y/n)\n'):
        return

    # Get node details and append them to the list of nodes
    while True:
        # Check that API is running by retrieving some data which will be used.
        polkadot_api_data_wrapper = PolkadotApiWrapper(DUMMY_LOGGER,
                                                       api_endpoint)
        while True:
            try:
                web_sockets_connected_to_api = polkadot_api_data_wrapper. \
                    get_web_sockets_connected_to_an_api()
                break
            except Exception:
                if not yn_prompt(
                        'Could not connect with the API Server at '
                        '\'{}\'. Please make sure that the API Server '
                        'is running at the provided IP before '
                        'proceeding further. Do you want to try '
                        'again? (Y/n)\n'.format(api_endpoint)):
                    return
        node = get_node(nodes, polkadot_api_data_wrapper,
                        web_sockets_connected_to_api)
        if node is not None:
            nodes.append(node)
            if node.node_is_validator:
                print('Successfully added validator node.')
            else:
                print('Successfully added full node.')

        if not yn_prompt('Do you want to add another node? (Y/n)\n'):
            break

    # Add nodes to config
    for i, node in enumerate(nodes):
        section = 'node_' + str(i)
        cp.add_section(section)
        cp[section]['node_name'] = node.node_name
        cp[section]['chain_name'] = node.chain_name
        cp[section]['node_ws_url'] = node.node_ws_url
        cp[section]['node_is_validator'] = str(node.node_is_validator)
        cp[section]['is_archive_node'] = str(node.is_archive_node)
        cp[section]['monitor_node'] = str(node.monitor_node)
        cp[section]['use_as_data_source'] = str(node.use_as_data_source)
        cp[section]['stash_account_address'] = node.stash_account_address
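
# Note: a quick way to sanity-check the sections written by setup_nodes,
# assuming the caller later saves cp to a file; the path below is illustrative
# only.
if __name__ == '__main__':
    from configparser import ConfigParser

    cp = ConfigParser()
    cp.read('config/user_config_nodes.ini')  # illustrative path
    for section in cp.sections():
        print(section,
              cp[section]['node_name'],
              cp[section]['node_ws_url'],
              cp[section].getboolean('node_is_validator'))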
Example #11
0
class BlockchainMonitor(Monitor):
    def __init__(self,
                 monitor_name: str,
                 blockchain: Blockchain,
                 channels: ChannelSet,
                 logger: logging.Logger,
                 redis: Optional[RedisApi],
                 data_sources: List[Node],
                 polkadot_api_endpoint: str,
                 internal_conf: InternalConfig = InternalConf):
        super().__init__(monitor_name, channels, logger, redis, internal_conf)

        self._blockchain = blockchain
        self.data_sources = data_sources
        self._data_wrapper = PolkadotApiWrapper(logger, polkadot_api_endpoint)

        self.last_data_source_used = None

        self._redis_alive_key_timeout = \
            self._internal_conf.redis_blockchain_monitor_alive_key_timeout

    @property
    def data_wrapper(self) -> PolkadotApiWrapper:
        return self._data_wrapper

    @property
    def blockchain(self) -> Blockchain:
        return self._blockchain

    def save_state(self) -> None:
        # If Redis is enabled, save the current time, indicating that the
        # monitor was alive at this time.
        if self.redis_enabled:
            self.logger.debug('Saving %s state', self._monitor_name)

            # Set alive key (to be able to query latest update from Telegram)
            key = Keys.get_blockchain_monitor_alive(self.monitor_name)
            until = timedelta(seconds=self._redis_alive_key_timeout)
            self.redis.set_for(key, str(datetime.now().timestamp()), until)

    @property
    def data_source(self) -> Node:
        nodes_connected_to_an_api = \
            self.data_wrapper.get_web_sockets_connected_to_an_api()
        # Get one of the nodes to use as data source
        for n in self.data_sources:
            if n.ws_url in nodes_connected_to_an_api and not n.is_down:
                self.last_data_source_used = n
                self._data_wrapper.ping_node(n.ws_url)
                return n
        raise NoLiveNodeConnectedWithAnApiServerException()

    def status(self) -> str:
        return self.blockchain.status()

    def _check_for_new_referendums(self, new_referendum_count: int) -> None:

        if self.blockchain.referendum_count is None:
            self.blockchain.set_referendum_count(new_referendum_count,
                                                 self.channels, self.logger)
            return

        while self.blockchain.referendum_count < new_referendum_count:
            referendum_info = self._data_wrapper.get_referendum_info_of(
                self.data_source.ws_url, self.blockchain.referendum_count)
            self.blockchain.set_referendum_count(
                self.blockchain.referendum_count + 1, self.channels,
                self.logger, referendum_info)

    def monitor(self) -> None:
        # Get new data.
        new_referendum_count = self._data_wrapper.get_referendum_count(
            self.data_source.ws_url)
        new_council_prop_count = self._data_wrapper.get_council_proposal_count(
            self.data_source.ws_url)
        new_public_prop_count = self._data_wrapper.get_public_proposal_count(
            self.data_source.ws_url)
        session_validators = self._data_wrapper.get_session_validators(
            self.data_source.ws_url)
        new_validator_set_size = len(session_validators)

        # Check for referendums
        self._logger.debug('%s referendum_count: %s', self.blockchain,
                           new_referendum_count)
        self._check_for_new_referendums(new_referendum_count)

        # Set council prop count
        self._logger.debug('%s council_prop_count: %s', self.blockchain,
                           new_council_prop_count)
        self.blockchain.set_council_prop_count(new_council_prop_count,
                                               self.channels, self.logger)

        # Set public prop count
        self._logger.debug('%s public_prop_count: %s', self.blockchain,
                           new_public_prop_count)
        self.blockchain.set_public_prop_count(new_public_prop_count,
                                              self.channels, self.logger)

        # Set validator set size
        self._logger.debug('%s validator_set_size: %s', self.blockchain,
                           new_validator_set_size)
        self.blockchain.set_validator_set_size(new_validator_set_size,
                                               self.channels, self.logger)

        # Set API as up
        self.data_wrapper.set_api_as_up(self.monitor_name, self.channels)

        self.logger.info('%s status: %s', self._monitor_name, self.status())
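
# Note: to illustrate the catch-up loop in _check_for_new_referendums with
# hypothetical numbers: if the stored referendum count is 3 and the chain now
# reports 5, referendum info is fetched for indices 3 and 4, one per
# iteration.
if __name__ == '__main__':
    referendum_count = 3        # stored state (hypothetical)
    new_referendum_count = 5    # reported by the data source (hypothetical)
    fetched_indices = []
    while referendum_count < new_referendum_count:
        fetched_indices.append(referendum_count)  # index for get_referendum_info_of
        referendum_count += 1
    print(fetched_indices)  # [3, 4]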