Example #1
def get_external_ip_from_centralized_source(log: Logger = IP_DETECTION_LOGGER) -> Union[str, None]:
    """Use hardcoded URL to determine the external IP address of this host."""
    ip = _request(url=CENTRALIZED_IP_ORACLE_URL)
    if ip:
        log.info(f'Fetched external IP address ({ip}) from centralized source ({CENTRALIZED_IP_ORACLE_URL}).')
    return ip
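A minimal, hypothetical call site for the helper above; the fallback logging is an illustration and not part of the original module:

external_ip = get_external_ip_from_centralized_source()
if external_ip is None:
    IP_DETECTION_LOGGER.info('Could not fetch an external IP address from the centralized source.')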
Example #2
class Porter(Learner):

    BANNER = r"""

 ______
(_____ \           _
 _____) )__   ____| |_  ____  ____
|  ____/ _ \ / ___)  _)/ _  )/ ___)
| |   | |_| | |   | |_( (/ /| |
|_|    \___/|_|    \___)____)_|

the Pipe for nucypher network operations
"""

    APP_NAME = "Porter"

    _SHORT_LEARNING_DELAY = 2
    _LONG_LEARNING_DELAY = 30
    _ROUNDS_WITHOUT_NODES_AFTER_WHICH_TO_SLOW_DOWN = 25

    DEFAULT_EXECUTION_TIMEOUT = 10  # 10s

    DEFAULT_PORT = 9155

    _interface_class = PorterInterface

    class UrsulaInfo(NamedTuple):
        """Simple object that stores relevant Ursula information resulting from sampling."""
        checksum_address: ChecksumAddress
        uri: str
        encrypting_key: PublicKey

    def __init__(self,
                 domain: str = None,
                 registry: BaseContractRegistry = None,
                 controller: bool = True,
                 federated_only: bool = False,
                 node_class: object = Ursula,
                 provider_uri: str = None,
                 *args,
                 **kwargs):
        self.federated_only = federated_only

        if not self.federated_only:
            if not provider_uri:
                raise ValueError(
                    'Provider URI is required for decentralized Porter.')

            if not BlockchainInterfaceFactory.is_interface_initialized(
                    provider_uri=provider_uri):
                BlockchainInterfaceFactory.initialize_interface(
                    provider_uri=provider_uri)

            self.registry = registry or InMemoryContractRegistry.from_latest_publication(
                network=domain)
            self.staking_agent = ContractAgency.get_agent(
                StakingEscrowAgent, registry=self.registry)
        else:
            self.registry = NO_BLOCKCHAIN_CONNECTION.bool_value(False)
            node_class.set_federated_mode(federated_only)

        super().__init__(save_metadata=True,
                         domain=domain,
                         node_class=node_class,
                         *args,
                         **kwargs)

        self.log = Logger(self.__class__.__name__)

        # Controller Interface
        self.interface = self._interface_class(porter=self)
        self.controller = NO_CONTROL_PROTOCOL
        if controller:
            # TODO need to understand this better - only made it analogous to what was done for characters
            self.make_cli_controller()
        self.log.info(self.BANNER)

    def get_ursulas(
        self,
        quantity: int,
        duration_periods: int = None,  # optional for federated mode
        exclude_ursulas: Optional[Sequence[ChecksumAddress]] = None,
        include_ursulas: Optional[Sequence[ChecksumAddress]] = None
    ) -> List[UrsulaInfo]:
        reservoir = self._make_staker_reservoir(quantity, duration_periods,
                                                exclude_ursulas,
                                                include_ursulas)
        value_factory = PrefetchStrategy(reservoir, quantity)

        def get_ursula_info(ursula_address) -> Porter.UrsulaInfo:
            if ursula_address not in self.known_nodes:
                raise ValueError(f"{ursula_address} is not known")

            ursula = self.known_nodes[ursula_address]
            try:
                # verify node is valid
                self.network_middleware.client.verify_and_parse_node_or_host_and_port(
                    node_or_sprout=ursula, host=None, port=None)

                return Porter.UrsulaInfo(
                    checksum_address=ursula_address,
                    uri=f"{ursula.rest_interface.formal_uri}",
                    encrypting_key=ursula.public_keys(DecryptingPower))
            except Exception as e:
                self.log.debug(
                    f"Unable to obtain Ursula information ({ursula_address}): {str(e)}"
                )
                raise

        self.block_until_number_of_known_nodes_is(
            quantity,
            timeout=self.DEFAULT_EXECUTION_TIMEOUT,
            learn_on_this_thread=True,
            eager=True)

        worker_pool = WorkerPool(worker=get_ursula_info,
                                 value_factory=value_factory,
                                 target_successes=quantity,
                                 timeout=self.DEFAULT_EXECUTION_TIMEOUT,
                                 stagger_timeout=1,
                                 threadpool_size=quantity)
        worker_pool.start()
        successes = worker_pool.block_until_target_successes()
        ursulas_info = successes.values()
        return list(ursulas_info)

    def retrieve_cfrags(
        self,
        treasure_map: TreasureMap,
        retrieval_kits: Sequence[RetrievalKit],
        alice_verifying_key: PublicKey,
        bob_encrypting_key: PublicKey,
        bob_verifying_key: PublicKey,
    ) -> List[RetrievalResult]:
        client = RetrievalClient(self)
        return client.retrieve_cfrags(treasure_map, retrieval_kits,
                                      alice_verifying_key, bob_encrypting_key,
                                      bob_verifying_key)

    def _make_staker_reservoir(
            self,
            quantity: int,
            duration_periods: int = None,  # optional for federated mode
            exclude_ursulas: Optional[Sequence[ChecksumAddress]] = None,
            include_ursulas: Optional[Sequence[ChecksumAddress]] = None):
        if self.federated_only:
            sample_size = quantity - (len(include_ursulas)
                                      if include_ursulas else 0)
            if not self.block_until_number_of_known_nodes_is(
                    sample_size,
                    timeout=self.DEFAULT_EXECUTION_TIMEOUT,
                    learn_on_this_thread=True):
                raise ValueError("Unable to learn about sufficient Ursulas")
            return make_federated_staker_reservoir(
                known_nodes=self.known_nodes,
                exclude_addresses=exclude_ursulas,
                include_addresses=include_ursulas)
        else:
            if not duration_periods:
                raise ValueError(
                    "Duration periods must be provided in decentralized mode")
            return make_decentralized_staker_reservoir(
                staking_agent=self.staking_agent,
                duration_periods=duration_periods,
                exclude_addresses=exclude_ursulas,
                include_addresses=include_ursulas)

    def make_cli_controller(self, crash_on_error: bool = False):
        controller = PorterCLIController(app_name=self.APP_NAME,
                                         crash_on_error=crash_on_error,
                                         interface=self.interface)
        self.controller = controller
        return controller

    def make_rpc_controller(self, crash_on_error: bool = False):
        controller = JSONRPCController(app_name=self.APP_NAME,
                                       crash_on_error=crash_on_error,
                                       interface=self.interface)

        self.controller = controller
        return controller

    def make_web_controller(self,
                            crash_on_error: bool = False,
                            htpasswd_filepath: Path = None,
                            cors_allow_origins_list: List[str] = None):
        controller = WebController(
            app_name=self.APP_NAME,
            crash_on_error=crash_on_error,
            interface=self._interface_class(porter=self))
        self.controller = controller

        # Register Flask Decorator
        porter_flask_control = controller.make_control_transport()

        # CORS origins
        if cors_allow_origins_list:
            try:
                from flask_cors import CORS
            except ImportError:
                raise ImportError(
                    'Porter installation is required to specify CORS origins '
                    '- run "pip install nucypher[porter]" and try again.')
            _ = CORS(app=porter_flask_control, origins=cors_allow_origins_list)

        # Basic Auth
        if htpasswd_filepath:
            try:
                from flask_htpasswd import HtPasswdAuth
            except ImportError:
                raise ImportError(
                    'Porter installation is required for basic authentication '
                    '- run "pip install nucypher[porter]" and try again.')

            porter_flask_control.config['FLASK_HTPASSWD_PATH'] = str(
                htpasswd_filepath.absolute())
            # ensure basic auth required for all endpoints
            porter_flask_control.config['FLASK_AUTH_ALL'] = True
            _ = HtPasswdAuth(app=porter_flask_control)

        #
        # Porter Control HTTP Endpoints
        #
        @porter_flask_control.route('/get_ursulas', methods=['GET'])
        def get_ursulas() -> Response:
            """Porter control endpoint for sampling Ursulas on behalf of Alice."""
            response = controller(method_name='get_ursulas',
                                  control_request=request)
            return response

        @porter_flask_control.route("/revoke", methods=['POST'])
        def revoke():
            """Porter control endpoint for off-chain revocation of a policy on behalf of Alice."""
            response = controller(method_name='revoke',
                                  control_request=request)
            return response

        @porter_flask_control.route("/retrieve_cfrags", methods=['POST'])
        def retrieve_cfrags() -> Response:
            """Porter control endpoint for executing a PRE work order on behalf of Bob."""
            response = controller(method_name='retrieve_cfrags',
                                  control_request=request)
            return response

        return controller
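A hedged sketch of standing up a decentralized Porter and sampling Ursulas, assuming a reachable Ethereum provider and an already-learned peer set; the endpoint and quantities below are placeholders, not defaults:

porter = Porter(domain='mainnet',
                provider_uri='https://<your-eth-node>:8545')
ursulas = porter.get_ursulas(quantity=3, duration_periods=30)
for ursula in ursulas:
    print(ursula.checksum_address, ursula.uri)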
Example #3
class BlockchainInterface:
    """
    Interacts with a solidity compiler and a registry in order to instantiate compiled
    ethereum contracts with the given web3 provider backend.
    """

    TIMEOUT = 600  # seconds  # TODO: Correlate with the gas strategy - #2070

    DEFAULT_GAS_STRATEGY = 'fast'
    GAS_STRATEGIES = WEB3_GAS_STRATEGIES

    Web3 = Web3  # TODO: This is name-shadowing the actual Web3. Is this intentional?

    _CONTRACT_FACTORY = VersionedContract

    class InterfaceError(Exception):
        pass

    class NoProvider(InterfaceError):
        pass

    class UnsupportedProvider(InterfaceError):
        pass

    class ConnectionFailed(InterfaceError):
        pass

    class UnknownContract(InterfaceError):
        pass

    REASONS = {
        INSUFFICIENT_ETH: 'insufficient funds for gas * price + value',
    }

    class TransactionFailed(InterfaceError):

        IPC_CODE = -32000

        def __init__(self, message: str, transaction_dict: dict,
                     contract_function: Union[ContractFunction,
                                              ContractConstructor], *args):

            self.base_message = message
            self.name = get_transaction_name(
                contract_function=contract_function)
            self.payload = transaction_dict
            self.contract_function = contract_function
            self.failures = {
                BlockchainInterface.REASONS[INSUFFICIENT_ETH]:
                self.insufficient_eth
            }
            self.message = self.failures.get(self.base_message, self.default)
            super().__init__(self.message, *args)

        @property
        def default(self) -> str:
            sender = self.payload["from"]
            message = f'{self.name} from {sender[:6]}... \n' \
                      f'Sender balance: {prettify_eth_amount(self.get_balance())} \n' \
                      f'Reason: {self.base_message} \n' \
                      f'Transaction: {self.payload}'
            return message

        def get_balance(self):
            blockchain = BlockchainInterfaceFactory.get_interface()
            balance = blockchain.client.get_balance(
                account=self.payload['from'])
            return balance

        @property
        def insufficient_eth(self) -> str:
            try:
                transaction_fee = self.payload['gas'] * self.payload['gasPrice']
            except KeyError:
                return self.default
            else:
                cost = transaction_fee + self.payload.get('value', 0)
                message = f'{self.name} from {self.payload["from"][:8]} - {self.base_message}. ' \
                          f'Calculated cost is {prettify_eth_amount(cost)}, ' \
                          f'but sender only has {prettify_eth_amount(self.get_balance())}.'
            return message

    def __init__(
            self,
            emitter=None,  # TODO # 1754
            poa: bool = None,
            light: bool = False,
            provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
            provider: BaseProvider = NO_BLOCKCHAIN_CONNECTION,
            gas_strategy: Optional[Union[str, Callable]] = None,
            max_gas_price: Optional[int] = None):
        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ... (Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the solidity compiler, a contract registry, and a collection of
        web3 network providers as a means of interfacing with the ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION
        self.is_light = light

        # TODO: Not ready to give users total flexibility. Let's stick for the moment to known values. See #2447
        if gas_strategy not in ('slow', 'medium', 'fast', 'free',
                                None):  # FIXME: What is 'None' doing here?
            raise ValueError(f"'{gas_strategy}' is an invalid gas strategy")
        self.gas_strategy = gas_strategy or self.DEFAULT_GAS_STRATEGY
        self.max_gas_price = max_gas_price

    def __repr__(self):
        r = '{name}({uri})'.format(name=self.__class__.__name__,
                                   uri=self.provider_uri)
        return r

    def get_blocktime(self):
        return self.client.get_blocktime()

    @property
    def is_connected(self) -> bool:
        """
        https://web3py.readthedocs.io/en/stable/__provider.html#examples-using-automated-detection
        """
        if self.client is NO_BLOCKCHAIN_CONNECTION:
            return False
        return self.client.is_connected

    @classmethod
    def get_gas_strategy(cls,
                         gas_strategy: Union[str,
                                             Callable] = None) -> Callable:
        try:
            gas_strategy = cls.GAS_STRATEGIES[gas_strategy]
        except KeyError:
            if gas_strategy:
                if not callable(gas_strategy):
                    raise ValueError(
                        f"{gas_strategy} must be callable to be a valid gas strategy."
                    )
            else:
                gas_strategy = cls.GAS_STRATEGIES[cls.DEFAULT_GAS_STRATEGY]
        return gas_strategy

    def attach_middleware(self):
        chain_id = int(self.client.chain_id)
        if self.poa is None:  # If POA is not set explicitly, try to autodetect from chain id
            self.poa = chain_id in POA_CHAINS

        self.log.debug(
            f'Ethereum chain: {self.client.chain_name} (chain_id={chain_id}, poa={self.poa})'
        )

        # For use with Proof-Of-Authority test-blockchains
        if self.poa is True:
            self.log.debug('Injecting POA middleware at layer 0')
            self.client.inject_middleware(geth_poa_middleware, layer=0)

        self.client.add_middleware(middleware.time_based_cache_middleware)
        # self.client.add_middleware(middleware.latest_block_based_cache_middleware)  # TODO: This line causes failed tests and nonce reuse in tests. See #2348.
        self.client.add_middleware(middleware.simple_cache_middleware)

        self.configure_gas_strategy()

    def configure_gas_strategy(self,
                               gas_strategy: Optional[Callable] = None
                               ) -> None:

        if gas_strategy:
            reported_gas_strategy = f"fixed/{gas_strategy.name}"

        elif isinstance(self.client, InfuraClient):
            gas_strategy = construct_datafeed_median_strategy(
                speed=self.gas_strategy)
            reported_gas_strategy = f"datafeed/{self.gas_strategy}"

        else:
            reported_gas_strategy = f"web3/{self.gas_strategy}"
            gas_strategy = self.get_gas_strategy(self.gas_strategy)

        configuration_message = f"Using gas strategy '{reported_gas_strategy}'"

        if self.max_gas_price:
            __price = Web3.toWei(self.max_gas_price,
                                 'gwei')  # from gwei to wei
            gas_strategy = max_price_gas_strategy_wrapper(
                gas_strategy=gas_strategy, max_gas_price_wei=__price)
            configuration_message += f", with a max price of {self.max_gas_price} gwei."

        self.client.set_gas_strategy(gas_strategy=gas_strategy)

        # TODO: This line must not be called prior to establishing a connection
        #        Move it down to a lower layer, near the client.
        # gwei_gas_price = Web3.fromWei(self.client.gas_price_for_transaction(), 'gwei')

        self.log.info(configuration_message)
        # self.log.debug(f"Gas strategy currently reports a gas price of {gwei_gas_price} gwei.")

    def connect(self):

        provider_uri = self.provider_uri
        self.log.info(f"Using external Web3 Provider '{self.provider_uri}'")

        # Attach Provider
        self._attach_provider(provider=self._provider,
                              provider_uri=provider_uri)
        self.log.info("Connecting to {}".format(self.provider_uri))
        if self._provider is NO_BLOCKCHAIN_CONNECTION:
            raise self.NoProvider(
                "There are no configured blockchain providers")

        # Connect if not connected
        try:
            self.w3 = self.Web3(provider=self._provider)
            self.client = EthereumClient.from_w3(w3=self.w3)
        except requests.ConnectionError:  # RPC
            raise self.ConnectionFailed(
                f'Connection Failed - {str(self.provider_uri)} - is RPC enabled?'
            )
        except FileNotFoundError:  # IPC File Protocol
            raise self.ConnectionFailed(
                f'Connection Failed - {str(self.provider_uri)} - is IPC enabled?'
            )
        else:
            self.attach_middleware()

        return self.is_connected

    @property
    def provider(self) -> BaseProvider:
        return self._provider

    def _attach_provider(self,
                         provider: Optional[BaseProvider] = None,
                         provider_uri: str = None) -> None:
        """
        https://web3py.readthedocs.io/en/latest/providers.html#providers
        """

        if not provider_uri and not provider:
            raise self.NoProvider("No URI or provider instances supplied.")

        if provider_uri and not provider:
            uri_breakdown = urlparse(provider_uri)

            if uri_breakdown.scheme == 'tester':
                providers = {
                    'pyevm': _get_pyevm_test_provider,
                    'mock': _get_mock_test_provider
                }
                provider_scheme = uri_breakdown.netloc

            else:
                providers = {
                    'auto': _get_auto_provider,
                    'ipc': _get_IPC_provider,
                    'file': _get_IPC_provider,
                    'ws': _get_websocket_provider,
                    'wss': _get_websocket_provider,
                    'http': _get_HTTP_provider,
                    'https': _get_HTTP_provider,
                }
                provider_scheme = uri_breakdown.scheme

            # auto-detect for file based ipc
            if not provider_scheme:
                if os.path.exists(provider_uri):
                    # file is available - assume ipc/file scheme
                    provider_scheme = 'file'
                    self.log.info(
                        f"Auto-detected provider scheme as 'file://' for provider {provider_uri}"
                    )

            try:
                self._provider = providers[provider_scheme](provider_uri)
            except KeyError:
                raise self.UnsupportedProvider(
                    f"{provider_uri} is an invalid or unsupported blockchain provider URI"
                )
            else:
                self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        else:
            self._provider = provider

    @classmethod
    def _handle_failed_transaction(cls,
                                   exception: Exception,
                                   transaction_dict: dict,
                                   contract_function: Union[
                                       ContractFunction, ContractConstructor],
                                   logger: Logger = None) -> None:
        """
        Re-raising error handler and context manager for transaction broadcast or
        build failure events at the interface layer. This method is a last line of defense
        against unhandled exceptions caused by transaction failures and must raise an exception.
        # TODO: #1504 - Additional Handling of validation failures (gas limits, invalid fields, etc.)
        """

        response = exception.args[0]

        # Assume this error is formatted as an RPC response
        try:
            code = int(response['code'])
            message = response['message']
        except Exception:
            # TODO: #1504 - Try even harder to determine if this is insufficient funds causing the issue,
            #               This may be best handled at the agent or actor layer for registry and token interactions.
            # Worst case scenario - raise the exception held in context implicitly
            raise exception

        if code != cls.TransactionFailed.IPC_CODE:
            # Only handle client-specific exceptions
            # https://www.jsonrpc.org/specification Section 5.1
            raise exception

        if logger:
            logger.critical(message)  # simple context

        transaction_failed = cls.TransactionFailed(
            message=message,  # rich error (best case)
            contract_function=contract_function,
            transaction_dict=transaction_dict)
        raise transaction_failed from exception

    def __log_transaction(self, transaction_dict: dict,
                          contract_function: ContractFunction):
        """
        Format and log a transaction dict and return the transaction name string.
        This method *must not* mutate the original transaction dict.
        """
        # Work on a copy so the original transaction dict is not mutated
        tx = dict(transaction_dict)

        # Format
        if tx.get('to'):
            tx['to'] = to_checksum_address(contract_function.address)
        try:
            tx['selector'] = contract_function.selector
        except AttributeError:
            pass
        tx['from'] = to_checksum_address(tx['from'])
        tx.update({
            f: prettify_eth_amount(v)
            for f, v in tx.items() if f in ('gasPrice', 'value')
        })
        payload_pprint = ', '.join("{}: {}".format(k, v)
                                   for k, v in tx.items())

        # Log
        transaction_name = get_transaction_name(
            contract_function=contract_function)
        self.log.debug(f"[TX-{transaction_name}] | {payload_pprint}")

    @validate_checksum_address
    def build_payload(
        self,
        sender_address: str,
        payload: dict = None,
        transaction_gas_limit: int = None,
        use_pending_nonce: bool = True,
    ) -> dict:

        nonce = self.client.get_transaction_count(account=sender_address,
                                                  pending=use_pending_nonce)
        base_payload = {
            'chainId': int(self.client.chain_id),
            'nonce': nonce,
            'from': sender_address
        }

        # Aggregate
        if not payload:
            payload = {}
        payload.update(base_payload)
        # Explicit gas override - will skip gas estimation in next operation.
        if transaction_gas_limit:
            payload['gas'] = int(transaction_gas_limit)
        return payload

    @validate_checksum_address
    def build_contract_transaction(
        self,
        contract_function: ContractFunction,
        sender_address: str,
        payload: dict = None,
        transaction_gas_limit: Optional[int] = None,
        gas_estimation_multiplier: Optional[float] = None,
        use_pending_nonce: Optional[bool] = None,
    ) -> dict:

        # Sanity checks for the gas estimation multiplier
        if gas_estimation_multiplier is not None:
            if not 1 <= gas_estimation_multiplier <= 3:  # TODO: Arbitrary upper bound.
                raise ValueError(
                    f"The gas estimation multiplier should be a float between 1 and 3, "
                    f"but we received {gas_estimation_multiplier}.")
            elif transaction_gas_limit is not None:
                raise ValueError(
                    "'transaction_gas_limit' and 'gas_estimation_multiplier' can't be used together."
                )

        payload = self.build_payload(
            sender_address=sender_address,
            payload=payload,
            transaction_gas_limit=transaction_gas_limit,
            use_pending_nonce=use_pending_nonce)
        self.__log_transaction(transaction_dict=payload,
                               contract_function=contract_function)
        try:
            if 'gas' not in payload:
                # As web3 buildTransaction() will estimate gas with block identifier "pending" by default,
                # explicitly estimate gas here with block identifier 'latest' if not otherwise specified
                # as a pending transaction can cause gas estimation to fail, notably in case of worklock refunds.
                payload['gas'] = contract_function.estimateGas(
                    payload, block_identifier='latest')
            transaction_dict = contract_function.buildTransaction(payload)
        except (TestTransactionFailed, ValidationError, ValueError) as error:
            # Note: Geth (1.9.15) raises ValueError in the same condition that pyevm raises ValidationError here.
            # Treat this condition as "Transaction Failed" during gas estimation.
            raise self._handle_failed_transaction(
                exception=error,
                transaction_dict=payload,
                contract_function=contract_function,
                logger=self.log)

        # Overestimate the transaction gas limit according to the gas estimation multiplier, if any
        if gas_estimation_multiplier:
            gas_estimation = transaction_dict['gas']
            overestimation = int(
                math.ceil(gas_estimation * gas_estimation_multiplier))
            self.log.debug(
                f"Gas limit for this TX was increased from {gas_estimation} to {overestimation}, "
                f"using a multiplier of {gas_estimation_multiplier}.")
            transaction_dict['gas'] = overestimation
            # TODO: What if we're going over the block limit? Not likely, but perhaps worth checking (NRN)

        return transaction_dict

    def sign_and_broadcast_transaction(
            self,
            transacting_power: TransactingPower,
            transaction_dict: TransactionDict,
            transaction_name: str = "",
            confirmations: int = 0,
            fire_and_forget: bool = False) -> Union[TxReceipt, HexBytes]:
        """
        Takes a transaction dictionary, signs it with the configured signer, then broadcasts the signed
        transaction using the ethereum provider's eth_sendRawTransaction RPC endpoint.
        Optionally blocks for the receipt and block confirmations, controlled by the
        'confirmations' and 'fire_and_forget' flags.

        If 'fire_and_forget' is True, this method returns the transaction hash only, without
        waiting for a receipt; otherwise it returns the transaction receipt.

        """
        #
        # Setup
        #

        # TODO # 1754 - Move this to singleton - I do not approve... nor does Bogdan?
        if GlobalLoggerSettings._json_ipc:
            emitter = JSONRPCStdoutEmitter()
        else:
            emitter = StdoutEmitter()

        #
        # Sign
        #

        # TODO: Show the USD Price:  https://api.coinmarketcap.com/v1/ticker/ethereum/
        price = transaction_dict['gasPrice']
        price_gwei = Web3.fromWei(price, 'gwei')
        cost_wei = price * transaction_dict['gas']
        cost = Web3.fromWei(cost_wei, 'ether')

        if transacting_power.is_device:
            emitter.message(
                f'Confirm transaction {transaction_name} on hardware wallet... '
                f'({cost} ETH @ {price_gwei} gwei)',
                color='yellow')
        signed_raw_transaction = transacting_power.sign_transaction(
            transaction_dict)

        #
        # Broadcast
        #
        emitter.message(
            f'Broadcasting {transaction_name} Transaction ({cost} ETH @ {price_gwei} gwei)',
            color='yellow')
        try:
            txhash = self.client.send_raw_transaction(
                signed_raw_transaction)  # <--- BROADCAST
            emitter.message(f'TXHASH {txhash.hex()}', color='yellow')
        except (TestTransactionFailed, ValueError):
            raise  # TODO: Unify with Transaction failed handling -- Entry point for _handle_failed_transaction
        else:
            if fire_and_forget:
                return txhash

        #
        # Receipt
        #

        try:  # TODO: Handle block confirmation exceptions
            waiting_for = 'receipt'
            if confirmations:
                waiting_for = f'{confirmations} confirmations'
            emitter.message(
                f'Waiting {self.TIMEOUT} seconds for {waiting_for}',
                color='yellow')
            receipt = self.client.wait_for_receipt(txhash,
                                                   timeout=self.TIMEOUT,
                                                   confirmations=confirmations)
        except TimeExhausted:
            # TODO: #1504 - Handle transaction timeout
            raise
        else:
            self.log.debug(
                f"[RECEIPT-{transaction_name}] | txhash: {receipt['transactionHash'].hex()}"
            )

        #
        # Confirmations
        #

        # Primary check
        transaction_status = receipt.get('status', UNKNOWN_TX_STATUS)
        if transaction_status == 0:
            failure = f"Transaction transmitted, but receipt returned status code 0. " \
                      f"Full receipt: \n {pprint.pformat(receipt, indent=2)}"
            raise self.InterfaceError(failure)

        if transaction_status is UNKNOWN_TX_STATUS:
            self.log.info(
                f"Unknown transaction status for {txhash} (receipt did not contain a status field)"
            )

            # Secondary check
            tx = self.client.get_transaction(txhash)
            if tx["gas"] == receipt["gasUsed"]:
                raise self.InterfaceError(
                    f"Transaction consumed 100% of transaction gas."
                    f"Full receipt: \n {pprint.pformat(receipt, indent=2)}")

        return receipt

    @validate_checksum_address
    def send_transaction(
        self,
        contract_function: Union[ContractFunction, ContractConstructor],
        transacting_power: TransactingPower,
        payload: dict = None,
        transaction_gas_limit: Optional[int] = None,
        gas_estimation_multiplier: Optional[float] = None,
        confirmations: int = 0,
        fire_and_forget: bool = False,  # do not wait for receipt.  See #2385
        replace: bool = False,
    ) -> Union[TxReceipt, HexBytes]:

        if fire_and_forget:
            if confirmations > 0:
                raise ValueError(
                    "Transaction Prevented: "
                    "Cannot use 'confirmations' and 'fire_and_forget' options together."
                )

            use_pending_nonce = False  # TODO: #2385
        else:
            use_pending_nonce = replace  # TODO: #2385

        transaction = self.build_contract_transaction(
            contract_function=contract_function,
            sender_address=transacting_power.account,
            payload=payload,
            transaction_gas_limit=transaction_gas_limit,
            gas_estimation_multiplier=gas_estimation_multiplier,
            use_pending_nonce=use_pending_nonce)

        # Get transaction name
        try:
            transaction_name = contract_function.fn_name.upper()
        except AttributeError:
            transaction_name = 'DEPLOY' if isinstance(
                contract_function, ContractConstructor) else 'UNKNOWN'

        txhash_or_receipt = self.sign_and_broadcast_transaction(
            transacting_power=transacting_power,
            transaction_dict=transaction,
            transaction_name=transaction_name,
            confirmations=confirmations,
            fire_and_forget=fire_and_forget)
        return txhash_or_receipt

    def get_contract_by_name(
            self,
            registry: BaseContractRegistry,
            contract_name: str,
            contract_version: str = None,
            enrollment_version: Union[int, str] = None,
            proxy_name: str = None,
            use_proxy_address: bool = True) -> VersionedContract:
        """
        Instantiate a deployed contract from registry data,
        and assimilate it with its proxy if it is upgradeable.
        """
        target_contract_records = registry.search(
            contract_name=contract_name, contract_version=contract_version)

        if not target_contract_records:
            raise self.UnknownContract(
                f"No such contract records with name {contract_name}:{contract_version}."
            )

        if proxy_name:

            # Lookup proxies; Search for a published proxy that targets this contract record
            proxy_records = registry.search(contract_name=proxy_name)

            results = list()
            for proxy_name, proxy_version, proxy_address, proxy_abi in proxy_records:
                proxy_contract = self.client.w3.eth.contract(
                    abi=proxy_abi,
                    address=proxy_address,
                    version=proxy_version,
                    ContractFactoryClass=self._CONTRACT_FACTORY)

                # Read this dispatcher's target address from the blockchain
                proxy_live_target_address = proxy_contract.functions.target(
                ).call()
                for target_name, target_version, target_address, target_abi in target_contract_records:

                    if target_address == proxy_live_target_address:
                        if use_proxy_address:
                            triplet = (proxy_address, target_version,
                                       target_abi)
                        else:
                            triplet = (target_address, target_version,
                                       target_abi)
                    else:
                        continue

                    results.append(triplet)

            if len(results) > 1:
                address, _version, _abi = results[0]
                message = "Multiple {} deployments are targeting {}".format(
                    proxy_name, address)
                raise self.InterfaceError(message)

            else:
                try:
                    selected_address, selected_version, selected_abi = results[
                        0]
                except IndexError:
                    raise self.UnknownContract(
                        f"There are no Dispatcher records targeting '{contract_name}':{contract_version}"
                    )

        else:
            # TODO: use_proxy_address doesn't work in this case. Should we raise if used?

            # NOTE: 0 must be allowed as a valid version number
            if len(target_contract_records) != 1:
                if enrollment_version is None:
                    m = f"{len(target_contract_records)} records enrolled " \
                        f"for contract {contract_name}:{contract_version} " \
                        f"and no version index was supplied."
                    raise self.InterfaceError(m)
                enrollment_version = self.__get_enrollment_version_index(
                    name=contract_name,
                    contract_version=contract_version,
                    version_index=enrollment_version,
                    enrollments=len(target_contract_records))

            else:
                enrollment_version = -1  # default

            _contract_name, selected_version, selected_address, selected_abi = target_contract_records[
                enrollment_version]

        # Create the contract from selected sources
        unified_contract = self.client.w3.eth.contract(
            abi=selected_abi,
            address=selected_address,
            version=selected_version,
            ContractFactoryClass=self._CONTRACT_FACTORY)

        return unified_contract

    @staticmethod
    def __get_enrollment_version_index(version_index: Union[int, str],
                                       enrollments: int, name: str,
                                       contract_version: str):
        version_names = {'latest': -1, 'earliest': 0}
        try:
            version = version_names[version_index]
        except KeyError:
            try:
                version = int(version_index)
            except ValueError:
                what_is_this = version_index
                raise ValueError(
                    f"'{what_is_this}' is not a valid enrollment version number"
                )
            else:
                if version > enrollments - 1:
                    message = f"Version index '{version}' is larger than the number of enrollments " \
                              f"for {name}:{contract_version}."
                    raise ValueError(message)
        return version
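A minimal connection sketch for the interface above, assuming a reachable provider endpoint; the URI is a placeholder:

interface = BlockchainInterface(provider_uri='https://<your-eth-node>:8545')
if interface.connect():
    print(f'Connected to {interface.client.chain_name} '
          f'(chain_id={interface.client.chain_id}) at block {interface.client.block_number}')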
Example #4
    def __init__(self,
                 emitter = None,  # TODO # 1754
                 poa: bool = None,
                 light: bool = False,
                 provider_process=NO_PROVIDER_PROCESS,
                 provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
                 provider: Web3Providers = NO_BLOCKCHAIN_CONNECTION,
                 gas_strategy: Union[str, Callable] = DEFAULT_GAS_STRATEGY):

        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ... (Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the solidity compiler, a contract registry, and a collection of
        web3 network providers as a means of interfacing with the ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self._provider_process = provider_process
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION         # type: EthereumClient
        self.transacting_power = READ_ONLY_INTERFACE
        self.is_light = light
        self.gas_strategy = self.get_gas_strategy(gas_strategy)
Example #5
class EthereumClient:
    is_local = False

    GETH = 'Geth'
    PARITY = 'Parity'
    ALT_PARITY = 'Parity-Ethereum'
    GANACHE = 'EthereumJS TestRPC'

    ETHEREUM_TESTER = 'EthereumTester'  # (PyEVM)
    CLEF = 'Clef'  # Signer-only

    BLOCK_CONFIRMATIONS_POLLING_TIME = 3  # seconds
    TRANSACTION_POLLING_TIME = 0.5  # seconds
    COOLING_TIME = 5  # seconds
    STALECHECK_ALLOWABLE_DELAY = 30  # seconds

    class ConnectionNotEstablished(RuntimeError):
        pass

    class SyncTimeout(RuntimeError):
        pass

    class UnknownAccount(ValueError):
        pass

    class TransactionBroadcastError(RuntimeError):
        pass

    class NotEnoughConfirmations(TransactionBroadcastError):
        pass

    class TransactionTimeout(TransactionBroadcastError):
        pass

    class ChainReorganizationDetected(TransactionBroadcastError):
        """Raised when block confirmations logic detects that a TX was lost due to a chain reorganization"""

        error_message = ("Chain re-organization detected: Transaction {transaction_hash} was reported to be in "
                         "block {block_hash}, but it's not there anymore")

        def __init__(self, receipt):
            self.receipt = receipt
            self.message = self.error_message.format(transaction_hash=Web3.toHex(receipt['transactionHash']),
                                                     block_hash=Web3.toHex(receipt['blockHash']))
            super().__init__(self.message)

    def __init__(self,
                 w3,
                 node_technology: str,
                 version: str,
                 platform: str,
                 backend: str):

        self.w3 = w3
        self.node_technology = node_technology
        self.node_version = version
        self.platform = platform
        self.backend = backend
        self.log = Logger(self.__class__.__name__)

        self._add_default_middleware()

    def _add_default_middleware(self):
        # default retry functionality
        self.log.debug('Adding RPC retry middleware to client')
        self.add_middleware(RetryRequestMiddleware)

    @classmethod
    def _get_variant(cls, w3):
        return cls

    @classmethod
    def from_w3(cls, w3: Web3) -> 'EthereumClient':
        """

        Client version strings:

        Geth    -> 'Geth/v1.4.11-stable-fed692f6/darwin/go1.7'
        Parity  -> 'Parity-Ethereum/v2.5.1-beta-e0141f8-20190510/x86_64-linux-gnu/rustc1.34.1'
        Ganache -> 'EthereumJS TestRPC/v2.1.5/ethereum-js'
        PyEVM   -> 'EthereumTester/0.1.0b39/linux/python3.6.7'
        """
        clients = {

            # Geth
            cls.GETH: GethClient,

            # Parity
            cls.PARITY: ParityClient,
            cls.ALT_PARITY: ParityClient,

            # Test Clients
            cls.GANACHE: GanacheClient,
            cls.ETHEREUM_TESTER: EthereumTesterClient,
        }

        try:
            client_data = w3.clientVersion.split('/')
            node_technology = client_data[0]
            ClientSubclass = clients[node_technology]

        except (ValueError, IndexError):
            raise ValueError(f"Invalid client version string. Got '{w3.clientVersion}'")

        except KeyError:
            raise NotImplementedError(f'{w3.clientVersion} is not a supported ethereum client')

        client_kwargs = {
            'node_technology': node_technology,
            'version': client_data[1],
            'backend': client_data[-1],
            'platform': client_data[2] if len(client_data) == 4 else None  # Platform is optional
        }

        instance = ClientSubclass._get_variant(w3)(w3, **client_kwargs)
        return instance

    @property
    def peers(self):
        raise NotImplementedError

    @property
    def chain_name(self) -> str:
        chain_inventory = LOCAL_CHAINS if self.is_local else PUBLIC_CHAINS
        name = chain_inventory.get(self.chain_id, UNKNOWN_DEVELOPMENT_CHAIN_ID)
        return name

    def lock_account(self, account) -> bool:
        if self.is_local:
            return True
        return NotImplemented

    def unlock_account(self, account, password, duration=None) -> bool:
        if self.is_local:
            return True
        return NotImplemented

    @property
    def is_connected(self):
        return self.w3.isConnected()

    @property
    def etherbase(self) -> str:
        return self.w3.eth.accounts[0]

    @property
    def accounts(self):
        return self.w3.eth.accounts

    def get_balance(self, account):
        return self.w3.eth.getBalance(account)

    def inject_middleware(self, middleware, **kwargs):
        self.w3.middleware_onion.inject(middleware, **kwargs)

    def add_middleware(self, middleware):
        self.w3.middleware_onion.add(middleware)

    def set_gas_strategy(self, gas_strategy):
        self.w3.eth.setGasPriceStrategy(gas_strategy)

    @property
    def chain_id(self) -> int:
        try:
            # from hex-str
            return int(self.w3.eth.chainId, 16)
        except TypeError:
            # from str
            return int(self.w3.eth.chainId)

    @property
    def net_version(self) -> int:
        return int(self.w3.net.version)

    def get_contract(self, **kwargs) -> Contract:
        return self.w3.eth.contract(**kwargs)

    @property
    def gas_price(self) -> Wei:
        """
        Returns the client's gas price. Under the hood, it uses the eth_gasPrice JSON-RPC method.
        """
        return self.w3.eth.gasPrice

    def gas_price_for_transaction(self, transaction=None) -> Wei:
        """
        Obtains a gas price via the current gas strategy, if any; otherwise, it resorts to the client's gas price.
        This method mirrors the behavior of web3._utils.transactions when building transactions.
        """
        return self.w3.eth.generateGasPrice(transaction) or self.gas_price

    @property
    def block_number(self) -> BlockNumber:
        return self.w3.eth.blockNumber

    @property
    def coinbase(self) -> ChecksumAddress:
        return self.w3.eth.coinbase

    def wait_for_receipt(self,
                         transaction_hash: str,
                         timeout: float,
                         confirmations: int = 0) -> TxReceipt:
        receipt: TxReceipt = None
        if confirmations:
            # If we're waiting for confirmations, we may as well let some time pass initially to make everything easier
            time.sleep(self.COOLING_TIME)

            # We'll keep trying to get receipts until there are enough confirmations or the timeout happens
            with Timeout(seconds=timeout, exception=self.TransactionTimeout) as timeout_context:
                while not receipt:
                    try:
                        receipt = self.block_until_enough_confirmations(transaction_hash=transaction_hash,
                                                                        timeout=timeout,
                                                                        confirmations=confirmations)
                    except (self.ChainReorganizationDetected, self.NotEnoughConfirmations, TimeExhausted):
                        timeout_context.sleep(self.BLOCK_CONFIRMATIONS_POLLING_TIME)
                        continue

        else:
            # If not asking for confirmations, just use web3 and assume the returned receipt is final
            try:
                receipt = self.w3.eth.waitForTransactionReceipt(transaction_hash=transaction_hash,
                                                                timeout=timeout,
                                                                poll_latency=self.TRANSACTION_POLLING_TIME)
            except TimeExhausted:
                raise  # TODO: #1504 - Handle transaction timeout

        return receipt

    def block_until_enough_confirmations(self, transaction_hash: str, timeout: float, confirmations: int) -> dict:

        receipt: TxReceipt = self.w3.eth.waitForTransactionReceipt(transaction_hash=transaction_hash,
                                                                   timeout=timeout,
                                                                   poll_latency=self.TRANSACTION_POLLING_TIME)

        preliminary_block_hash = Web3.toHex(receipt['blockHash'])
        tx_block_number = Web3.toInt(receipt['blockNumber'])
        self.log.info(f"Transaction {Web3.toHex(transaction_hash)} is preliminarily included in "
                      f"block {preliminary_block_hash}")

        confirmations_timeout = self._calculate_confirmations_timeout(confirmations)
        confirmations_so_far = 0
        with Timeout(seconds=confirmations_timeout, exception=self.NotEnoughConfirmations) as timeout_context:
            while confirmations_so_far < confirmations:
                timeout_context.sleep(self.BLOCK_CONFIRMATIONS_POLLING_TIME)
                self.check_transaction_is_on_chain(receipt=receipt)
                confirmations_so_far = self.block_number - tx_block_number
                self.log.info(f"We have {confirmations_so_far} confirmations. "
                              f"Waiting for {confirmations - confirmations_so_far} more.")
            return receipt

    @staticmethod
    def _calculate_confirmations_timeout(confirmations):
        confirmations_timeout = 3 * AVERAGE_BLOCK_TIME_IN_SECONDS * confirmations
        return confirmations_timeout

    def check_transaction_is_on_chain(self, receipt: TxReceipt) -> bool:
        transaction_hash = Web3.toHex(receipt['transactionHash'])
        try:
            new_receipt = self.w3.eth.getTransactionReceipt(transaction_hash)
        except TransactionNotFound:
            reorg_detected = True
        else:
            reorg_detected = receipt['blockHash'] != new_receipt['blockHash']

        if reorg_detected:
            exception = self.ChainReorganizationDetected(receipt=receipt)
            self.log.info(exception.message)
            raise exception
            # TODO: Consider adding an optional param in this exception to include extra info (e.g. new block)
        return True

    def sign_transaction(self, transaction_dict: dict) -> bytes:
        raise NotImplementedError

    def get_transaction(self, transaction_hash) -> dict:
        return self.w3.eth.getTransaction(transaction_hash)

    def get_transaction_receipt(self, transaction_hash) -> Union[dict, None]:
        return self.w3.eth.getTransactionReceipt(transaction_hash)

    def get_transaction_count(self, account: str, pending: bool) -> int:
        block_identifier = 'pending' if pending else 'latest'
        return self.w3.eth.getTransactionCount(account, block_identifier)

    def send_transaction(self, transaction_dict: dict) -> str:
        return self.w3.eth.sendTransaction(transaction_dict)

    def send_raw_transaction(self, transaction_bytes: bytes) -> str:
        return self.w3.eth.sendRawTransaction(transaction_bytes)

    def sign_message(self, account: str, message: bytes) -> str:
        """
        Calls the appropriate signing function for the specified account on the
        backend. If the backend is based on eth-tester, then it uses the
        eth-tester signing interface to do so.
        """
        return self.w3.eth.sign(account, data=message)

    def get_blocktime(self):
        highest_block = self.w3.eth.getBlock('latest')
        now = highest_block['timestamp']
        return now

    def _has_latest_block(self) -> bool:
        # TODO: Investigate using `web3.middleware.make_stalecheck_middleware` #2060
        # check that our local chain data is up to date
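        # e.g., assuming STALECHECK_ALLOWABLE_DELAY were 30, a latest-block timestamp more than
        # 30 seconds behind the local clock would mark this client's view of the chain as stale.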
        return (time.time() - self.get_blocktime()) < self.STALECHECK_ALLOWABLE_DELAY

    def parse_transaction_data(self, transaction):
        return transaction.input
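
# Hedged illustration (not part of the client class above): a minimal standalone sketch of the
# same confirmation-counting idea, written directly against web3.py's v5 camelCase API. The
# provider URL in the usage note and the 1-second poll interval are assumptions for the example.
import time

from web3 import Web3


def wait_for_confirmations(w3: Web3, tx_hash: str, confirmations: int, poll_interval: float = 1.0) -> dict:
    """Block until `tx_hash` has at least `confirmations` blocks mined on top of its block."""
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)
    while True:
        try:
            # Re-fetch every round so a reorg (changed block hash / number) restarts the count.
            receipt = w3.eth.getTransactionReceipt(tx_hash)
        except Exception:  # e.g. web3.exceptions.TransactionNotFound right after a reorg
            time.sleep(poll_interval)
            continue
        if w3.eth.blockNumber - receipt['blockNumber'] >= confirmations:
            return receipt
        time.sleep(poll_interval)


# Usage (assumed local node):
# w3 = Web3(Web3.HTTPProvider('http://localhost:8545'))
# receipt = wait_for_confirmations(w3, tx_hash='0x...', confirmations=3)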
Example #6
0
class CharacterConfiguration(BaseConfiguration):
    """
    'Sideways Engagement' of Character classes; a reflection of input parameters.
    """

    VERSION = 2  # bump when static payload scheme changes

    CHARACTER_CLASS = NotImplemented
    DEFAULT_CONTROLLER_PORT = NotImplemented
    DEFAULT_DOMAIN = NetworksInventory.DEFAULT
    DEFAULT_NETWORK_MIDDLEWARE = RestMiddleware
    TEMP_CONFIGURATION_DIR_PREFIX = 'tmp-nucypher'

    # When we begin to support other threshold schemes, this will be one of the concepts that makes us want a factory.  #571
    known_node_class = Ursula

    # Gas
    DEFAULT_GAS_STRATEGY = 'fast'

    _CONFIG_FIELDS = ('config_root', 'poa', 'light', 'provider_uri',
                      'registry_filepath', 'gas_strategy', 'signer_uri')

    def __init__(
            self,

            # Base
            emitter=None,
            config_root: str = None,
            filepath: str = None,

            # Mode
            dev_mode: bool = False,
            federated_only: bool = False,

            # Identity
            checksum_address: str = None,
            crypto_power: CryptoPower = None,

            # Keyring
            keyring: NucypherKeyring = None,
            keyring_root: str = None,

            # Learner
            learn_on_same_thread: bool = False,
            abort_on_learning_error: bool = False,
            start_learning_now: bool = True,

            # Network
            controller_port: int = None,
            domain: str = DEFAULT_DOMAIN,
            interface_signature: Signature = None,
            network_middleware: RestMiddleware = None,
            lonely: bool = False,

            # Node Storage
            known_nodes: set = None,
            node_storage: NodeStorage = None,
            reload_metadata: bool = True,
            save_metadata: bool = True,

            # Blockchain
            poa: bool = None,
            light: bool = False,
            sync: bool = False,
            provider_uri: str = None,
            provider_process=None,
            gas_strategy: Union[Callable, str] = DEFAULT_GAS_STRATEGY,
            signer_uri: str = None,

            # Registry
            registry: BaseContractRegistry = None,
            registry_filepath: str = None,

            # Deployed Workers
            worker_data: dict = None):

        self.log = Logger(self.__class__.__name__)
        UNINITIALIZED_CONFIGURATION.bool_value(False)

        # Identity
        # NOTE: NodeConfigurations can only be used with Self-Characters
        self.is_me = True
        self.checksum_address = checksum_address

        # Keyring
        self.crypto_power = crypto_power
        self.keyring = keyring or NO_KEYRING_ATTACHED
        self.keyring_root = keyring_root or UNINITIALIZED_CONFIGURATION

        # Contract Registry
        if registry and registry_filepath:
            if registry.filepath != registry_filepath:
                error = f"Inconsistent registry filepaths for '{registry.filepath}' and '{registry_filepath}'."
                raise ValueError(error)
            else:
                self.log.warn(
                    "Registry and registry filepath were both passed.")
        self.registry = registry or NO_BLOCKCHAIN_CONNECTION.bool_value(False)
        self.registry_filepath = registry_filepath or UNINITIALIZED_CONFIGURATION

        # Blockchain
        self.poa = poa
        self.is_light = light
        self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        self.provider_process = provider_process or NO_BLOCKCHAIN_CONNECTION
        self.signer_uri = signer_uri or None

        # Learner
        self.federated_only = federated_only
        self.domain = domain
        self.learn_on_same_thread = learn_on_same_thread
        self.abort_on_learning_error = abort_on_learning_error
        self.start_learning_now = start_learning_now
        self.save_metadata = save_metadata
        self.reload_metadata = reload_metadata
        self.known_nodes = known_nodes or set()  # handpicked
        self.lonely = lonely

        # Configuration
        self.__dev_mode = dev_mode
        self.config_file_location = filepath or UNINITIALIZED_CONFIGURATION
        self.config_root = UNINITIALIZED_CONFIGURATION

        # Deployed Workers
        self.worker_data = worker_data

        #
        # Federated vs. Blockchain arguments consistency
        #

        #
        # Federated
        #

        if self.federated_only:
            # Check for incompatible values
            blockchain_args = {
                'filepath': registry_filepath,
                'poa': poa,
                'provider_process': provider_process,
                'provider_uri': provider_uri,
                'gas_strategy': gas_strategy
            }
            if any(blockchain_args.values()):
                bad_args = ", ".join(f"{arg}={val}"
                                     for arg, val in blockchain_args.items()
                                     if val)
                self.log.warn(
                    f"Arguments {bad_args} are incompatible with federated_only. "
                    "They will be overridden with sane defaults.")

                # Clear decentralized attributes to ensure consistency with a
                # federated configuration.
                self.poa = False
                self.is_light = False
                self.provider_uri = None
                self.provider_process = None
                self.registry_filepath = None
                self.gas_strategy = None

        #
        # Decentralized
        #

        else:
            self.gas_strategy = gas_strategy
            is_initialized = BlockchainInterfaceFactory.is_interface_initialized(
                provider_uri=self.provider_uri)
            if not is_initialized and provider_uri:
                BlockchainInterfaceFactory.initialize_interface(
                    provider_uri=self.provider_uri,
                    poa=self.poa,
                    light=self.is_light,
                    provider_process=self.provider_process,
                    sync=sync,
                    emitter=emitter,
                    gas_strategy=gas_strategy)
            else:
                self.log.warn(
                    f"Using existing blockchain interface connection ({self.provider_uri})."
                )

            if not self.registry:
                # TODO: These two code blocks are untested.
                if not self.registry_filepath:  # TODO: Registry URI  (goerli://speedynet.json) :-)
                    self.log.info(f"Fetching latest registry from source.")
                    self.registry = InMemoryContractRegistry.from_latest_publication(
                        network=self.domain)
                else:
                    self.registry = LocalContractRegistry(
                        filepath=self.registry_filepath)
                    self.log.info(f"Using local registry ({self.registry}).")

        if dev_mode:
            self.__temp_dir = UNINITIALIZED_CONFIGURATION
            self.__setup_node_storage()
            self.initialize(password=DEVELOPMENT_CONFIGURATION)
        else:
            self.__temp_dir = LIVE_CONFIGURATION
            self.config_root = config_root or self.DEFAULT_CONFIG_ROOT
            self._cache_runtime_filepaths()
            self.__setup_node_storage(node_storage=node_storage)

        # Network
        self.controller_port = controller_port or self.DEFAULT_CONTROLLER_PORT
        self.network_middleware = network_middleware or self.DEFAULT_NETWORK_MIDDLEWARE(
            registry=self.registry)
        self.interface_signature = interface_signature

        super().__init__(filepath=self.config_file_location,
                         config_root=self.config_root)

    def __call__(self, **character_kwargs):
        return self.produce(**character_kwargs)

    @classmethod
    def checksum_address_from_filepath(cls, filepath: str) -> str:

        pattern = re.compile(
            r'''
                             (^\w+)-          # Character name, followed by a dash
                             (0x              # Then "0x", the start of the checksum address
                             [0-9a-fA-F]{40}) # Followed by exactly 40 hex chars
                             ''', re.VERBOSE)

        filename = os.path.basename(filepath)
        match = pattern.match(filename)

        if match:
            character_name, checksum_address = match.groups()

        else:
            # Extract from default by "peeking" inside the configuration file.
            default_name = cls.generate_filename()
            if filename == default_name:
                checksum_address = cls.peek(filepath=filepath,
                                            field='checksum_address')

                ###########
                # TODO: Cleanup and deprecate worker_address in config files, leaving only checksum_address
                from nucypher.config.characters import UrsulaConfiguration
                if issubclass(cls, UrsulaConfiguration):
                    federated = bool(
                        cls.peek(filepath=filepath, field='federated_only'))
                    if not federated:
                        checksum_address = cls.peek(filepath=filepath,
                                                    field='worker_address')
                ###########

            else:
                raise ValueError(
                    f"Cannot extract checksum from filepath '{filepath}'")

        if not is_checksum_address(checksum_address):
            raise RuntimeError(
                f"Invalid checksum address detected in configuration file at '{filepath}'."
            )
        return checksum_address

    def update(self, **kwargs) -> None:
        """
        A facility for updating existing attributes on existing configuration instances.

        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        return super().update(modifier=self.checksum_address,
                              filepath=self.config_file_location,
                              **kwargs)

    @classmethod
    def generate(cls, password: str, *args, **kwargs):
        """Shortcut: Hook-up a new initial installation and write configuration file to the disk"""
        node_config = cls(dev_mode=False, *args, **kwargs)
        node_config.initialize(password=password)
        node_config.to_configuration_file()
        return node_config

    def cleanup(self) -> None:
        if self.__dev_mode:
            self.__temp_dir.cleanup()

    @property
    def dev_mode(self) -> bool:
        return self.__dev_mode

    def __setup_node_storage(self, node_storage=None) -> None:
        if self.dev_mode:
            node_storage = ForgetfulNodeStorage(
                registry=self.registry, federated_only=self.federated_only)
        elif not node_storage:
            node_storage = LocalFileBasedNodeStorage(
                registry=self.registry,
                config_root=self.config_root,
                federated_only=self.federated_only)
        self.node_storage = node_storage

    def forget_nodes(self) -> None:
        self.node_storage.clear()
        message = "Removed all stored node node metadata and certificates"
        self.log.debug(message)

    def destroy(self) -> None:
        """Parse a node configuration and remove all associated files from the filesystem"""
        self.attach_keyring()
        self.keyring.destroy()
        os.remove(self.config_file_location)

    def generate_parameters(self, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        merged_parameters = {
            **self.static_payload(),
            **self.dynamic_payload,
            **overrides
        }
        character_init_params = filter(
            lambda t: t[0] not in self._CONFIG_FIELDS,
            merged_parameters.items())
        return dict(character_init_params)

    def produce(self, **overrides) -> CHARACTER_CLASS:
        """Initialize a new character instance and return it."""
        merged_parameters = self.generate_parameters(**overrides)
        character = self.CHARACTER_CLASS(**merged_parameters)
        return character

    @classmethod
    def assemble(cls, filepath: str = None, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        payload = cls._read_configuration_file(filepath=filepath)
        node_storage = cls.load_node_storage(
            storage_payload=payload['node_storage'],
            federated_only=payload['federated_only'])
        domain = payload['domain']

        # Assemble
        payload.update(dict(node_storage=node_storage, domain=domain))
        # Filter out None values from **overrides to detect, well, overrides...
        # Acts as a shim for optional CLI flags.
        overrides = {k: v for k, v in overrides.items() if v is not None}
        payload = {**payload, **overrides}
        return payload

    @classmethod
    def from_configuration_file(
            cls,
            filepath: str = None,
            provider_process=None,
            **overrides  # < ---- Inlet for CLI Flags
    ) -> 'CharacterConfiguration':
        """Initialize a CharacterConfiguration from a JSON file."""
        filepath = filepath or cls.default_filepath()
        assembled_params = cls.assemble(filepath=filepath, **overrides)
        node_configuration = cls(filepath=filepath,
                                 provider_process=provider_process,
                                 **assembled_params)
        return node_configuration

    def validate(self) -> bool:

        # Top-level
        if not os.path.exists(self.config_root):
            raise self.ConfigurationError(
                f'No configuration directory found at {self.config_root}.')

        # Sub-paths
        filepaths = self.runtime_filepaths
        for field, path in filepaths.items():
            if path and not os.path.exists(path):
                message = 'Missing configuration file or directory: {}.'
                if 'registry' in path:
                    message += ' Did you mean to pass --federated-only?'
                raise CharacterConfiguration.InvalidConfiguration(
                    message.format(path))
        return True

    def static_payload(self) -> dict:
        """Exported static configuration values for initializing Ursula"""

        payload = dict(

            # Identity
            federated_only=self.federated_only,
            checksum_address=self.checksum_address,
            keyring_root=self.keyring_root,

            # Behavior
            domain=self.domain,
            learn_on_same_thread=self.learn_on_same_thread,
            abort_on_learning_error=self.abort_on_learning_error,
            start_learning_now=self.start_learning_now,
            save_metadata=self.save_metadata,
            node_storage=self.node_storage.payload(),
            lonely=self.lonely,
        )

        # Optional values (mode)
        if not self.federated_only:
            if self.provider_uri:
                if not self.signer_uri:
                    self.signer_uri = self.provider_uri
                payload.update(
                    dict(provider_uri=self.provider_uri,
                         poa=self.poa,
                         light=self.is_light,
                         signer_uri=self.signer_uri))
            if self.registry_filepath:
                payload.update(dict(registry_filepath=self.registry_filepath))

            # Gas Price
            payload.update(dict(gas_strategy=self.gas_strategy))

        # Merge with base payload
        base_payload = super().static_payload()
        base_payload.update(payload)

        return base_payload

    @property  # TODO: Graduate to a method and "derive" dynamic from static payload.
    def dynamic_payload(self) -> dict:
        """Exported dynamic configuration values for initializing Ursula"""
        payload = dict()
        if not self.federated_only:
            testnet = self.domain != NetworksInventory.MAINNET
            signer = Signer.from_signer_uri(self.signer_uri, testnet=testnet)
            payload.update(dict(registry=self.registry, signer=signer))

        payload.update(
            dict(network_middleware=self.network_middleware
                 or self.DEFAULT_NETWORK_MIDDLEWARE(),
                 known_nodes=self.known_nodes,
                 node_storage=self.node_storage,
                 crypto_power_ups=self.derive_node_power_ups()))
        return payload

    def generate_filepath(self,
                          filepath: str = None,
                          modifier: str = None,
                          override: bool = False) -> str:
        modifier = modifier or self.checksum_address
        filepath = super().generate_filepath(filepath=filepath,
                                             modifier=modifier,
                                             override=override)
        return filepath

    @property
    def runtime_filepaths(self) -> dict:
        filepaths = dict(config_root=self.config_root,
                         keyring_root=self.keyring_root,
                         registry_filepath=self.registry_filepath)
        return filepaths

    @classmethod
    def generate_runtime_filepaths(cls, config_root: str) -> dict:
        """Dynamically generate paths based on configuration root directory"""
        filepaths = dict(config_root=config_root,
                         config_file_location=os.path.join(
                             config_root, cls.generate_filename()),
                         keyring_root=os.path.join(config_root, 'keyring'))
        return filepaths

    def _cache_runtime_filepaths(self) -> None:
        """Generate runtime filepaths and cache them on the config object"""
        filepaths = self.generate_runtime_filepaths(
            config_root=self.config_root)
        for field, filepath in filepaths.items():
            if getattr(self, field) is UNINITIALIZED_CONFIGURATION:
                setattr(self, field, filepath)

    def attach_keyring(self,
                       checksum_address: str = None,
                       *args,
                       **kwargs) -> None:
        account = checksum_address or self.checksum_address
        if not account:
            raise self.ConfigurationError(
                "No account specified to unlock keyring")
        if self.keyring is not NO_KEYRING_ATTACHED:
            if self.keyring.checksum_address != account:
                raise self.ConfigurationError(
                    f"There is already a keyring attached to this configuration "
                    f"for a different account ({self.keyring.checksum_address}).")
            return
        self.keyring = NucypherKeyring(keyring_root=self.keyring_root,
                                       account=account,
                                       *args,
                                       **kwargs)

    def derive_node_power_ups(self) -> List[CryptoPowerUp]:
        power_ups = list()
        if self.is_me and not self.dev_mode:
            for power_class in self.CHARACTER_CLASS._default_crypto_powerups:
                power_up = self.keyring.derive_crypto_power(power_class)
                power_ups.append(power_up)
        return power_ups

    def initialize(self, password: str) -> str:
        """Initialize a new configuration and write installation files to disk."""

        # Development
        if self.dev_mode:
            self.__temp_dir = TemporaryDirectory(
                prefix=self.TEMP_CONFIGURATION_DIR_PREFIX)
            self.config_root = self.__temp_dir.name

        # Persistent
        else:
            self._ensure_config_root_exists()
            self.write_keyring(password=password)

        self._cache_runtime_filepaths()
        self.node_storage.initialize()

        # Validate
        if not self.__dev_mode:
            self.validate()

        # Success
        message = "Created nucypher installation files at {}".format(
            self.config_root)
        self.log.debug(message)
        return self.config_root

    def write_keyring(self,
                      password: str,
                      checksum_address: str = None,
                      **generation_kwargs) -> NucypherKeyring:

        if self.federated_only:
            checksum_address = FEDERATED_ADDRESS

        elif not checksum_address:

            # Note: It is assumed the blockchain interface is not yet connected.
            if self.provider_process:

                # Generate Geth's "datadir"
                if not os.path.exists(self.provider_process.data_dir):
                    os.mkdir(self.provider_process.data_dir)

                # Get or create wallet address
                if not self.checksum_address:
                    self.checksum_address = self.provider_process.ensure_account_exists(
                        password=password)
                elif self.checksum_address not in self.provider_process.accounts():
                    raise self.ConfigurationError(
                        f'Unknown Account {self.checksum_address}')

            elif not self.checksum_address:
                raise self.ConfigurationError(
                    'No checksum address provided for decentralized configuration.'
                )

            checksum_address = self.checksum_address

        self.keyring = NucypherKeyring.generate(
            password=password,
            keyring_root=self.keyring_root,
            checksum_address=checksum_address,
            **generation_kwargs)

        if self.federated_only:
            self.checksum_address = self.keyring.checksum_address

        return self.keyring

    @classmethod
    def load_node_storage(cls, storage_payload: dict, federated_only: bool):
        from nucypher.config.storages import NodeStorage
        node_storage_subclasses = {
            storage._name: storage
            for storage in NodeStorage.__subclasses__()
        }
        storage_type = storage_payload[NodeStorage._TYPE_LABEL]
        storage_class = node_storage_subclasses[storage_type]
        node_storage = storage_class.from_payload(
            payload=storage_payload, federated_only=federated_only)
        return node_storage
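
# Hedged illustration: a self-contained check of the filename-parsing idea used by
# checksum_address_from_filepath above. The filename below is made up for the example;
# the real default filename format comes from generate_filename() and may differ.
import re

PATTERN = re.compile(r'''
                     (^\w+)-            # Character name, followed by a dash
                     (0x                # Then "0x", the start of the address
                     [0-9a-fA-F]{40})   # Followed by exactly 40 hex characters
                     ''', re.VERBOSE)

filename = 'ursula-0x' + 'ab' * 20 + '.json'  # hypothetical configuration filename
match = PATTERN.match(filename)
if match:
    character_name, checksum_address = match.groups()
    print(character_name, checksum_address)  # -> ursula 0xabab...ab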
Example #7
0
from typing import Optional, Sequence

import requests
from constant_sorrow.constants import EXEMPT_FROM_VERIFICATION
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509 import Certificate
from nucypher_core import MetadataRequest, FleetStateChecksum, NodeMetadata
from requests.exceptions import SSLError

from nucypher.blockchain.eth.registry import BaseContractRegistry
from nucypher.config.storages import ForgetfulNodeStorage
from nucypher.network.exceptions import NodeSeemsToBeDown
from nucypher.utilities.logging import Logger

SSL_LOGGER = Logger('ssl-middleware')
EXEMPT_FROM_VERIFICATION.bool_value(False)


class NucypherMiddlewareClient:
    library = requests
    timeout = 1.2

    def __init__(self,
                 registry: Optional['BaseContractRegistry'] = None,
                 eth_provider_uri: Optional[str] = None,
                 storage: Optional['NodeStorage'] = None,
                 *args,
                 **kwargs):

        self.registry = registry
Example #8
0
class FleetSensor:
    """
    A representation of a fleet of NuCypher nodes.

    If `this_node` is provided, it will be included in the state checksum
    (but not returned during iteration/lookups).
    """
    log = Logger("Learning")

    def __init__(self, domain: str, this_node: Optional['Ursula'] = None):

        self._domain = domain

        self._current_state = FleetState.new(this_node)
        self._archived_states = [self._current_state.archived()]
        self.remote_states = {}

        # temporary accumulator for new nodes to avoid updating the fleet state every time
        self._nodes_to_add = set()
        self._nodes_to_remove = set()  # Beginning of bucketing.

        self._auto_update_state = False

    def record_node(self, node: 'Ursula'):

        if node.domain == self._domain:
            self._nodes_to_add.add(node)

            if self._auto_update_state:
                self.log.info(f"Updating fleet state after saving node {node}")
                self.record_fleet_state()
        else:
            msg = f"Rejected node {node} because its domain is '{node.domain}' but we're only tracking '{self._domain}'"
            self.log.warn(msg)

    def __getitem__(self, item):
        return self._current_state[item]

    def __bool__(self):
        return bool(self._current_state)

    def __contains__(self, item):
        """
        Checks if the node *with the same metadata* is recorded in the current state.
        Does not compare ``item`` with the owner node of this FleetSensor.
        """
        return item in self._current_state

    def __iter__(self):
        yield from self._current_state

    def __len__(self):
        return len(self._current_state)

    def __repr__(self):
        return f"FleetSensor({self._current_state.__repr__()})"

    @property
    def current_state(self):
        return self._current_state

    @property
    def checksum(self):
        return self._current_state.checksum

    @property
    def population(self):
        return self._current_state.population

    @property
    def nickname(self):
        return self._current_state.nickname

    @property
    def icon(self) -> str:
        return self._current_state.icon

    @property
    def timestamp(self):
        return self._current_state.timestamp

    def items(self):
        return self._current_state.items()

    def values(self):
        return self._current_state.values()

    def latest_states(self, quantity: int) -> List[ArchivedFleetState]:
        """
        Returns at most ``quantity`` latest archived states (including the current one),
        in chronological order.
        """
        latest = self._archived_states[-min(len(self._archived_states), quantity):]
        return latest

    def addresses(self):
        return self._current_state.addresses()

    def snapshot(self):
        return self._current_state.snapshot()

    @staticmethod
    def unpack_snapshot(data):
        return FleetState.unpack_snapshot(data)

    def record_fleet_state(self, skip_this_node: bool = False):
        new_state = self._current_state.with_updated_nodes(nodes_to_add=self._nodes_to_add,
                                                           nodes_to_remove=self._nodes_to_remove,
                                                           skip_this_node=skip_this_node)

        self._nodes_to_add = set()
        self._nodes_to_remove = set()
        self._current_state = new_state

        # TODO: set a limit on the number of archived states?
        # Two ways to collect archived states:
        # 1. (current) add a state to the archive every time it changes
        # 2. (possible) keep a dictionary of known states
        #    and bump the timestamp of a previously encountered one
        if new_state.checksum != self._archived_states[-1].checksum:
            self._archived_states.append(new_state.archived())

    def shuffled(self):
        return self._current_state.shuffled()

    def mark_as(self, label: Exception, node: 'Ursula'):
        # TODO: for now we're not using `label` in any way, so we're just ignoring it
        self._nodes_to_remove.add(node.checksum_address)

    def record_remote_fleet_state(self,
                                  checksum_address: ChecksumAddress,
                                  state_checksum: str,
                                  timestamp: maya.MayaDT,
                                  population: int):
        nickname = Nickname.from_seed(state_checksum, length=1)
        self.remote_states[checksum_address] = ArchivedFleetState(checksum=state_checksum,
                                                                  nickname=nickname,
                                                                  timestamp=timestamp,
                                                                  population=population)
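
# Hedged illustration (a toy stand-in, not nucypher's FleetState/FleetSensor): the same
# accumulate-then-commit pattern as above, where pending additions/removals are folded into a
# new state only on an explicit commit, and a state is archived only when its checksum changes.
import hashlib
from typing import FrozenSet, List, Set


class ToyFleetState:
    def __init__(self, nodes: FrozenSet[str]):
        self.nodes = nodes
        self.checksum = hashlib.sha256(','.join(sorted(nodes)).encode()).hexdigest()


class ToyFleetSensor:
    def __init__(self):
        self._current = ToyFleetState(frozenset())
        self._archive: List[ToyFleetState] = [self._current]
        self._to_add: Set[str] = set()
        self._to_remove: Set[str] = set()

    def record_node(self, address: str):
        self._to_add.add(address)  # accumulate only; the fleet state is not rebuilt yet

    def record_fleet_state(self):
        nodes = (self._current.nodes | self._to_add) - self._to_remove
        self._to_add, self._to_remove = set(), set()
        self._current = ToyFleetState(frozenset(nodes))
        if self._current.checksum != self._archive[-1].checksum:
            self._archive.append(self._current)  # archive only on an actual change


sensor = ToyFleetSensor()
sensor.record_node('0xdeadbeef')
sensor.record_fleet_state()  # commits the pending addition and archives the new state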
Example #9
0
class NodeEngagementMutex:
    """
    TODO: Does this belong on middleware?

    TODO: There are a couple of ways this can break.  If one of the jobs hangs, the whole thing will hang.  Also,
       if fewer jobs complete successfully than percent_to_complete_before_release requires, the partial queue will
       never release.

    TODO: Make registry per... I guess Policy?  It's weird to be able to accidentally enact again.
    """
    log = Logger("Policy")

    def __init__(
            self,
            callable_to_engage,  # TODO: typing.Protocol
            nodes,
            network_middleware,
            percent_to_complete_before_release=5,
            note=None,
            threadpool_size=120,
            timeout=20,
            *args,
            **kwargs):
        self.f = callable_to_engage
        self.nodes = nodes
        self.network_middleware = network_middleware
        self.args = args
        self.kwargs = kwargs

        self.completed = {}
        self.failed = {}

        self._started = False
        self._finished = False
        self.timeout = timeout

        self.percent_to_complete_before_release = percent_to_complete_before_release
        self._partial_queue = Queue()
        self._completion_queue = Queue()
        self._block_until_this_many_are_complete = math.ceil(
            len(nodes) * self.percent_to_complete_before_release / 100)
        self.nodes_contacted_during_partial_block = False
        # TODO: Allow cancelling via KB Interrupt or some other way?
        self.when_complete = Deferred()

        if note is None:
            self._repr = f"{callable_to_engage} to {len(nodes)} nodes"
        else:
            self._repr = f"{note}: {callable_to_engage} to {len(nodes)} nodes"

        self._threadpool = ThreadPool(minthreads=threadpool_size,
                                      maxthreads=threadpool_size,
                                      name=self._repr)
        self.log.info(f"NEM spinning up {self._threadpool}")
        self._threadpool.callInThread(self._bail_on_timeout)

    def __repr__(self):
        return self._repr

    def _bail_on_timeout(self):
        while True:
            if self.when_complete.called:
                return
            duration = datetime.datetime.now() - self._started
            if duration.seconds >= self.timeout:
                try:
                    self._threadpool.stop()
                except AlreadyQuit:
                    raise RuntimeError(
                        "Is there a race condition here?  If this line is being hit, it's a bug."
                    )
                raise RuntimeError(
                    f"Timed out.  Nodes completed: {self.completed}")
            time.sleep(.5)

    def block_until_success_is_reasonably_likely(self):
        """
        https://www.youtube.com/watch?v=OkSLswPSq2o
        """
        if len(self.completed) < self._block_until_this_many_are_complete:
            try:
                completed_for_reasonable_likelihood_of_success = self._partial_queue.get(
                    timeout=self.timeout)  # TODO: Shorter timeout here?
            except Empty:
                raise RuntimeError(
                    f"Timed out.  Nodes completed: {self.completed}")
            self.log.debug(
                f"{len(self.completed)} nodes were contacted while blocking for a little while."
            )
            return completed_for_reasonable_likelihood_of_success
        else:
            return self.completed

    def block_until_complete(self):
        if self.total_disposed() < len(self.nodes):
            try:
                _ = self._completion_queue.get(
                    timeout=self.timeout
                )  # Interesting opportunity to pass some data, like the list of contacted nodes above.
            except Empty:
                raise RuntimeError(
                    f"Timed out.  Nodes completed: {self.completed}")
        if not reactor.running and not self._threadpool.joined:
            # If the reactor isn't running, the user *must* call this, because this is where we stop.
            self._threadpool.stop()

    def _handle_success(self, response, node):
        if response.status_code == 201:
            self.completed[node] = response
        else:
            assert False  # TODO: What happens if this is a 300 or 400 level response?  (A 500 response will propagate as an error and be handled in the errback chain.)
        if self.nodes_contacted_during_partial_block:
            self._consider_finalizing()
        else:
            if len(self.completed) >= self._block_until_this_many_are_complete:
                contacted = tuple(self.completed.keys())
                self.nodes_contacted_during_partial_block = contacted
                self.log.debug(
                    f"Blocked for a little while, completed {contacted} nodes")
                self._partial_queue.put(contacted)
        return response

    def _handle_error(self, failure, node):
        self.failed[node] = failure  # TODO: Add a failfast mode?
        self._consider_finalizing()
        self.log.warn(f"{node} failed: {failure}")

    def total_disposed(self):
        return len(self.completed) + len(self.failed)

    def _consider_finalizing(self):
        if not self._finished:
            if self.total_disposed() == len(self.nodes):
                # TODO: Consider whether this can possibly hang.
                self._finished = True
                if reactor.running:
                    reactor.callInThread(self._threadpool.stop)
                self._completion_queue.put(self.completed)
                self.when_complete.callback(self.completed)
                self.log.info(f"{self} finished.")
        else:
            raise RuntimeError("Already finished.")

    def _engage_node(self, node):
        maybe_coro = self.f(node,
                            network_middleware=self.network_middleware,
                            *self.args,
                            **self.kwargs)

        d = ensureDeferred(maybe_coro)
        d.addCallback(self._handle_success, node)
        d.addErrback(self._handle_error, node)
        return d

    def start(self):
        if self._started:
            raise RuntimeError("Already started.")
        self._started = datetime.datetime.now()
        self.log.info(f"NEM Starting {self._threadpool}")
        for node in self.nodes:
            self._threadpool.callInThread(self._engage_node, node)
        self._threadpool.start()
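
# Hedged illustration (not nucypher code): a standalone sketch of the "release the caller once
# a percentage of jobs has completed, let the rest keep running" idea from NodeEngagementMutex
# above, using concurrent.futures instead of a Twisted ThreadPool. contact() and the node list
# are placeholders for this example.
import math
import time
from concurrent.futures import ThreadPoolExecutor, as_completed


def contact(node: str) -> str:
    """Placeholder for the real per-node work (e.g. an HTTP call to the node)."""
    time.sleep(0.1)
    return f"{node}: ok"


def engage(nodes, percent_to_complete_before_release=5, timeout=20):
    threshold = math.ceil(len(nodes) * percent_to_complete_before_release / 100)
    completed = {}
    pool = ThreadPoolExecutor(max_workers=min(120, max(1, len(nodes))))
    futures = {pool.submit(contact, node): node for node in nodes}
    try:
        for future in as_completed(futures, timeout=timeout):
            completed[futures[future]] = future.result()
            if len(completed) >= threshold:
                break  # success is "reasonably likely"; release the caller early
    finally:
        pool.shutdown(wait=False)  # remaining contacts finish in the background
    return completed


print(engage([f"node-{i}" for i in range(20)]))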
Example #10
0
def make_rest_app(
    db_filepath: str,
    this_node,
    serving_domains,
    log=Logger("http-application-layer")) -> Tuple:

    forgetful_node_storage = ForgetfulNodeStorage(
        federated_only=this_node.federated_only)

    from nucypher.datastore import datastore

    log.info("Starting datastore {}".format(db_filepath))
    datastore = datastore.Datastore(db_filepath)

    from nucypher.characters.lawful import Alice, Ursula
    _alice_class = Alice
    _node_class = Ursula

    rest_app = Flask("ursula-service")
    rest_app.config['MAX_CONTENT_LENGTH'] = MAX_UPLOAD_CONTENT_LENGTH

    @rest_app.route("/public_information")
    def public_information():
        """
        REST endpoint for public keys and address.
        """
        response = Response(response=bytes(this_node),
                            mimetype='application/octet-stream')

        return response

    @rest_app.route("/ping", methods=['POST'])
    def ping():
        """
        Asks this node: "Can you access my public information endpoint"?
        """

        try:
            requesting_ursula = Ursula.from_bytes(request.data,
                                                  registry=this_node.registry)
            requesting_ursula.mature()
        except ValueError:
            return Response({'error': 'Invalid Ursula'}, status=400)
        else:
            initiator_address, initiator_port = tuple(
                requesting_ursula.rest_interface)

        # Compare requester and posted Ursula information
        request_address = request.environ['REMOTE_ADDR']
        if request_address != initiator_address:
            return Response({'error': 'Suspicious origin address'}, status=400)

        #
        # Make a Sandwich
        #

        try:
            # Fetch and store initiator's teacher certificate.
            certificate = this_node.network_middleware.get_certificate(
                host=initiator_address, port=initiator_port)
            certificate_filepath = this_node.node_storage.store_node_certificate(
                certificate=certificate)
            requesting_ursula_bytes = this_node.network_middleware.client.node_information(
                host=initiator_address,
                port=initiator_port,
                certificate_filepath=certificate_filepath)
        except NodeSeemsToBeDown:
            return Response({'error': 'Unreachable node'},
                            status=400)  # ... toasted

        # Compare the results of the outer POST with the inner GET... yum
        if requesting_ursula_bytes == request.data:
            return Response(status=200)
        else:
            return Response({'error': 'Suspicious node'}, status=400)

    @rest_app.route('/node_metadata', methods=["GET"])
    def all_known_nodes():
        headers = {'Content-Type': 'application/octet-stream'}

        if this_node.known_nodes.checksum is NO_KNOWN_NODES:
            return Response(b"", headers=headers, status=204)

        known_nodes_bytestring = this_node.bytestring_of_known_nodes()
        signature = this_node.stamp(known_nodes_bytestring)
        return Response(bytes(signature) + known_nodes_bytestring,
                        headers=headers)

    @rest_app.route('/node_metadata', methods=["POST"])
    def node_metadata_exchange():
        # If these nodes already have the same fleet state, no exchange is necessary.

        learner_fleet_state = request.args.get('fleet')
        if learner_fleet_state == this_node.known_nodes.checksum:
            log.debug(
                "Learner already knew fleet state {}; doing nothing.".format(
                    learner_fleet_state))
            headers = {'Content-Type': 'application/octet-stream'}
            payload = this_node.known_nodes.snapshot() + bytes(
                FLEET_STATES_MATCH)
            signature = this_node.stamp(payload)
            return Response(bytes(signature) + payload, headers=headers)

        sprouts = _node_class.batch_from_bytes(request.data)

        for node in sprouts:
            this_node.remember_node(node)

        # TODO: What's the right status code here?  202?  Different if we already knew about the node(s)?
        return all_known_nodes()

    @rest_app.route('/consider_arrangement', methods=['POST'])
    def consider_arrangement():
        from nucypher.policy.policies import Arrangement
        arrangement = Arrangement.from_bytes(request.data)

        # TODO: Look at the expiration and figure out if we're even staking that long.  1701
        with datastore.describe(PolicyArrangement,
                                arrangement.id.hex(),
                                writeable=True) as new_policy_arrangement:
            new_policy_arrangement.arrangement_id = arrangement.id.hex().encode()
            new_policy_arrangement.expiration = arrangement.expiration
            new_policy_arrangement.alice_verifying_key = arrangement.alice.stamp.as_umbral_pubkey()

        # TODO: Fine, we'll add the arrangement here, but if we never hear from Alice again to enact it,
        # we need to prune it at some point.  #1700

        headers = {'Content-Type': 'application/octet-stream'}
        # TODO: Make this a legit response #234.
        return Response(
            b"This will eventually be an actual acceptance of the arrangement.",
            headers=headers)

    @rest_app.route("/kFrag/<id_as_hex>", methods=['POST'])
    def set_policy(id_as_hex):
        """
        REST endpoint for setting a kFrag.
        """
        policy_message_kit = UmbralMessageKit.from_bytes(request.data)

        alices_verifying_key = policy_message_kit.sender_verifying_key
        alice = _alice_class.from_public_keys(
            verifying_key=alices_verifying_key)

        try:
            cleartext = this_node.verify_from(alice,
                                              policy_message_kit,
                                              decrypt=True)
        except InvalidSignature:
            # TODO: Perhaps we log this?  Essentially 355.
            return Response(status=400)

        if not this_node.federated_only:
            # This splitter probably belongs somewhere canonical.
            transaction_splitter = BytestringSplitter(32)
            tx, kfrag_bytes = transaction_splitter(cleartext,
                                                   return_remainder=True)

            try:
                # Get all of the arrangements and verify that we'll be paid.
                # TODO: We'd love for this part to be impossible to reduce the risk of collusion.  #1274
                arranged_addresses = this_node.policy_agent.fetch_arrangement_addresses_from_policy_txid(
                    tx, timeout=this_node.synchronous_query_timeout)
            except TimeExhausted:
                # Alice didn't pay.  Return response with that weird status code.
                this_node.suspicious_activities_witnessed['freeriders'].append(
                    (alice, f"No transaction matching {tx}."))
                return Response(status=402)

            this_node_has_been_arranged = this_node.checksum_address in arranged_addresses
            if not this_node_has_been_arranged:
                this_node.suspicious_activities_witnessed['freeriders'].append(
                    (alice,
                     f"The transaction {tx} does not list me as a Worker - it lists {arranged_addresses}."
                     ))
                return Response(status=402)
        else:
            _tx = NO_BLOCKCHAIN_CONNECTION
            kfrag_bytes = cleartext
        kfrag = KFrag.from_bytes(kfrag_bytes)

        if not kfrag.verify(signing_pubkey=alices_verifying_key):
            raise InvalidSignature("{} is invalid".format(kfrag))

        with datastore.describe(PolicyArrangement, id_as_hex,
                                writeable=True) as policy_arrangement:
            if policy_arrangement.alice_verifying_key != alice.stamp.as_umbral_pubkey():
                raise alice.SuspiciousActivity
            policy_arrangement.kfrag = kfrag

        # TODO: Sign the arrangement here.  #495
        return ""  # TODO: Return A 200, with whatever policy metadata.

    @rest_app.route('/kFrag/<id_as_hex>', methods=["DELETE"])
    def revoke_arrangement(id_as_hex):
        """
        REST endpoint for revoking/deleting a KFrag from a node.
        """
        from nucypher.policy.collections import Revocation

        revocation = Revocation.from_bytes(request.data)
        log.info("Received revocation: {} -- for arrangement {}".format(
            bytes(revocation).hex(), id_as_hex))

        # Check that the request is the same for the provided revocation
        if id_as_hex != revocation.arrangement_id.hex():
            log.debug("Couldn't identify an arrangement with id {}".format(
                id_as_hex))
            return Response(status=400)

        try:
            with datastore.describe(PolicyArrangement,
                                    id_as_hex,
                                    writeable=True) as policy_arrangement:
                if revocation.verify_signature(
                        policy_arrangement.alice_verifying_key):
                    policy_arrangement.delete()
        except (DatastoreTransactionError, InvalidSignature) as e:
            log.debug("Exception attempting to revoke: {}".format(e))
            return Response(
                response='KFrag not found or revocation signature is invalid.',
                status=404)
        else:
            log.info("KFrag successfully removed.")
            return Response(response='KFrag deleted!', status=200)

    @rest_app.route('/kFrag/<id_as_hex>/reencrypt', methods=["POST"])
    def reencrypt_via_rest(id_as_hex):

        # Get Policy Arrangement
        try:
            arrangement_id = binascii.unhexlify(id_as_hex)
        except (binascii.Error, TypeError):
            return Response(response=b'Invalid arrangement ID', status=405)
        try:
            # Get KFrag
            # TODO: Yeah, well, what if this arrangement hasn't been enacted?  1702
            with datastore.describe(PolicyArrangement,
                                    id_as_hex) as policy_arrangement:
                kfrag = policy_arrangement.kfrag
                alice_verifying_key = policy_arrangement.alice_verifying_key
        except RecordNotFound:
            return Response(response=arrangement_id, status=404)

        # Get Work Order
        from nucypher.policy.collections import WorkOrder  # Avoid circular import
        alice_address = canonical_address_from_umbral_key(alice_verifying_key)
        work_order_payload = request.data
        work_order = WorkOrder.from_rest_payload(
            arrangement_id=arrangement_id,
            rest_payload=work_order_payload,
            ursula=this_node,
            alice_address=alice_address)
        log.info(
            f"Work Order from {work_order.bob}, signed {work_order.receipt_signature}"
        )

        # Re-encrypt
        response = this_node._reencrypt(
            kfrag=kfrag,
            work_order=work_order,
            alice_verifying_key=alice_verifying_key)

        # Now, Ursula saves this workorder to her database...
        # Note: we give the work order a random ID to store it under.
        with datastore.describe(Workorder, str(uuid.uuid4()),
                                writeable=True) as new_workorder:
            new_workorder.arrangement_id = work_order.arrangement_id
            new_workorder.bob_verifying_key = work_order.bob.stamp.as_umbral_pubkey()
            new_workorder.bob_signature = work_order.receipt_signature

        headers = {'Content-Type': 'application/octet-stream'}
        return Response(headers=headers, response=response)

    @rest_app.route('/treasure_map/<treasure_map_id>')
    def provide_treasure_map(treasure_map_id):
        headers = {'Content-Type': 'application/octet-stream'}

        treasure_map_index = bytes.fromhex(treasure_map_id)

        try:

            treasure_map = this_node.treasure_maps[treasure_map_index]
            response = Response(bytes(treasure_map), headers=headers)
            log.info("{} providing TreasureMap {}".format(
                this_node.nickname, treasure_map_id))

        except KeyError:
            log.info("{} doesn't have requested TreasureMap {}".format(
                this_node.stamp, treasure_map_id))
            response = Response(
                "No Treasure Map with ID {}".format(treasure_map_id),
                status=404,
                headers=headers)

        return response

    @rest_app.route('/treasure_map/<treasure_map_id>', methods=['POST'])
    def receive_treasure_map(treasure_map_id):
        # TODO: Any of the codepaths that trigger 4xx Responses here are also SuspiciousActivity.
        if not this_node.federated_only:
            from nucypher.policy.collections import SignedTreasureMap as _MapClass
        else:
            from nucypher.policy.collections import TreasureMap as _MapClass

        try:
            treasure_map = _MapClass.from_bytes(
                bytes_representation=request.data, verify=True)
        except _MapClass.InvalidSignature:
            log.info("Bad TreasureMap HRAC Signature; not storing {}".format(
                treasure_map_id))
            return Response("This TreasureMap's HRAC is not properly signed.",
                            status=401)

        treasure_map_index = bytes.fromhex(treasure_map_id)

        # First let's see if we already have this map.

        try:
            previously_saved_map = this_node.treasure_maps[treasure_map_index]
        except KeyError:
            pass  # We don't have the map.  We'll validate and perhaps save it.
        else:
            if previously_saved_map == treasure_map:
                return Response("Already have this map.", status=303)
            # Otherwise, if it's a different map with the same ID, we move on to validation.

        if treasure_map.public_id() == treasure_map_id:
            do_store = True
        else:
            return Response("Can't save a TreasureMap with this ID from you.",
                            status=409)

        if do_store and not this_node.federated_only:
            alice_checksum_address = this_node.policy_agent.contract.functions.getPolicyOwner(
                treasure_map._hrac[:16]).call()
            do_store = treasure_map.verify_blockchain_signature(
                checksum_address=alice_checksum_address)

        if do_store:
            log.info("{} storing TreasureMap {}".format(
                this_node, treasure_map_id))
            this_node.treasure_maps[treasure_map_index] = treasure_map
            return Response(bytes(treasure_map), status=202)
        else:
            log.info(
                "Bad TreasureMap ID; not storing {}".format(treasure_map_id))
            return Response("This TreasureMap doesn't match a paid Policy.",
                            status=402)

    @rest_app.route('/status/', methods=['GET'])
    def status():

        if request.args.get('json'):
            payload = this_node.abridged_node_details()
            response = jsonify(payload)
            return response

        else:
            headers = {"Content-Type": "text/html", "charset": "utf-8"}
            previous_states = list(
                reversed(this_node.known_nodes.states.values()))[:5]
            # Mature every known node before rendering.
            for node in this_node.known_nodes:
                node.mature()

            try:
                content = status_template.render(
                    this_node=this_node,
                    known_nodes=this_node.known_nodes,
                    previous_states=previous_states,
                    domains=serving_domains,
                    version=nucypher.__version__,
                    checksum_address=this_node.checksum_address)
            except Exception as e:
                log.debug("Template Rendering Exception: ".format(str(e)))
                raise TemplateError(str(e)) from e
            return Response(response=content, headers=headers)

    return rest_app, datastore
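
# Hedged illustration (not nucypher's actual middleware or wire format): the callback-verification
# idea from the /ping endpoint above, reduced to plain Flask + requests. The JSON payload shape,
# the https URL scheme, and the 2-second timeout are assumptions made for this sketch only.
import requests
from flask import Flask, Response, request

sketch_app = Flask("ping-sketch")


@sketch_app.route("/public_information")
def sketch_public_information():
    # Stand-in for the node's self-description; real nodes return signed metadata bytes.
    return Response(b"example-node-metadata", mimetype='application/octet-stream')


@sketch_app.route("/ping", methods=['POST'])
def sketch_ping():
    payload = request.get_json()  # hypothetical shape: {"host": ..., "port": ..., "metadata": ...}
    if request.environ['REMOTE_ADDR'] != payload['host']:
        return Response("Suspicious origin address", status=400)
    try:
        # Fetch the caller's own /public_information and compare it to what the caller claimed.
        inner = requests.get(f"https://{payload['host']}:{payload['port']}/public_information",
                             verify=False, timeout=2)
    except requests.RequestException:
        return Response("Unreachable node", status=400)
    if inner.text == payload['metadata']:
        return Response(status=200)
    return Response("Suspicious node", status=400)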
Example #11
0
class Policy(ABC):
    """
    An edict by Alice, arranged with n Ursulas, to perform re-encryption for a specific Bob.
    """

    POLICY_ID_LENGTH = 16

    log = Logger("Policy")

    class NotEnoughUrsulas(Exception):
        """
        Raised when a Policy has been used to generate Arrangements with an insufficient number of Ursulas,
        such that there are not enough Ursulas to give each KFrag to.
        """

    class EnactmentError(Exception):
        """
        Raised if one or more Ursulas failed to enact the policy.
        """

    def __init__(
        self,
        alice: Alice,
        label: bytes,
        expiration: maya.MayaDT,
        bob: 'Bob',
        kfrags: Sequence[KFrag],
        public_key: UmbralPublicKey,
        m: int,
    ):
        """
        :param kfrags:  A list of KFrags to distribute per this Policy.
        :param label: The identity of the resource to which Bob is granted access.
        """

        self.m = m
        self.n = len(kfrags)
        self.alice = alice
        self.label = label
        self.bob = bob
        self.kfrags = kfrags
        self.public_key = public_key
        self.expiration = expiration

        self._id = construct_policy_id(self.label, bytes(self.bob.stamp))
        """
        # TODO: #180 - This attribute is hanging on for dear life.
        After 180 is closed, it can be completely deprecated.

        The "hashed resource authentication code".

        A hash of:
        * Alice's public key
        * Bob's public key
        * the label

        Alice and Bob have all the information they need to construct this.
        Ursula does not, so we share it with her.
        """
        self.hrac = keccak_digest(
            bytes(self.alice.stamp) + bytes(self.bob.stamp) +
            self.label)[:HRAC_LENGTH]

    def __repr__(self):
        return f"{self.__class__.__name__}:{self._id.hex()[:6]}"

    def _propose_arrangement(
        self,
        address: ChecksumAddress,
        network_middleware: RestMiddleware,
    ) -> Tuple[Ursula, Arrangement]:
        """
        Attempt to propose an arrangement to the node with the given address.
        """

        if address not in self.alice.known_nodes:
            raise RuntimeError(f"{address} is not known")

        ursula = self.alice.known_nodes[address]
        arrangement = Arrangement.from_alice(alice=self.alice,
                                             expiration=self.expiration)

        self.log.debug(f"Proposing arrangement {arrangement} to {ursula}")
        negotiation_response = network_middleware.propose_arrangement(
            ursula, arrangement)
        status = negotiation_response.status_code

        if status == 200:
            self.log.debug(f"Arrangement accepted by {ursula}")
        else:
            message = f"Proposing arrangement to {ursula} failed with {status}"
            self.log.debug(message)
            raise RuntimeError(message)

        # We could just return the arrangement and get the Ursula object
        # from `known_nodes` later, but when we introduce slashing in FleetSensor,
        # the address can already disappear from `known_nodes` by that time.
        return (ursula, arrangement)

    @abstractmethod
    def _make_reservoir(
            self, handpicked_addresses: Sequence[ChecksumAddress]
    ) -> MergedReservoir:
        """
        Builds a `MergedReservoir` to use for drawing addresses to send proposals to.
        """
        raise NotImplementedError

    def _make_arrangements(
        self,
        network_middleware: RestMiddleware,
        handpicked_ursulas: Optional[Iterable[Ursula]] = None,
        timeout: int = 10,
    ) -> Dict[Ursula, Arrangement]:
        """
        Pick some Ursula addresses and send them arrangement proposals.
        Returns a dictionary of Ursulas to Arrangements if it managed to get `n` responses.
        """

        if handpicked_ursulas is None:
            handpicked_ursulas = []
        handpicked_addresses = [
            ursula.checksum_address for ursula in handpicked_ursulas
        ]

        reservoir = self._make_reservoir(handpicked_addresses)
        value_factory = PrefetchStrategy(reservoir, self.n)
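        # The reservoir yields candidate addresses; PrefetchStrategy (judging by
        # its use here) keeps feeding them to the worker pool below until `n`
        # proposals succeed or the reservoir runs out of values.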

        def worker(address):
            return self._propose_arrangement(address, network_middleware)

        self.alice.block_until_number_of_known_nodes_is(
            self.n, learn_on_this_thread=True, eager=True)

        worker_pool = WorkerPool(worker=worker,
                                 value_factory=value_factory,
                                 target_successes=self.n,
                                 timeout=timeout,
                                 stagger_timeout=1,
                                 threadpool_size=self.n)
        worker_pool.start()
        try:
            successes = worker_pool.block_until_target_successes()
        except (WorkerPool.OutOfValues, WorkerPool.TimedOut):
            # It's possible to raise some other exceptions here,
            # but we will use the logic below.
            successes = worker_pool.get_successes()
        finally:
            worker_pool.cancel()
            worker_pool.join()

        accepted_arrangements = {
            ursula: arrangement
            for ursula, arrangement in successes.values()
        }
        failures = worker_pool.get_failures()

        accepted_addresses = ", ".join(ursula.checksum_address
                                       for ursula in accepted_arrangements)

        if len(accepted_arrangements) < self.n:

            rejected_proposals = "\n".join(
                f"{address}: {exception}"
                for address, exception in failures.items())

            self.log.debug(
                "Could not find enough Ursulas to accept proposals.\n"
                f"Accepted: {accepted_addresses}\n"
                f"Rejected:\n{rejected_proposals}")
            raise self._not_enough_ursulas_exception()
        else:
            self.log.debug(
                f"Finished proposing arrangements; accepted: {accepted_addresses}"
            )

        return accepted_arrangements

    def _enact_arrangements(
        self,
        network_middleware: RestMiddleware,
        arrangements: Dict[Ursula, Arrangement],
        publication_transaction: Optional[HexBytes] = None,
        publish_treasure_map: bool = True,
        timeout: int = 10,
    ):
        """
        Attempts to distribute kfrags to Ursulas that accepted arrangements earlier.
        """
        def worker(ursula_and_kfrag):
            ursula, kfrag = ursula_and_kfrag
            arrangement = arrangements[ursula]

            # TODO: seems like it would be enough to just encrypt this with Ursula's public key,
            # and not create a whole capsule.
            # Can't change for now since it's node protocol.
            payload = self._make_enactment_payload(publication_transaction,
                                                   kfrag)
            message_kit, _signature = self.alice.encrypt_for(ursula, payload)

            try:
                # TODO: Concurrency
                response = network_middleware.enact_policy(
                    ursula, arrangement.id, message_kit.to_bytes())
            except network_middleware.UnexpectedResponse as e:
                status = e.status
            else:
                status = response.status_code

            return status

        value_factory = AllAtOnceFactory(list(zip(arrangements, self.kfrags)))
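        # zip() pairs each accepted Ursula with exactly one KFrag; judging by its
        # name, AllAtOnceFactory hands every pair to the worker pool up front
        # rather than prefetching in stages.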
        worker_pool = WorkerPool(worker=worker,
                                 value_factory=value_factory,
                                 target_successes=self.n,
                                 timeout=timeout,
                                 threadpool_size=self.n)

        worker_pool.start()

        # Block until everything is complete. We need all the workers to finish.
        worker_pool.join()

        successes = worker_pool.get_successes()

        if len(successes) != self.n:
            raise Policy.EnactmentError()

        # TODO: Enable re-tries?
        statuses = {
            ursula_and_kfrag[0].checksum_address: status
            for ursula_and_kfrag, status in successes.items()
        }
        if not all(status == 200 for status in statuses.values()):
            report = "\n".join(f"{address}: {status}"
                               for address, status in statuses.items())
            self.log.debug(
                f"Policy enactment failed. Request statuses:\n{report}")

            # OK, let's check: if two or more Ursulas claimed we didn't pay,
            # we need to re-evaluate our situation here.
            number_of_claims_of_freeloading = sum(
                status == 402 for status in statuses.values())

            # TODO: a better exception here?
            if number_of_claims_of_freeloading > 2:
                raise self.alice.NotEnoughNodes

            # otherwise just raise a more generic error
            raise Policy.EnactmentError()

    def _make_treasure_map(
        self,
        network_middleware: RestMiddleware,
        arrangements: Dict[Ursula, Arrangement],
    ) -> 'TreasureMap':
        """
        Creates a treasure map for given arrangements.
        """

        treasure_map = self._treasure_map_class(m=self.m)

        for ursula, arrangement in arrangements.items():
            treasure_map.add_arrangement(ursula, arrangement)

        treasure_map.prepare_for_publication(
            bob_encrypting_key=self.bob.public_keys(DecryptingPower),
            bob_verifying_key=self.bob.public_keys(SigningPower),
            alice_stamp=self.alice.stamp,
            label=self.label)

        return treasure_map

    def _make_publisher(
        self,
        treasure_map: 'TreasureMap',
        network_middleware: RestMiddleware,
    ) -> TreasureMapPublisher:

        # TODO (#2516): remove hardcoding of 8 nodes
        self.alice.block_until_number_of_known_nodes_is(
            8, timeout=2, learn_on_this_thread=True)
        target_nodes = self.bob.matching_nodes_among(self.alice.known_nodes)
        treasure_map_bytes = bytes(treasure_map)  # prevent the closure from holding the reference

        def put_treasure_map_on_node(node):
            try:
                response = network_middleware.put_treasure_map_on_node(
                    node=node, map_payload=treasure_map_bytes)
            except Exception as e:
                self.log.warn(f"Putting treasure map on {node} failed: {e}")
                raise

            if response.status_code == 201:
                return response
            else:
                message = f"Putting treasure map on {node} failed with response status: {response.status_code}"
                self.log.warn(message)
                # TODO: What happens if this is a 300 or 400 level response?
                raise Exception(message)

        return TreasureMapPublisher(worker=put_treasure_map_on_node,
                                    nodes=target_nodes)

    def enact(
        self,
        network_middleware: RestMiddleware,
        handpicked_ursulas: Optional[Iterable[Ursula]] = None,
        publish_treasure_map: bool = True,
    ) -> 'EnactedPolicy':
        """
        Attempts to enact the policy, returns an `EnactedPolicy` object on success.
        """

        arrangements = self._make_arrangements(
            network_middleware=network_middleware,
            handpicked_ursulas=handpicked_ursulas)

        self._enact_arrangements(network_middleware=network_middleware,
                                 arrangements=arrangements,
                                 publish_treasure_map=publish_treasure_map)

        treasure_map = self._make_treasure_map(
            network_middleware=network_middleware, arrangements=arrangements)
        treasure_map_publisher = self._make_publisher(
            treasure_map=treasure_map, network_middleware=network_middleware)
        revocation_kit = RevocationKit(treasure_map, self.alice.stamp)

        enacted_policy = EnactedPolicy(self._id, self.hrac, self.label,
                                       self.public_key, treasure_map,
                                       treasure_map_publisher, revocation_kit,
                                       self.alice.stamp)

        if publish_treasure_map is True:
            enacted_policy.publish_treasure_map()

        return enacted_policy

    @abstractmethod
    def _not_enough_ursulas_exception(self) -> Type[Exception]:
        """
        Returns an exception to raise when there were not enough Ursulas
        to distribute arrangements to.
        """
        raise NotImplementedError

    @abstractmethod
    def _make_enactment_payload(self,
                                publication_transaction: Optional[HexBytes],
                                kfrag: KFrag) -> bytes:
        """
        Serializes a given kfrag and policy publication transaction to send to Ursula.
        """
        raise NotImplementedError
Example #12
0
    def __init__(self, json_ipc: bool, verbose: bool, quiet: bool,
                 no_logs: bool, console_logs: bool, file_logs: bool,
                 sentry_logs: bool, log_level: str, debug: bool):

        self.log = Logger(self.__class__.__name__)

        # Session Emitter for pre and post character control engagement.
        if verbose and quiet:
            raise click.BadOptionUsage(
                option_name="quiet",
                message="--verbose and --quiet are mutually exclusive "
                "and cannot be used at the same time.")

        if verbose:
            GroupGeneralConfig.verbosity = 2
        elif quiet:
            GroupGeneralConfig.verbosity = 0
        else:
            GroupGeneralConfig.verbosity = 1

        if json_ipc:
            GlobalLoggerSettings._json_ipc = True  # TODO #1754
            emitter = JSONRPCStdoutEmitter(
                verbosity=GroupGeneralConfig.verbosity)
        else:
            emitter = StdoutEmitter(verbosity=GroupGeneralConfig.verbosity)

        self.emitter = emitter

        if verbose:
            self.emitter.message("Verbose mode is enabled", color='blue')

        # Logging
        if debug and no_logs:
            message = "--debug and --no-logs cannot be used at the same time."
            raise click.BadOptionUsage(option_name="no-logs", message=message)

        # Defaults
        if file_logs is None:
            file_logs = self.log_to_file
        if sentry_logs is None:
            sentry_logs = self.log_to_sentry

        if debug:
            console_logs = True
            file_logs = True
            sentry_logs = False
            log_level = 'debug'

        if no_logs:
            console_logs = False
            file_logs = False
            sentry_logs = False
        if json_ipc:
            console_logs = False

        GlobalLoggerSettings.set_log_level(log_level_name=log_level)

        if console_logs:
            GlobalLoggerSettings.start_console_logging()
        if file_logs:
            GlobalLoggerSettings.start_text_file_logging()
            GlobalLoggerSettings.start_json_file_logging()
        if sentry_logs:
            GlobalLoggerSettings.start_sentry_logging(self.sentry_endpoint)
        if json_ipc:
            GlobalLoggerSettings.stop_console_logging()  # JSON-RPC Protection

        self.debug = debug
        self.json_ipc = json_ipc
Example #13
0
class RetryRequestMiddleware:
    """
    Automatically retries rpc requests whenever a 429 status code is returned.
    """
    def __init__(self,
                 make_request: Callable[[RPCEndpoint, Any], RPCResponse],
                 w3: Web3,
                 retries: int = 3,
                 exponential_backoff: bool = True):
        self.w3 = w3
        self.make_request = make_request
        self.retries = retries
        self.exponential_backoff = exponential_backoff
        self.logger = Logger(self.__class__.__name__)

    def is_request_result_retry(self, result: Union[RPCResponse,
                                                    Exception]) -> bool:
        # default retry functionality - look for 429 codes
        # override for additional checks
        if isinstance(result, HTTPError):
            # HTTPError 429
            status_code = result.response.status_code
            if status_code == 429:
                return True
        elif not isinstance(result, Exception):
            # must be RPCResponse
            if 'error' in result:
                error = result['error']
                # either instance of RPCError or str
                if not isinstance(error, str) and error.get('code') == 429:
                    return True

        # not retry result
        return False

    def __call__(self, method, params):
        result = None
        num_iterations = 1 + self.retries  # initial call and subsequent retries
        for i in range(num_iterations):
            try:
                response = self.make_request(method, params)
            except Exception as e:  # type: ignore
                result = e
            else:
                result = response

            # completed request
            if not self.is_request_result_retry(result):
                if i > 0:
                    # not initial call and retry was actually performed
                    self.logger.debug(
                        f'Retried rpc request completed after {i} retries')
                break

            # max retries with no completion
            if i == self.retries:
                self.logger.warn(
                    f'RPC request retried {self.retries} times but was not completed'
                )
                break

            # backoff before next call
            if self.exponential_backoff:
                time.sleep(2 ** (i + 1))  # exponential back-off: 2^(retry number)
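                # e.g. with the default retries=3, the loop sleeps 2, 4 and 8
                # seconds between the four attempts before giving up.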

        if isinstance(result, Exception):
            raise result
        else:
            # RPCResponse
            return result
Example #14
0
class AvailabilityTracker:

    FAST_INTERVAL = 15    # Seconds
    SLOW_INTERVAL = 60 * 2
    SEEDING_DURATION = 60
    MAXIMUM_ALONE_TIME = 120

    MAXIMUM_SCORE = 10.0  # Score
    SAMPLE_SIZE = 1       # Ursulas
    SENSITIVITY = 0.5     # Threshold
    CHARGE_RATE = 0.9     # Measurement Multiplier
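    # maintain() below runs every FAST_INTERVAL seconds for the first
    # SEEDING_DURATION seconds after start(), then slows to SLOW_INTERVAL;
    # a node stuck below SAMPLE_SIZE known nodes for MAXIMUM_ALONE_TIME
    # seconds shuts itself down (if enforce_loneliness is enabled).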

    class Unreachable(RuntimeError):
        pass

    class Solitary(Unreachable):
        message = "Cannot connect to any teacher nodes."

    class Lonely(Unreachable):
        message = "Cannot connect to enough teacher nodes."

    def __init__(self, ursula, enforce_loneliness: bool = True):

        self.log = Logger(self.__class__.__name__)
        self._ursula = ursula
        self.enforce_loneliness = enforce_loneliness

        self.__excuses = dict()  # Failure reasons, keyed by epoch timestamp
        self.__score = 10  # 10 == Perfect Score
        self.warnings = {
            9: self.mild_warning,
            7: self.medium_warning,
            2: self.severe_warning,
            1: self.shutdown_everything  # 0 is unobtainable
        }
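        # issue_warnings() triggers every action whose threshold is at or above
        # the current score, so a falling score produces progressively more
        # severe messages: mild at <= 9, medium at <= 7, severe at <= 2,
        # shutdown at <= 1.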

        self._start_time = None
        self.__active_measurement = False
        self.__task = LoopingCall(self.maintain)
        self.responders = set()

    @property
    def excuses(self):
        return self.__excuses

    def mild_warning(self) -> None:
        self.log.info(f'[UNREACHABLE NOTICE (SCORE {self.score})] This node was recently reported as unreachable.')

    def medium_warning(self) -> None:
        self.log.warn(f'[UNREACHABLE CAUTION (SCORE {self.score})] This node is reporting as unreachable. '
                      f'Please check your network and firewall configuration.')

    def severe_warning(self) -> None:
        self.log.warn(f'[UNREACHABLE WARNING (SCORE {self.score})] '
                      f'Please check your network and firewall configuration. '
                      f'Auto-shutdown will commence soon if the services do not become available.')

    def shutdown_everything(self, reason=None, halt_reactor=False):
        self.log.warn(f'[NODE IS UNREACHABLE (SCORE {self.score})] Commencing auto-shutdown sequence...')
        self._ursula.stop(halt_reactor=False)
        try:
            if reason:
                raise reason(reason.message)
            raise self.Unreachable(f'{self._ursula} is unreachable (scored {self.score}).')
        finally:
            if halt_reactor:
                self._halt_reactor()

    @staticmethod
    def _halt_reactor() -> None:
        if reactor.running:
            reactor.stop()

    def handle_measurement_errors(self, crash_on_error: bool = False, *args, **kwargs) -> None:

        if args:
            failure = args[0]
            cleaned_traceback = failure.getTraceback().replace('{', '').replace('}', '')  # FIXME: Amazing.
            self.log.warn("Unhandled error during availability check: {}".format(cleaned_traceback))
            if crash_on_error:
                failure.raiseException()
        else:
            # Restart on failure
            if not self.running:
                self.log.debug(f"Availability check crashed, restarting...")
                self.start(now=True)

    def status(self) -> bool:
        """Returns current indication of availability"""
        result = self.score > (self.SENSITIVITY * self.MAXIMUM_SCORE)
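        # With the class defaults this is True while the score stays above
        # 5.0 (SENSITIVITY 0.5 * MAXIMUM_SCORE 10.0).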
        if not result:
            for time, reason in self.__excuses.items():
                self.log.info(f'[{time}] - {reason["error"]}')
        return result

    @property
    def running(self) -> bool:
        return self.__task.running

    def start(self, now: bool = False):
        if not self.running:
            self._start_time = maya.now()
            d = self.__task.start(interval=self.FAST_INTERVAL, now=now)
            d.addErrback(self.handle_measurement_errors)

    def stop(self) -> None:
        if self.running:
            self.__task.stop()

    def maintain(self) -> None:
        known_nodes_is_smaller_than_sample_size = len(self._ursula.known_nodes) < self.SAMPLE_SIZE

        # If there are no known nodes or too few known nodes, skip this round...
        # ... but not for longer than the maximum allotted alone time
        if known_nodes_is_smaller_than_sample_size:
            if not self._ursula.lonely and self.enforce_loneliness:
                now = maya.now().epoch
                delta = now - self._start_time.epoch
                if delta >= self.MAXIMUM_ALONE_TIME:
                    self.severe_warning()
                    reason = self.Solitary if not self._ursula.known_nodes else self.Lonely
                    self.shutdown_everything(reason=reason)
            return

        if self.__task.interval == self.FAST_INTERVAL:
            now = maya.now().epoch
            delta = now - self._start_time.epoch
            if delta >= self.SEEDING_DURATION:
                # Slow down
                self.__task.interval = self.SLOW_INTERVAL
                return

        if self.__active_measurement:
            self.log.debug(f"Availability check already in progress - skipping this round (Score: {self.score}). ")
            return  # Abort
        else:
            self.log.debug(f"Continuing to measure availability (Score: {self.score}).")
            self.__active_measurement = True

        try:
            self.measure_sample()
        finally:
            self.__active_measurement = False

        delta = maya.now() - self._start_time
        self.log.info(f"Current availability score is {self.score} measured since {delta}")
        self.issue_warnings()

    def issue_warnings(self, cascade: bool = True) -> None:
        warnings = sorted(self.warnings.items(), key=lambda t: t[0])
        for threshold, action in warnings:
            if self.score <= threshold:
                action()
                if not cascade:
                    # Exit after the first active warning is issued
                    return

    def sample(self, quantity: int) -> list:
        population = tuple(self._ursula.known_nodes._nodes.values())
        ursulas = random.sample(population=population, k=quantity)
        return ursulas

    @property
    def score(self) -> float:
        return self.__score

    def record(self, result: bool = None, reason: dict = None) -> None:
        """Score the result and cache it."""
        if (not result) and reason:
            self.__excuses[maya.now().epoch] = reason
        if result is None:
            return  # Actually never mind, don't score this one...
        score = int(result) + self.CHARGE_RATE * self.__score
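        # This is an exponentially weighted running score: each measurement decays
        # the previous score by CHARGE_RATE and adds the new boolean result.
        # For example, starting from a perfect 10.0:
        #   record(False) -> 0 + 0.9 * 10.0 = 9.0
        #   record(False) -> 0 + 0.9 * 9.0  = 8.1
        #   record(True)  -> 1 + 0.9 * 8.1  = 8.29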
        if score >= self.MAXIMUM_SCORE:
            self.__score = self.MAXIMUM_SCORE
        else:
            self.__score = score
        self.log.debug(f"Recorded new uptime score ({self.score})")

    def measure_sample(self, ursulas: list = None) -> None:
        """
        Measure self-availability from a sample of Ursulas or automatically from known nodes.
        Handle the possibility of unreachable or invalid remote nodes in the sample.
        """

        # TODO: Relocate?
        Unreachable = (*NodeSeemsToBeDown,
                       self._ursula.NotStaking,
                       self._ursula.node_storage.InvalidNodeCertificate,
                       self._ursula.network_middleware.UnexpectedResponse)

        if not ursulas:
            ursulas = self.sample(quantity=self.SAMPLE_SIZE)

        for ursula_or_sprout in ursulas:
            try:
                self.measure(ursula_or_sprout=ursula_or_sprout)
            except self._ursula.network_middleware.NotFound:
                # Ignore this measurement and move on because the remote node is not compatible.
                self.record(None, reason={"error": "Remote node did not support 'ping' endpoint."})
            except Unreachable as e:
                # This node is either not an Ursula, not available, does not support uptime checks, or is not staking...
                # ...do nothing and move on without changing the score.
                self.log.debug(f'{ursula_or_sprout} responded to uptime check with {e.__class__.__name__}')
                continue

    def measure(self, ursula_or_sprout: Union['Ursula', NodeSprout]) -> None:
        """Measure self-availability from a single remote node that participates in uptime checks."""
        try:
            response = self._ursula.network_middleware.check_rest_availability(initiator=self._ursula, responder=ursula_or_sprout)
        except RestMiddleware.BadRequest as e:
            self.responders.add(ursula_or_sprout.checksum_address)
            self.record(False, reason=e.reason)
        else:
            # Record response
            self.responders.add(ursula_or_sprout.checksum_address)
            if response.status_code == 200:
                self.record(True)
            elif response.status_code == 400:
                self.record(False, reason={'failed': f"{ursula_or_sprout.checksum_address} reported unavailability."})
            else:
                self.record(None, reason={"error": f"{ursula_or_sprout.checksum_address} returned {response.status_code} from 'ping' endpoint."})
Example #15
0
class Felix(Character, NucypherTokenActor):
    """
    A NuCypher ERC20 faucet / Airdrop scheduler.

    Felix is a web application that gives NuCypher *testnet* tokens to registered addresses
    with a scheduled reduction of disbursement amounts, and an HTTP endpoint
    for handling new address registration.

    The main goal of Felix is to provide a source of testnet tokens for
    research and the development of production-ready nucypher dApps.
    """

    _default_crypto_powerups = [SigningPower]

    # Intervals
    DISTRIBUTION_INTERVAL = 60  # seconds
    DISBURSEMENT_INTERVAL = 24 * 365  # hours - only distribute tokens to the same address once each YEAR.
    STAGING_DELAY = 10  # seconds

    # Disbursement
    BATCH_SIZE = 10  # transactions
    MULTIPLIER = Decimal('0.9')  # a 10% reduction of the previous disbursement
    # (not relevant until the year-long interval declared above has passed).
    MINIMUM_DISBURSEMENT = int(1e18)  # NuNits (1 NU)
    ETHER_AIRDROP_AMOUNT = int(1e17)  # Wei (.1 ether)
    MAX_INDIVIDUAL_REGISTRATIONS = 3  # Registration Limit

    # Node Discovery
    LEARNING_TIMEOUT = 30  # seconds
    _SHORT_LEARNING_DELAY = 60  # seconds
    _LONG_LEARNING_DELAY = 120  # seconds
    _ROUNDS_WITHOUT_NODES_AFTER_WHICH_TO_SLOW_DOWN = 1

    # Twisted
    _CLOCK = reactor
    _AIRDROP_QUEUE = dict()

    class NoDatabase(RuntimeError):
        pass

    def __init__(self,
                 db_filepath: str,
                 rest_host: str,
                 rest_port: int,
                 client_password: str = None,
                 crash_on_error: bool = False,
                 distribute_ether: bool = True,
                 registry: BaseContractRegistry = None,
                 *args,
                 **kwargs):

        # Character
        super().__init__(registry=registry, *args, **kwargs)
        self.log = Logger(f"felix-{self.checksum_address[-6::]}")

        # Network
        self.rest_port = rest_port
        self.rest_host = rest_host
        self.rest_app = NOT_RUNNING
        self.crash_on_error = crash_on_error

        # Database
        self.db_filepath = db_filepath
        self.db = NO_DATABASE_AVAILABLE
        self.db_engine = create_engine(f'sqlite:///{self.db_filepath}',
                                       convert_unicode=True)

        # Blockchain
        transacting_power = TransactingPower(password=client_password,
                                             account=self.checksum_address,
                                             cache=True)
        self._crypto_power.consume_power_up(transacting_power)

        self.token_agent = ContractAgency.get_agent(NucypherTokenAgent,
                                                    registry=registry)
        self.blockchain = self.token_agent.blockchain
        self.reserved_addresses = [self.checksum_address, NULL_ADDRESS]

        # Update reserved addresses with deployed contracts
        existing_entries = list(registry.enrolled_addresses)
        self.reserved_addresses.extend(existing_entries)

        # Distribution
        self.__distributed = 0  # Track NU Output
        self.__airdrop = 0  # Track Batch
        self.__disbursement = 0  # Track Quantity
        self._distribution_task = LoopingCall(f=self.airdrop_tokens)
        self._distribution_task.clock = self._CLOCK
        self.start_time = NOT_RUNNING

        self.economics = EconomicsFactory.get_economics(registry=registry)
        self.MAXIMUM_DISBURSEMENT = self.economics.maximum_allowed_locked
        self.INITIAL_DISBURSEMENT = self.economics.minimum_allowed_locked * 3

        # Optionally send ether with each token transaction
        self.distribute_ether = distribute_ether
        # Banner
        self.log.info(FELIX_BANNER.format(self.checksum_address))

    def __repr__(self):
        class_name = self.__class__.__name__
        r = f'{class_name}(checksum_address={self.checksum_address}, db_filepath={self.db_filepath})'
        return r

    def make_web_app(self):
        from flask import request
        from flask_sqlalchemy import SQLAlchemy

        # WSGI/Flask Service
        short_name = bytes(self.stamp).hex()[:6]
        self.rest_app = Flask(f"faucet-{short_name}",
                              template_folder=TEMPLATES_DIR)
        self.rest_app.config[
            'SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{self.db_filepath}'
        self.rest_app.config['MAX_CONTENT_LENGTH'] = MAX_UPLOAD_CONTENT_LENGTH

        try:
            self.rest_app.secret_key = sha256(
                os.environ['NUCYPHER_FELIX_DB_SECRET'].encode())  # uses envvar
        except KeyError:
            raise OSError(
                "The 'NUCYPHER_FELIX_DB_SECRET' is not set.  Export your application secret and try again."
            )

        # Database
        self.db = SQLAlchemy(self.rest_app)

        # Database Tables
        class Recipient(self.db.Model):
            """
            The one and only table in Felix's database; Used to track recipients and airdrop metadata.
            """

            __tablename__ = 'recipient'

            id = self.db.Column(self.db.Integer, primary_key=True)
            address = self.db.Column(self.db.String, nullable=False)
            joined = self.db.Column(self.db.DateTime,
                                    nullable=False,
                                    default=datetime.utcnow)
            total_received = self.db.Column(self.db.String,
                                            default='0',
                                            nullable=False)
            last_disbursement_amount = self.db.Column(self.db.String,
                                                      nullable=False,
                                                      default=0)
            last_disbursement_time = self.db.Column(self.db.DateTime,
                                                    nullable=True,
                                                    default=None)
            is_staking = self.db.Column(self.db.Boolean,
                                        nullable=False,
                                        default=False)

            def __repr__(self):
                return f'{self.__class__.__name__}(id={self.id})'

        self.Recipient = Recipient  # Bind to outer class

        # Flask decorators
        rest_app = self.rest_app

        #
        # REST Routes
        #
        @rest_app.route("/status", methods=['GET'])
        def status():
            with ThreadedSession(self.db_engine) as session:
                total_recipients = session.query(self.Recipient).count()
                last_recipient = session.query(self.Recipient).filter(
                    self.Recipient.last_disbursement_time.isnot(
                        None)).order_by('last_disbursement_time').first()

                last_address = last_recipient.address if last_recipient else None
                last_transaction_date = last_recipient.last_disbursement_time.isoformat(
                ) if last_recipient else None

                unfunded = session.query(self.Recipient).filter(
                    self.Recipient.last_disbursement_time.is_(None)).count()

                return json.dumps({
                    "total_recipients": total_recipients,
                    "latest_recipient": last_address,
                    "latest_disburse_date": last_transaction_date,
                    "unfunded_recipients": unfunded,
                    "state": {
                        "eth": str(self.eth_balance),
                        "NU": str(self.token_balance),
                        "address": self.checksum_address,
                        "contract_address": self.token_agent.contract_address,
                    }
                })

        @rest_app.route("/register", methods=['POST'])
        def register():
            """Handle new recipient registration via POST request."""

            new_address = (request.form.get('address')
                           or request.get_json().get('address'))

            if not new_address:
                return Response(response="no address was supplied", status=411)

            if not eth_utils.is_address(new_address):
                return Response(
                    response=
                    "an invalid ethereum address was supplied.  please ensure the address is a proper checksum.",
                    status=400)
            else:
                new_address = eth_utils.to_checksum_address(new_address)

            if new_address in self.reserved_addresses:
                return Response(
                    response=
                    "sorry, that address is reserved and cannot receive funds.",
                    status=403)

            try:
                with ThreadedSession(self.db_engine) as session:

                    existing = Recipient.query.filter_by(
                        address=new_address).all()
                    if len(existing) > self.MAX_INDIVIDUAL_REGISTRATIONS:
                        # Address already exists; Abort
                        self.log.debug(
                            f"{new_address} is already enrolled {self.MAX_INDIVIDUAL_REGISTRATIONS} times."
                        )
                        return Response(
                            response=
                            f"{new_address} requested too many times  -  Please use another address.",
                            status=409)

                    # Create the record
                    recipient = Recipient(address=new_address,
                                          joined=datetime.now())
                    session.add(recipient)
                    session.commit()

            except Exception as e:
                # Pass along exceptions to the logger
                self.log.critical(str(e))
                raise

            else:
                return Response(status=200)  # TODO

        return rest_app

    def create_tables(self) -> None:
        self.make_web_app()
        return self.db.create_all(app=self.rest_app)

    def start(self,
              host: str,
              port: int,
              web_services: bool = True,
              distribution: bool = True,
              crash_on_error: bool = False):

        self.crash_on_error = crash_on_error

        if self.start_time is not NOT_RUNNING:
            raise RuntimeError("Felix is already running.")

        self.start_time = maya.now()
        payload = {"wsgi": self.rest_app, "http_port": port}
        deployer = HendrixDeploy(action="start", options=payload)

        if distribution is True:
            self.start_distribution()

        if web_services is True:
            deployer.run()  # <-- Blocking call (Reactor)

    def start_distribution(self, now: bool = True) -> bool:
        """Start token distribution"""
        self.log.info(NU_BANNER)
        self.log.info("Starting NU Token Distribution | START")
        if self.token_balance == NU.ZERO():
            raise self.ActorError(
                f"Felix address {self.checksum_address} has 0 NU tokens.")
        self._distribution_task.start(interval=self.DISTRIBUTION_INTERVAL,
                                      now=now)
        return True

    def stop_distribution(self) -> bool:
        """Stop token distribution"""
        self.log.info("Stopping NU Token Distribution | STOP")
        self._distribution_task.stop()
        return True

    def __calculate_disbursement(self, recipient) -> int:
        """Calculate the next reward for a recipient once they are selected for distribution"""

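        # Illustration with hypothetical amounts: if INITIAL_DISBURSEMENT were
        # 45,000 NU, the following disbursements would be ceil(45,000 * 0.9) =
        # 40,500 NU, then 36,450 NU, and so on; the amount never drops below
        # MINIMUM_DISBURSEMENT, and falls back to the minimum once the recipient
        # has received MAXIMUM_DISBURSEMENT in total.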
        # Initial Reward - sets the future rates
        if recipient.last_disbursement_time is None:
            amount = self.INITIAL_DISBURSEMENT

        # Cap reached; we'll continue to leak the minimum disbursement
        elif int(recipient.total_received) >= self.MAXIMUM_DISBURSEMENT:
            amount = self.MINIMUM_DISBURSEMENT

        # Calculate the next disbursement
        else:
            amount = math.ceil(
                int(recipient.last_disbursement_amount) * self.MULTIPLIER)
            if amount < self.MINIMUM_DISBURSEMENT:
                amount = self.MINIMUM_DISBURSEMENT

        return int(amount)

    def __transfer(self, disbursement: int, recipient_address: str) -> str:
        """Perform a single token transfer transaction from one account to another."""

        # Re-unlock from cache
        self.blockchain.transacting_power.activate()

        self.__disbursement += 1
        receipt = self.token_agent.transfer(
            amount=disbursement,
            target_address=recipient_address,
            sender_address=self.checksum_address)
        txhash = receipt['transactionHash']
        if self.distribute_ether:
            ether = self.ETHER_AIRDROP_AMOUNT
            transaction = {
                'to': recipient_address,
                'from': self.checksum_address,
                'value': ether,
                'gasPrice': self.blockchain.client.gas_price
            }
            ether_txhash = self.blockchain.client.send_transaction(transaction)

            self.log.info(
                f"Disbursement #{self.__disbursement} OK | NU {txhash.hex()[-6:]} | ETH {ether_txhash.hex()[-6:]} "
                f"({str(NU(disbursement, 'NuNit'))} + {self.ETHER_AIRDROP_AMOUNT} wei) -> {recipient_address}"
            )

        else:
            self.log.info(
                f"Disbursement #{self.__disbursement} OK | {txhash.hex()[-6:]} | "
                f"({str(NU(disbursement, 'NuNit'))} -> {recipient_address})")

        return txhash

    def airdrop_tokens(self):
        """
        Calculate airdrop eligibility via faucet registration
        and transfer tokens to selected recipients.
        """

        with ThreadedSession(self.db_engine) as session:
            population = session.query(self.Recipient).count()

        message = f"{population} registered faucet recipients; " \
                  f"Distributed {str(NU(self.__distributed, 'NuNit'))} since {self.start_time.slang_time()}."
        self.log.debug(message)
        if population == 0:
            return  # Abort - no recipients are registered.

        # For filtration
        since = datetime.now() - timedelta(hours=self.DISBURSEMENT_INTERVAL)

        datetime_filter = or_(self.Recipient.last_disbursement_time <= since,
                              self.Recipient.last_disbursement_time ==
                              None)  # This must be `==` not `is`

        with ThreadedSession(self.db_engine) as session:
            candidates = session.query(
                self.Recipient).filter(datetime_filter).all()
            if not candidates:
                self.log.info("No eligible recipients this round.")
                return

        # Discard invalid addresses, in-depth
        invalid_addresses = list()

        def siphon_invalid_entries(candidate):
            address_is_valid = eth_utils.is_checksum_address(candidate.address)
            if not address_is_valid:
                invalid_addresses.append(candidate.address)
            return address_is_valid

        candidates = list(filter(siphon_invalid_entries, candidates))

        if invalid_addresses:
            self.log.info(
                f"{len(invalid_addresses)} invalid entries detected. Pruning database."
            )

            # TODO: Is this needed? - Invalid entries are rejected at the endpoint view.
            # Prune database of invalid records
            # with ThreadedSession(self.db_engine) as session:
            #     bad_eggs = session.query(self.Recipient).filter(self.Recipient.address in invalid_addresses).all()
            #     for egg in bad_eggs:
            #         session.delete(egg.id)
            #     session.commit()

        if not candidates:
            self.log.info("No eligible recipients this round.")
            return

        d = threads.deferToThread(self.__do_airdrop, candidates=candidates)
        self._AIRDROP_QUEUE[self.__airdrop] = d
        return d

    def __do_airdrop(self, candidates: list):

        self.log.info(f"Staging Airdrop #{self.__airdrop}.")

        # Staging
        staged_disbursements = [(r, self.__calculate_disbursement(recipient=r))
                                for r in candidates]
        batches = list(
            staged_disbursements[index:index + self.BATCH_SIZE]
            for index in range(0, len(staged_disbursements), self.BATCH_SIZE))
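        # e.g. with BATCH_SIZE = 10, 23 staged disbursements become batches of
        # 10, 10 and 3 recipients, processed serially below.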
        total_batches = len(batches)

        self.log.info("====== Staged Airdrop ======")
        for recipient, disbursement in staged_disbursements:
            self.log.info(f"{recipient.address} ... {str(disbursement)[:-18]}")
        self.log.info("==========================")

        # Staging Delay
        self.log.info(
            f"Airdrop will commence in {self.STAGING_DELAY} seconds...")
        if self.STAGING_DELAY > 3:
            time.sleep(self.STAGING_DELAY - 3)
        for i in range(3):
            time.sleep(1)
            self.log.info(f"NU Token airdrop starting in {3 - i} seconds...")

        # Slowly, in series...
        for batch, staged_disbursement in enumerate(batches, start=1):
            self.log.info(f"======= Batch #{batch} ========")

            for recipient, disbursement in staged_disbursement:

                # Perform the transfer... leaky faucet.
                self.__transfer(disbursement=disbursement,
                                recipient_address=recipient.address)
                self.__distributed += disbursement

                # Update the database record
                recipient.last_disbursement_amount = str(disbursement)
                recipient.total_received = str(
                    int(recipient.total_received) + disbursement)
                recipient.last_disbursement_time = datetime.now()

                self.db.session.add(recipient)
                self.db.session.commit()

            # end inner loop
            self.log.info(
                f"Completed Airdrop #{self.__airdrop} Batch #{batch} of {total_batches}."
            )

        # end outer loop
        now = maya.now()
        next_interval_slang = now.add(
            seconds=self.DISTRIBUTION_INTERVAL).slang_time()
        self.log.info(
            f"Completed Airdrop #{self.__airdrop}; Next airdrop is {next_interval_slang}."
        )

        del self._AIRDROP_QUEUE[self.__airdrop]
        self.__airdrop += 1
Example #16
0
class Policy(ABC):
    """
    An edict by Alice, arranged with n Ursulas, to perform re-encryption for a specific Bob
    for a specific path.

    Once Alice is ready to enact a Policy, she generates KFrags, which become part of the Policy.

    Each Ursula is offered an Arrangement (see above) for a given Policy by Alice.

    Once Alice has secured agreement with n Ursulas to enact a Policy, she sends each a KFrag,
    and generates a TreasureMap for the Policy, recording which Ursulas got a KFrag.
    """

    POLICY_ID_LENGTH = 16
    _arrangement_class = NotImplemented

    log = Logger("Policy")

    class Rejected(RuntimeError):
        """Too many Ursulas rejected"""

    def __init__(self,
                 alice: Alice,
                 label: bytes,
                 expiration: maya.MayaDT,
                 bob: 'Bob' = None,
                 kfrags: Tuple[KFrag, ...] = (UNKNOWN_KFRAG, ),
                 public_key=None,
                 m: int = None,
                 alice_signature=NOT_SIGNED) -> None:
        """
        :param kfrags:  A list of KFrags to distribute per this Policy.
        :param label: The identity of the resource to which Bob is granted access.
        """
        self.alice = alice
        self.label = label
        self.bob = bob
        self.kfrags = kfrags
        self.public_key = public_key
        self._id = construct_policy_id(self.label, bytes(self.bob.stamp))
        self.treasure_map = self._treasure_map_class(m=m)
        self.expiration = expiration

        self._accepted_arrangements = set()  # type: Set[Arrangement]
        self._rejected_arrangements = set()  # type: Set[Arrangement]
        self._spare_candidates = set()  # type: Set[Ursula]

        self._enacted_arrangements = OrderedDict()
        self._published_arrangements = OrderedDict()

        self.alice_signature = alice_signature  # TODO: This is unused / To Be Implemented?

        self.publishing_mutex = None

    class MoreKFragsThanArrangements(TypeError):
        """
        Raised when a Policy has been used to generate Arrangements with an insufficient
        number of Ursulas, such that we don't have enough KFrags to give to each Ursula.
        """

    @property
    def n(self) -> int:
        return len(self.kfrags)

    @property
    def id(self) -> bytes:
        return self._id

    def __repr__(self):
        return f"{self.__class__.__name__}:{self.id.hex()[:6]}"

    @property
    def accepted_ursulas(self) -> Set[Ursula]:
        return {
            arrangement.ursula
            for arrangement in self._accepted_arrangements
        }

    def hrac(self) -> bytes:
        """
        # TODO: #180 - This function is hanging on for dear life.  After 180 is closed, it can be completely deprecated.

        The "hashed resource authentication code".

        A hash of:
        * Alice's public key
        * Bob's public key
        * the label

        Alice and Bob have all the information they need to construct this.
        Ursula does not, so we share it with her.
        """
        return keccak_digest(
            bytes(self.alice.stamp) + bytes(self.bob.stamp) +
            self.label)[:HRAC_LENGTH]

    async def put_treasure_map_on_node(self, node, network_middleware):
        response = network_middleware.put_treasure_map_on_node(
            node=node, map_payload=bytes(self.treasure_map))
        return response

    def publish_treasure_map(
            self,
            network_middleware: RestMiddleware,
            blockchain_signer: Callable = None) -> NodeEngagementMutex:
        self.treasure_map.prepare_for_publication(
            self.bob.public_keys(DecryptingPower),
            self.bob.public_keys(SigningPower), self.alice.stamp, self.label)
        if blockchain_signer is not None:
            self.treasure_map.include_blockchain_signature(blockchain_signer)

        self.alice.block_until_number_of_known_nodes_is(
            8, timeout=2, learn_on_this_thread=True)

        target_nodes = self.bob.matching_nodes_among(self.alice.known_nodes)
        self.publishing_mutex = NodeEngagementMutex(
            callable_to_engage=self.put_treasure_map_on_node,
            nodes=target_nodes,
            network_middleware=network_middleware)

        self.publishing_mutex.start()

    def credential(self, with_treasure_map=True):
        """
        Creates a PolicyCredential for portable access to the policy via
        Alice or Bob. By default, it will include the treasure_map for the
        policy unless `with_treasure_map` is False.
        """

        treasure_map = self.treasure_map
        if not with_treasure_map:
            treasure_map = None
        credential = PolicyCredential(alice_verifying_key=self.alice.stamp,
                                      label=self.label,
                                      expiration=self.expiration,
                                      policy_pubkey=self.public_key,
                                      treasure_map=treasure_map)
        return credential

    def __assign_kfrags(self) -> Generator[Arrangement, None, None]:

        if len(self._accepted_arrangements) < self.n:
            raise self.MoreKFragsThanArrangements(
                "Not enough candidate arrangements. "
                "Call make_arrangements to make more.")

        for kfrag in self.kfrags:
            for arrangement in self._accepted_arrangements:
                if arrangement not in self._enacted_arrangements.values():
                    arrangement.kfrag = kfrag
                    self._enacted_arrangements[kfrag] = arrangement
                    yield arrangement
                    break  # This KFrag is now assigned; break the inner loop and go back to assign other kfrags.
            else:
                # We didn't assign that KFrag.  Trouble.
                # This is ideally an impossible situation, because we don't typically
                # enter this method unless we've already had n or more Arrangements accepted.
                raise self.MoreKFragsThanArrangements(
                    "Not enough accepted arrangements to assign all KFrags.")
        return

    def enact(self, network_middleware, publish_treasure_map=True) -> dict:
        """
        Assign kfrags to ursulas_on_network, and distribute them via REST,
        populating enacted_arrangements
        """
        for arrangement in self.__assign_kfrags():
            arrangement_message_kit = arrangement.encrypt_payload_for_ursula()

            try:
                # TODO: Concurrency
                response = network_middleware.enact_policy(
                    arrangement.ursula, arrangement.id,
                    arrangement_message_kit.to_bytes())
            except network_middleware.UnexpectedResponse as e:
                arrangement.status = e.status
            else:
                arrangement.status = response.status_code

            # TODO: Handle problem here - if the arrangement is bad, deal with it.
            self.treasure_map.add_arrangement(arrangement)

        else:
            # OK, let's check: if two or more Ursulas claimed we didn't pay,
            # we need to re-evaluate our situation here.
            arrangement_statuses = [
                a.status for a in self._accepted_arrangements
            ]
            number_of_claims_of_freeloading = sum(
                status == 402 for status in arrangement_statuses)

            if number_of_claims_of_freeloading > 2:
                raise self.alice.NotEnoughNodes  # TODO: Clean this up and enable re-tries.

            self.treasure_map.check_for_sufficient_destinations()

            # TODO: Leave a note to try any failures later.
            pass

            # ...After *all* the arrangements are enacted
            # Create Alice's revocation kit
            self.revocation_kit = RevocationKit(self, self.alice.stamp)
            self.alice.add_active_policy(self)

            if publish_treasure_map is True:
                return self.publish_treasure_map(
                    network_middleware=network_middleware
                )  # TODO: blockchain_signer?

    def propose_arrangement(self, network_middleware, arrangement) -> bool:
        negotiation_response = network_middleware.propose_arrangement(
            arrangement=arrangement)  # Wow, we aren't even passing node here.

        # TODO: check out the response: need to assess the result and see if we're actually good to go.
        arrangement_is_accepted = negotiation_response.status_code == 200

        bucket = self._accepted_arrangements if arrangement_is_accepted else self._rejected_arrangements
        bucket.add(arrangement)

        return arrangement_is_accepted

    def make_arrangements(
        self,
        network_middleware: RestMiddleware,
        handpicked_ursulas: Optional[Set[Ursula]] = None,
        discover_on_this_thread: bool = True,
        *args,
        **kwargs,
    ) -> None:

        sampled_ursulas = self.sample(
            handpicked_ursulas=handpicked_ursulas,
            discover_on_this_thread=discover_on_this_thread)

        if len(sampled_ursulas) < self.n:
            raise self.MoreKFragsThanArrangements(
                "To make a Policy in federated mode, you need to designate *all* "
                "the Ursulas you need (in this case, {}); there's no other way to "
                "know which nodes to use.  Either pass them here or when you make "
                "the Policy.".format(self.n))

        # TODO: One of these layers needs to add concurrency.
        self._propose_arrangements(network_middleware=network_middleware,
                                   candidate_ursulas=sampled_ursulas,
                                   *args,
                                   **kwargs)

        if len(self._accepted_arrangements) < self.n:
            formatted_offenders = '\n'.join(
                f'{u.checksum_address}@{u.rest_url()}'
                for u in sampled_ursulas)
            raise self.Rejected(
                f'Selected Ursulas rejected too many arrangements '
                f'- only {len(self._accepted_arrangements)} of {self.n} accepted.\n'
                f'Offending nodes: \n{formatted_offenders}\n')

    @abstractmethod
    def make_arrangement(self, ursula: Ursula, *args, **kwargs):
        raise NotImplementedError

    @abstractmethod
    def sample_essential(self, *args, **kwargs) -> Set[Ursula]:
        raise NotImplementedError

    def sample(
        self,
        handpicked_ursulas: Optional[Set[Ursula]] = None,
        discover_on_this_thread: bool = False,
    ) -> Set[Ursula]:
        selected_ursulas = set(
            handpicked_ursulas) if handpicked_ursulas else set()

        # Calculate the target sample quantity
        if self.n - len(selected_ursulas) > 0:
            sampled_ursulas = self.sample_essential(
                handpicked_ursulas=selected_ursulas,
                discover_on_this_thread=discover_on_this_thread)
            selected_ursulas.update(sampled_ursulas)

        return selected_ursulas

    def _propose_arrangements(self,
                              network_middleware: RestMiddleware,
                              candidate_ursulas: Set[Ursula],
                              consider_everyone: bool = False,
                              *args,
                              **kwargs) -> None:

        for index, selected_ursula in enumerate(candidate_ursulas):
            arrangement = self.make_arrangement(ursula=selected_ursula,
                                                *args,
                                                **kwargs)
            try:
                is_accepted = self.propose_arrangement(
                    arrangement=arrangement,
                    network_middleware=network_middleware)

            except NodeSeemsToBeDown as e:  # TODO: #355 Also catch InvalidNode here?
                # This arrangement won't be added to the accepted bucket.
                # If too many nodes are down, it will fail in make_arrangements.
                # Also TODO: Probably log this or something at this stage.
                continue

            else:
                # Bucket the arrangements
                if is_accepted:
                    self.log.debug(
                        f"Arrangement accepted by {selected_ursula}")
                    self._accepted_arrangements.add(arrangement)
                    accepted = len(self._accepted_arrangements)
                    if accepted == self.n and not consider_everyone:
                        try:
                            spares = set(list(candidate_ursulas)[index + 1:])
                            self._spare_candidates.update(spares)
                        except IndexError:
                            self._spare_candidates = set()
                        break
                else:
                    self.log.debug(
                        f"Arrangement failed with {selected_ursula}")
                    self._rejected_arrangements.add(arrangement)
Example #17
0
from nucypher.characters.control.emitters import JSONRPCStdoutEmitter
from nucypher.characters.lawful import Ursula
from nucypher.cli import utils
from nucypher.cli.literature import SUCCESSFUL_DESTRUCTION
from nucypher.cli.main import nucypher_cli
from nucypher.config.characters import BobConfiguration
from nucypher.config.constants import TEMPORARY_DOMAIN
from nucypher.crypto.kits import UmbralMessageKit
from nucypher.crypto.powers import SigningPower
from nucypher.utilities.logging import GlobalLoggerSettings, Logger
from tests.constants import (FAKE_PASSWORD_CONFIRMED,
                             INSECURE_DEVELOPMENT_PASSWORD,
                             MOCK_CUSTOM_INSTALLATION_PATH)

log = Logger()


@mock.patch('nucypher.config.characters.BobConfiguration.default_filepath',
            return_value='/non/existent/file')
def test_missing_configuration_file(default_filepath_mock, click_runner):
    cmd_args = ('bob', 'run')
    result = click_runner.invoke(nucypher_cli,
                                 cmd_args,
                                 catch_exceptions=False)
    assert result.exit_code != 0
    assert default_filepath_mock.called
    assert "nucypher bob init" in result.output


def test_bob_public_keys(click_runner):
Example #18
0
class FleetSensor:
    """
    A representation of a fleet of NuCypher nodes.
    """
    _checksum = NO_KNOWN_NODES.bool_value(False)
    _nickname = NO_KNOWN_NODES
    _tracking = False
    most_recent_node_change = NO_KNOWN_NODES
    snapshot_splitter = BytestringSplitter(32, 4)
    log = Logger("Learning")
    FleetState = namedtuple(
        "FleetState", ("nickname", "icon", "nodes", "updated", "checksum"))

    def __init__(self, domain: str):
        self.domain = domain
        self.additional_nodes_to_track = []
        self.updated = maya.now()
        self._nodes = OrderedDict()
        self._marked = defaultdict(list)  # Beginning of bucketing.
        self.states = OrderedDict()

    def __setitem__(self, checksum_address, node_or_sprout):
        if node_or_sprout.domain == self.domain:
            self._nodes[checksum_address] = node_or_sprout

            if self._tracking:
                self.log.info(
                    "Updating fleet state after saving node {}".format(
                        node_or_sprout))
                self.record_fleet_state()
        else:
            msg = f"Rejected node {node_or_sprout} because its domain is '{node_or_sprout.domain}' but we're only tracking '{self.domain}'"
            self.log.warn(msg)

    def __getitem__(self, checksum_address):
        return self._nodes[checksum_address]

    def __bool__(self):
        return bool(self._nodes)

    def __contains__(self, item):
        return item in self._nodes.keys() or item in self._nodes.values()

    def __iter__(self):
        yield from self._nodes.values()

    def __len__(self):
        return len(self._nodes)

    def __eq__(self, other):
        return self._nodes == other._nodes

    def __repr__(self):
        return self._nodes.__repr__()

    def population(self):
        return len(self) + len(self.additional_nodes_to_track)

    @property
    def checksum(self):
        return self._checksum

    @checksum.setter
    def checksum(self, checksum_value):
        self._checksum = checksum_value
        self._nickname = Nickname.from_seed(checksum_value, length=1)

    @property
    def nickname(self):
        return self._nickname

    @property
    def icon(self) -> str:
        if self.nickname is NO_KNOWN_NODES:
            return str(NO_KNOWN_NODES)
        return self.nickname.icon

    def addresses(self):
        return self._nodes.keys()

    def snapshot(self):
        fleet_state_checksum_bytes = binascii.unhexlify(self.checksum)
        fleet_state_updated_bytes = self.updated.epoch.to_bytes(
            4, byteorder="big")
        return fleet_state_checksum_bytes + fleet_state_updated_bytes

    def record_fleet_state(self, additional_nodes_to_track=None):
        if additional_nodes_to_track:
            self.additional_nodes_to_track.extend(additional_nodes_to_track)

        if not self._nodes:
            # No news here.
            return
        sorted_nodes = self.sorted()

        sorted_nodes_joined = b"".join(bytes(n) for n in sorted_nodes)
        checksum = keccak_digest(sorted_nodes_joined).hex()
        if checksum not in self.states:
            self.checksum = keccak_digest(b"".join(
                bytes(n) for n in self.sorted())).hex()
            self.updated = maya.now()
            # For now we store the sorted node list.  Someday we probably spin this out into
            # its own class, FleetState, and use it as the basis for partial updates.
            new_state = self.FleetState(nickname=self.nickname,
                                        nodes=sorted_nodes,
                                        icon=self.icon,
                                        updated=self.updated,
                                        checksum=self.checksum)
            self.states[checksum] = new_state
            return checksum, new_state

    def start_tracking_state(self, additional_nodes_to_track=None):
        if additional_nodes_to_track is None:
            additional_nodes_to_track = list()
        self.additional_nodes_to_track.extend(additional_nodes_to_track)
        self._tracking = True
        self.record_fleet_state()

    def sorted(self):
        nodes_to_consider = list(
            self._nodes.values()) + self.additional_nodes_to_track
        return sorted(nodes_to_consider, key=lambda n: n.checksum_address)

    def shuffled(self):
        nodes_we_know_about = list(self._nodes.values())
        random.shuffle(nodes_we_know_about)
        return nodes_we_know_about

    def abridged_states_dict(self):
        abridged_states = {}
        for k, v in self.states.items():
            abridged_states[k] = self.abridged_state_details(v)
        return abridged_states

    @staticmethod
    def abridged_state_details(state):
        return {
            "nickname": str(state.nickname),
            # FIXME: generalize in case we want to extend the number of symbols in the state nickname
            "symbol": state.nickname.characters[0].symbol,
            "color_hex": state.nickname.characters[0].color_hex,
            "color_name": state.nickname.characters[0].color_name,
            "updated": state.updated.rfc2822(),
        }

    def mark_as(self, label: Exception, node: "Teacher"):
        self._marked[label].append(node)

        if self._nodes.get(node.checksum_address):
            del self._nodes[node.checksum_address]
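
The fleet-state checksum above is simply a keccak digest over the concatenated bytes of the sorted node set. Here is a toy illustration of that idea, using `hashlib.sha3_256` in place of nucypher's `keccak_digest` and plain strings in place of node objects.

import hashlib
from collections import OrderedDict


class ToyFleet:
    """Keyed by checksum address, like FleetSensor._nodes above."""

    def __init__(self):
        self._nodes = OrderedDict()

    def __setitem__(self, checksum_address: str, node: str):
        self._nodes[checksum_address] = node

    def state_checksum(self) -> str:
        # Hash the concatenation of the sorted members.
        joined = b"".join(node.encode() for node in sorted(self._nodes.values()))
        return hashlib.sha3_256(joined).hexdigest()


fleet = ToyFleet()
fleet['0xAA'] = 'node-a'
fleet['0xBB'] = 'node-b'
assert len(fleet.state_checksum()) == 64  # a 32-byte digest, hex-encoded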
Example #19
0
    def __init__(
            self,

            # Base
            emitter=None,
            config_root: str = None,
            filepath: str = None,

            # Mode
            dev_mode: bool = False,
            federated_only: bool = False,

            # Identity
            checksum_address: str = None,
            crypto_power: CryptoPower = None,

            # Keyring
            keyring: NucypherKeyring = None,
            keyring_root: str = None,

            # Learner
            learn_on_same_thread: bool = False,
            abort_on_learning_error: bool = False,
            start_learning_now: bool = True,

            # Network
            controller_port: int = None,
            domain: str = DEFAULT_DOMAIN,
            interface_signature: Signature = None,
            network_middleware: RestMiddleware = None,
            lonely: bool = False,

            # Node Storage
            known_nodes: set = None,
            node_storage: NodeStorage = None,
            reload_metadata: bool = True,
            save_metadata: bool = True,

            # Blockchain
            poa: bool = None,
            light: bool = False,
            sync: bool = False,
            provider_uri: str = None,
            provider_process=None,
            gas_strategy: Union[Callable, str] = DEFAULT_GAS_STRATEGY,
            signer_uri: str = None,

            # Registry
            registry: BaseContractRegistry = None,
            registry_filepath: str = None,

            # Deployed Workers
            worker_data: dict = None):

        self.log = Logger(self.__class__.__name__)
        UNINITIALIZED_CONFIGURATION.bool_value(False)

        # Identity
        # NOTE: NodeConfigurations can only be used with Self-Characters
        self.is_me = True
        self.checksum_address = checksum_address

        # Keyring
        self.crypto_power = crypto_power
        self.keyring = keyring or NO_KEYRING_ATTACHED
        self.keyring_root = keyring_root or UNINITIALIZED_CONFIGURATION

        # Contract Registry
        if registry and registry_filepath:
            if registry.filepath != registry_filepath:
                error = f"Inconsistent registry filepaths for '{registry.filepath}' and '{registry_filepath}'."
                raise ValueError(error)
            else:
                self.log.warn(
                    f"Registry and registry filepath were both passed.")
        self.registry = registry or NO_BLOCKCHAIN_CONNECTION.bool_value(False)
        self.registry_filepath = registry_filepath or UNINITIALIZED_CONFIGURATION

        # Blockchain
        self.poa = poa
        self.is_light = light
        self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        self.provider_process = provider_process or NO_BLOCKCHAIN_CONNECTION
        self.signer_uri = signer_uri or None

        # Learner
        self.federated_only = federated_only
        self.domain = domain
        self.learn_on_same_thread = learn_on_same_thread
        self.abort_on_learning_error = abort_on_learning_error
        self.start_learning_now = start_learning_now
        self.save_metadata = save_metadata
        self.reload_metadata = reload_metadata
        self.known_nodes = known_nodes or set()  # handpicked
        self.lonely = lonely

        # Configuration
        self.__dev_mode = dev_mode
        self.config_file_location = filepath or UNINITIALIZED_CONFIGURATION
        self.config_root = UNINITIALIZED_CONFIGURATION

        # Deployed Workers
        self.worker_data = worker_data

        #
        # Federated vs. Blockchain arguments consistency
        #

        #
        # Federated
        #

        if self.federated_only:
            # Check for incompatible values
            blockchain_args = {
                'filepath': registry_filepath,
                'poa': poa,
                'provider_process': provider_process,
                'provider_uri': provider_uri,
                'gas_strategy': gas_strategy
            }
            if any(blockchain_args.values()):
                bad_args = ", ".join(f"{arg}={val}"
                                     for arg, val in blockchain_args.items()
                                     if val)
                self.log.warn(
                    f"Arguments {bad_args} are incompatible with federated_only. "
                    f"Overridden with a sane default.")

                # Clear decentralized attributes to ensure consistency with a
                # federated configuration.
                self.poa = False
                self.is_light = False
                self.provider_uri = None
                self.provider_process = None
                self.registry_filepath = None
                self.gas_strategy = None

        #
        # Decentralized
        #

        else:
            self.gas_strategy = gas_strategy
            is_initialized = BlockchainInterfaceFactory.is_interface_initialized(
                provider_uri=self.provider_uri)
            if not is_initialized and provider_uri:
                BlockchainInterfaceFactory.initialize_interface(
                    provider_uri=self.provider_uri,
                    poa=self.poa,
                    light=self.is_light,
                    provider_process=self.provider_process,
                    sync=sync,
                    emitter=emitter,
                    gas_strategy=gas_strategy)
            else:
                self.log.warn(
                    f"Using existing blockchain interface connection ({self.provider_uri})."
                )

            if not self.registry:
                # TODO: These two code blocks are untested.
                if not self.registry_filepath:  # TODO: Registry URI  (goerli://speedynet.json) :-)
                    self.log.info(f"Fetching latest registry from source.")
                    self.registry = InMemoryContractRegistry.from_latest_publication(
                        network=self.domain)
                else:
                    self.registry = LocalContractRegistry(
                        filepath=self.registry_filepath)
                    self.log.info(f"Using local registry ({self.registry}).")

        if dev_mode:
            self.__temp_dir = UNINITIALIZED_CONFIGURATION
            self.__setup_node_storage()
            self.initialize(password=DEVELOPMENT_CONFIGURATION)
        else:
            self.__temp_dir = LIVE_CONFIGURATION
            self.config_root = config_root or self.DEFAULT_CONFIG_ROOT
            self._cache_runtime_filepaths()
            self.__setup_node_storage(node_storage=node_storage)

        # Network
        self.controller_port = controller_port or self.DEFAULT_CONTROLLER_PORT
        self.network_middleware = network_middleware or self.DEFAULT_NETWORK_MIDDLEWARE(
            registry=self.registry)
        self.interface_signature = interface_signature

        super().__init__(filepath=self.config_file_location,
                         config_root=self.config_root)
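
The federated-versus-blockchain consistency check in this constructor amounts to: if `federated_only` is set, warn about any blockchain-specific arguments that were supplied and clear them. Below is a standalone sketch of that validation pattern, built around an illustrative dataclass rather than nucypher's configuration class.

import logging
from dataclasses import dataclass
from typing import Optional

log = logging.getLogger("toy-configuration")


@dataclass
class ToyConfig:
    federated_only: bool = False
    provider_uri: Optional[str] = None
    registry_filepath: Optional[str] = None
    poa: Optional[bool] = None

    def __post_init__(self):
        if not self.federated_only:
            return
        blockchain_args = {'provider_uri': self.provider_uri,
                           'registry_filepath': self.registry_filepath,
                           'poa': self.poa}
        bad_args = [name for name, value in blockchain_args.items() if value]
        if bad_args:
            log.warning("Arguments %s are incompatible with federated_only; "
                        "overriding with defaults.", ", ".join(bad_args))
            self.provider_uri = None
            self.registry_filepath = None
            self.poa = None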
Example #20
0
class TesterBlockchain(BlockchainDeployerInterface):
    """
    Blockchain subclass with additional test utility methods and options.
    """

    __test__ = False  # prohibit pytest from collecting this object as a test

    # Solidity
    SOURCES: List[SourceBundle] = [
        SourceBundle(base_path=SOLIDITY_SOURCE_ROOT,
                     other_paths=(TEST_SOLIDITY_SOURCE_ROOT, ))
    ]

    # Web3
    GAS_STRATEGIES = {
        **BlockchainDeployerInterface.GAS_STRATEGIES, 'free':
        free_gas_price_strategy
    }
    PROVIDER_URI = PYEVM_DEV_URI
    DEFAULT_GAS_STRATEGY = 'free'

    # Reserved addresses
    _ETHERBASE = 0
    _ALICE = 1
    _BOB = 2
    _FIRST_STAKER = 5
    _FIRST_URSULA = _FIRST_STAKER + NUMBER_OF_STAKERS_IN_BLOCKCHAIN_TESTS

    # Internal
    __STAKERS_RANGE = range(NUMBER_OF_STAKERS_IN_BLOCKCHAIN_TESTS)
    __WORKERS_RANGE = range(NUMBER_OF_URSULAS_IN_BLOCKCHAIN_TESTS)
    __ACCOUNT_CACHE = list()

    # Defaults
    DEFAULT_ECONOMICS = StandardTokenEconomics()

    def __init__(self,
                 test_accounts: int = NUMBER_OF_ETH_TEST_ACCOUNTS,
                 poa: bool = True,
                 light: bool = False,
                 eth_airdrop: bool = False,
                 free_transactions: bool = False,
                 *args,
                 **kwargs):

        self.free_transactions = free_transactions

        EXPECTED_CONFIRMATION_TIME_IN_SECONDS[
            'free'] = 5  # Just some upper-limit

        super().__init__(provider_uri=self.PROVIDER_URI,
                         provider_process=None,
                         poa=poa,
                         light=light,
                         *args,
                         **kwargs)

        self.log = Logger("test-blockchain")
        self.connect()

        # Generate additional ethereum accounts for testing
        population = test_accounts
        enough_accounts = len(self.client.accounts) >= population
        if not enough_accounts:
            accounts_to_make = population - len(self.client.accounts)
            self.__generate_insecure_unlocked_accounts(
                quantity=accounts_to_make)
            assert test_accounts == len(self.w3.eth.accounts)

        if eth_airdrop is True:  # ETH for everyone!
            self.ether_airdrop(amount=DEVELOPMENT_ETH_AIRDROP_AMOUNT)

    def attach_middleware(self):
        if self.free_transactions:
            self.w3.eth.setGasPriceStrategy(free_gas_price_strategy)

    def __generate_insecure_unlocked_accounts(self,
                                              quantity: int) -> List[str]:

        #
        # Sanity Check - Only PyEVM can be used.
        #

        # Detect provider platform
        client_version = self.w3.clientVersion

        if 'Geth' in client_version:
            raise RuntimeError("WARNING: Geth providers are not implemented.")
        elif "Parity" in client_version:
            raise RuntimeError(
                "WARNING: Parity providers are not implemented.")

        addresses = list()
        for _ in range(quantity):
            address = self.provider.ethereum_tester.add_account(
                '0x' + os.urandom(32).hex())
            addresses.append(address)
            self.__ACCOUNT_CACHE.append(address)
            self.log.info('Generated new insecure account {}'.format(address))
        return addresses

    def ether_airdrop(self, amount: int) -> List[str]:
        """Airdrops ether from creator address to all other addresses!"""

        coinbase, *addresses = self.w3.eth.accounts

        tx_hashes = list()
        for address in addresses:
            tx = {'to': address, 'from': coinbase, 'value': amount}
            txhash = self.w3.eth.sendTransaction(tx)

            _receipt = self.wait_for_receipt(txhash)
            tx_hashes.append(txhash)
            eth_amount = Web3().fromWei(amount, 'ether')
            self.log.info("Airdropped {} ETH {} -> {}".format(
                eth_amount, tx['from'], tx['to']))

        return tx_hashes

    def time_travel(self,
                    hours: int = None,
                    seconds: int = None,
                    periods: int = None):
        """
        Wait the specified amount of time (hours, seconds, or periods)
        by advancing the block timestamp, then mine a single block.
        """

        more_than_one_arg = sum(map(bool, (hours, seconds, periods))) > 1
        if more_than_one_arg:
            raise ValueError(
                "Specify hours, seconds, or periods, not a combination")

        if periods:
            duration = self.DEFAULT_ECONOMICS.seconds_per_period * periods
            base = self.DEFAULT_ECONOMICS.seconds_per_period
        elif hours:
            duration = hours * (60 * 60)
            base = 60 * 60
        elif seconds:
            duration = seconds
            base = 1
        else:
            raise ValueError("Specify either hours, seconds, or periods.")

        now = self.w3.eth.getBlock('latest').timestamp
        end_timestamp = ((now + duration) // base) * base

        self.w3.eth.web3.testing.timeTravel(timestamp=end_timestamp)
        self.w3.eth.web3.testing.mine(1)

        delta = maya.timedelta(seconds=end_timestamp - now)
        self.log.info(
            f"Time traveled {delta} "
            f"| period {epoch_to_period(epoch=end_timestamp, seconds_per_period=self.DEFAULT_ECONOMICS.seconds_per_period)} "
            f"| epoch {end_timestamp}")

    @classmethod
    def bootstrap_network(
        cls,
        economics: BaseEconomics = None
    ) -> Tuple['TesterBlockchain', 'InMemoryContractRegistry']:
        """For use with metric testing scripts"""

        registry = InMemoryContractRegistry()
        testerchain = cls()
        BlockchainInterfaceFactory.register_interface(testerchain)
        power = TransactingPower(password=INSECURE_DEVELOPMENT_PASSWORD,
                                 account=testerchain.etherbase_account)
        power.activate()
        testerchain.transacting_power = power

        origin = testerchain.client.etherbase
        deployer = ContractAdministrator(deployer_address=origin,
                                         registry=registry,
                                         economics=economics
                                         or cls.DEFAULT_ECONOMICS,
                                         staking_escrow_test_mode=True)

        _receipts = deployer.deploy_network_contracts(interactive=False)
        return testerchain, registry

    @property
    def etherbase_account(self):
        return self.client.accounts[self._ETHERBASE]

    @property
    def alice_account(self):
        return self.client.accounts[self._ALICE]

    @property
    def bob_account(self):
        return self.client.accounts[self._BOB]

    def ursula_account(self, index):
        if index not in self.__WORKERS_RANGE:
            raise ValueError(
                f"Ursula index must be lower than {NUMBER_OF_URSULAS_IN_BLOCKCHAIN_TESTS}"
            )
        return self.client.accounts[index + self._FIRST_URSULA]

    def staker_account(self, index):
        if index not in self.__STAKERS_RANGE:
            raise ValueError(
                f"Staker index must be lower than {NUMBER_OF_STAKERS_IN_BLOCKCHAIN_TESTS}"
            )
        return self.client.accounts[index + self._FIRST_STAKER]

    @property
    def ursulas_accounts(self):
        return list(self.ursula_account(i) for i in self.__WORKERS_RANGE)

    @property
    def stakers_accounts(self):
        return list(self.staker_account(i) for i in self.__STAKERS_RANGE)

    @property
    def unassigned_accounts(self):
        special_accounts = [
            self.etherbase_account, self.alice_account, self.bob_account
        ]
        assigned_accounts = set(self.stakers_accounts + self.ursulas_accounts +
                                special_accounts)
        accounts = set(self.client.accounts)
        return list(accounts.difference(assigned_accounts))

    def wait_for_receipt(self,
                         txhash: Union[bytes, str, HexBytes],
                         timeout: int = None) -> dict:
        """Wait for a transaction receipt and return it"""
        timeout = timeout or self.TIMEOUT
        result = self.client.wait_for_receipt(transaction_hash=txhash,
                                              timeout=timeout)
        if result.status == 0:
            raise TransactionFailed()
        return result

    def get_block_number(self) -> int:
        return self.client.block_number

    def read_storage_slot(self, address, slot):
        # https://github.com/ethereum/web3.py/issues/1490
        address = to_canonical_address(address)
        return self.client.w3.provider.ethereum_tester.backend.chain.get_vm(
        ).state.get_storage(address, slot)
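
The `time_travel` helper above aligns the target timestamp down to a multiple of the chosen base (seconds per period, one hour, or one second) before mining. That rounding arithmetic can be checked in isolation; the `seconds_per_period` value below is illustrative only, not a protocol constant.

def rounded_target(now: int, duration: int, base: int) -> int:
    """Align (now + duration) down to a multiple of `base`, as time_travel does."""
    return ((now + duration) // base) * base


seconds_per_period = 60 * 60 * 24  # illustrative period length
now = 1_600_000_000
target = rounded_target(now, duration=2 * seconds_per_period, base=seconds_per_period)
assert target % seconds_per_period == 0 and target > now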
Example #21
0
class RestMiddleware:
    log = Logger()

    _client_class = NucypherMiddlewareClient

    class Unreachable(Exception):
        def __init__(self, message, *args, **kwargs):
            super().__init__(message, *args, **kwargs)

    class UnexpectedResponse(Exception):
        def __init__(self, message, status, *args, **kwargs):
            super().__init__(message, *args, **kwargs)
            self.status = status

    class NotFound(UnexpectedResponse):
        def __init__(self, *args, **kwargs):
            super().__init__(status=HTTPStatus.NOT_FOUND, *args, **kwargs)

    class BadRequest(UnexpectedResponse):
        def __init__(self, reason, *args, **kwargs):
            self.reason = reason
            super().__init__(message=reason,
                             status=HTTPStatus.BAD_REQUEST,
                             *args,
                             **kwargs)

    class PaymentRequired(UnexpectedResponse):
        """Raised for HTTP 402"""
        def __init__(self, *args, **kwargs):
            super().__init__(status=HTTPStatus.PAYMENT_REQUIRED,
                             *args,
                             **kwargs)

    class Unauthorized(UnexpectedResponse):
        """Raised for HTTP 403"""
        def __init__(self, *args, **kwargs):
            super().__init__(status=HTTPStatus.FORBIDDEN, *args, **kwargs)

    def __init__(self, registry=None, eth_provider_uri: str = None):
        self.client = self._client_class(registry=registry,
                                         eth_provider_uri=eth_provider_uri)

    def request_revocation(self, ursula, revocation):
        # TODO: Implement offchain revocation #2787
        response = self.client.post(
            node_or_sprout=ursula,
            path=f"revoke",
            data=bytes(revocation),
        )
        return response

    def reencrypt(self, ursula: 'Ursula', reencryption_request_bytes: bytes):
        response = self.client.post(node_or_sprout=ursula,
                                    path=f"reencrypt",
                                    data=reencryption_request_bytes,
                                    timeout=2)
        return response

    def check_availability(self, initiator, responder):
        response = self.client.post(
            node_or_sprout=responder,
            data=bytes(initiator.metadata()),
            path="check_availability",
            timeout=6,  # Two round trips are expected
        )
        return response

    def ping(self, node):
        response = self.client.get(node_or_sprout=node, path="ping", timeout=2)
        return response

    def get_nodes_via_rest(self, node,
                           fleet_state_checksum: FleetStateChecksum,
                           announce_nodes: Sequence[NodeMetadata]):

        request = MetadataRequest(fleet_state_checksum=fleet_state_checksum,
                                  announce_nodes=announce_nodes)
        response = self.client.post(
            node_or_sprout=node,
            path="node_metadata",
            data=bytes(request),
        )
        return response
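
The nested exception classes above map HTTP status codes onto a small error hierarchy. Here is a generic, standard-library-only sketch of raising the appropriate subclass from a response status; the names are illustrative rather than nucypher's middleware API.

from http import HTTPStatus


class UnexpectedResponse(Exception):
    def __init__(self, message: str, status: int):
        super().__init__(message)
        self.status = status


class NotFound(UnexpectedResponse):
    def __init__(self, message: str):
        super().__init__(message, status=HTTPStatus.NOT_FOUND)


class BadRequest(UnexpectedResponse):
    def __init__(self, message: str):
        super().__init__(message, status=HTTPStatus.BAD_REQUEST)


_ERRORS = {HTTPStatus.NOT_FOUND: NotFound, HTTPStatus.BAD_REQUEST: BadRequest}


def raise_for_status(status_code: int, body: str) -> None:
    """Raise the matching exception subclass for a non-2xx status code."""
    if 200 <= status_code < 300:
        return
    exception_class = _ERRORS.get(status_code)
    if exception_class is None:
        raise UnexpectedResponse(body, status=status_code)
    raise exception_class(body)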
Example #22
0
def test_collect_rewards_integration(
        click_runner, testerchain, agency_local_registry,
        stakeholder_configuration_file_location, blockchain_alice,
        blockchain_bob, random_policy_label, beneficiary,
        preallocation_escrow_agent, mock_allocation_registry, manual_worker,
        token_economics, mock_transacting_power_activation, stake_value,
        policy_value, policy_rate):
    # Disable re-staking
    restake_args = ('stake', 'restake', '--disable', '--config-file',
                    stakeholder_configuration_file_location,
                    '--allocation-filepath',
                    MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 restake_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    half_stake_time = token_economics.minimum_locked_periods // 2  # Test setup
    logger = Logger("Test-CLI")  # Enter the Teacher's Logger, and
    current_period = 0  # State the initial period for incrementing

    staker_address = preallocation_escrow_agent.principal_contract.address
    worker_address = manual_worker

    # The staker is staking.
    stakes = StakeList(registry=agency_local_registry,
                       checksum_address=staker_address)
    stakes.refresh()
    assert stakes

    staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                             registry=agency_local_registry)
    assert worker_address == staking_agent.get_worker_from_staker(
        staker_address=staker_address)

    ursula_port = select_test_port()
    ursula = Ursula(is_me=True,
                    checksum_address=staker_address,
                    worker_address=worker_address,
                    registry=agency_local_registry,
                    rest_host='127.0.0.1',
                    rest_port=ursula_port,
                    start_working_now=False,
                    network_middleware=MockRestMiddleware(),
                    db_filepath=tempfile.mkdtemp())

    MOCK_KNOWN_URSULAS_CACHE[ursula_port] = ursula
    assert ursula.worker_address == worker_address
    assert ursula.checksum_address == staker_address

    mock_transacting_power_activation(account=worker_address,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Make a commitment for half the first stake duration
    for _ in range(half_stake_time):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    # Alice creates a policy and grants Bob access
    blockchain_alice.selection_buffer = 1

    M, N = 1, 1
    days = 3
    now = testerchain.w3.eth.getBlock(block_identifier='latest').timestamp
    expiration = maya.MayaDT(now).add(days=days - 1)
    blockchain_policy = blockchain_alice.grant(bob=blockchain_bob,
                                               label=random_policy_label,
                                               m=M,
                                               n=N,
                                               value=policy_value,
                                               expiration=expiration,
                                               handpicked_ursulas={ursula})

    # Ensure that the handpicked Ursula was selected for the policy
    arrangement = list(blockchain_policy._accepted_arrangements)[0]
    assert arrangement.ursula == ursula

    # Bob learns about the new staker and joins the policy
    blockchain_bob.start_learning_loop()
    blockchain_bob.remember_node(node=ursula)
    blockchain_bob.join_policy(random_policy_label,
                               bytes(blockchain_alice.stamp))

    # Enrico Encrypts (of course)
    enrico = Enrico(policy_encrypting_key=blockchain_policy.public_key,
                    network_middleware=MockRestMiddleware())

    verifying_key = blockchain_alice.stamp.as_umbral_pubkey()

    for index in range(half_stake_time - 5):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()

        # Encrypt
        random_data = os.urandom(random.randrange(20, 100))
        message_kit, signature = enrico.encrypt_message(plaintext=random_data)

        # Decrypt
        cleartexts = blockchain_bob.retrieve(message_kit,
                                             enrico=enrico,
                                             alice_verifying_key=verifying_key,
                                             label=random_policy_label)
        assert random_data == cleartexts[0]

        # Ursula Staying online and the clock advancing
        testerchain.time_travel(periods=1)
        current_period += 1

    # Finish the passage of time
    for _ in range(
            5 - 1
    ):  # minus 1 because the first period was already committed to in test_ursula_run
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        current_period += 1
        testerchain.time_travel(periods=1)

    #
    # WHERES THE MONEY URSULA?? - Collecting Rewards
    #

    balance = testerchain.client.get_balance(beneficiary)

    # Rewards will be unlocked after the
    # final committed period has passed (+1).
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
    testerchain.time_travel(periods=1)
    current_period += 1
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")

    # Since we are mocking the blockchain connection, manually consume the transacting power of the Beneficiary.
    mock_transacting_power_activation(account=beneficiary,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Collect Policy Fee
    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location, '--policy-fee',
                       '--no-staking-reward', '--withdraw-address',
                       beneficiary, '--allocation-filepath',
                       MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # Policy Fee
    collected_policy_fee = testerchain.client.get_balance(beneficiary)
    assert collected_policy_fee > balance

    #
    # Collect Staking Reward
    #
    token_agent = ContractAgency.get_agent(agent_class=NucypherTokenAgent,
                                           registry=agency_local_registry)
    balance_before_collecting = token_agent.get_balance(address=staker_address)

    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location,
                       '--no-policy-fee', '--staking-reward',
                       '--allocation-filepath',
                       MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # The beneficiary has withdrawn her staking rewards, which are now in the staking contract
    assert token_agent.get_balance(
        address=staker_address) >= balance_before_collecting
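
The test above repeats the same invoke-and-assert sequence for each CLI command. A small helper around `CliRunner.invoke` (a sketch only; `cli` and `password` are supplied by the caller, and nothing here is nucypher-specific) keeps such tests shorter.

from click.testing import CliRunner


def invoke_ok(runner: CliRunner, cli, args, password: str) -> str:
    """Invoke a click command, assert it succeeded, and return its output."""
    result = runner.invoke(cli, args, input=password, catch_exceptions=False)
    assert result.exit_code == 0, result.output
    return result.output

For instance, each reward-collection invocation above could become invoke_ok(click_runner, nucypher_cli, collection_args, INSECURE_DEVELOPMENT_PASSWORD), assuming the fixtures from the test.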
Example #23
0
class BlockchainInterface:
    """
    Interacts with a solidity compiler and a registry in order to instantiate compiled
    ethereum contracts with the given web3 provider backend.
    """

    TIMEOUT = 600  # seconds  # TODO: Correlate with the gas strategy - #2070

    DEFAULT_GAS_STRATEGY = 'fast'
    GAS_STRATEGIES = {'glacial': time_based.glacial_gas_price_strategy,     # 24h
                      'slow': time_based.slow_gas_price_strategy,           # 1h
                      'medium': time_based.medium_gas_price_strategy,       # 5m
                      'fast': time_based.fast_gas_price_strategy            # 60s
                      }

    process = NO_PROVIDER_PROCESS.bool_value(False)
    Web3 = Web3

    _contract_factory = VersionedContract

    class InterfaceError(Exception):
        pass

    class NoProvider(InterfaceError):
        pass

    class UnsupportedProvider(InterfaceError):
        pass

    class ConnectionFailed(InterfaceError):
        pass

    class UnknownContract(InterfaceError):
        pass

    REASONS = {
        INSUFFICIENT_ETH: 'insufficient funds for gas * price + value',
    }

    class TransactionFailed(InterfaceError):

        IPC_CODE = -32000  # (geth)

        def __init__(self,
                     message: str,
                     transaction_dict: dict,
                     contract_function: Union[ContractFunction, ContractConstructor],
                     *args):

            self.base_message = message
            self.name = get_transaction_name(contract_function=contract_function)
            self.payload = transaction_dict
            self.contract_function = contract_function
            self.failures = {
                BlockchainInterface.REASONS[INSUFFICIENT_ETH]: self.insufficient_eth
            }
            self.message = self.failures.get(self.base_message, self.default)
            super().__init__(self.message, *args)

        @property
        def default(self) -> str:
            sender = self.payload["from"]
            message = f'{self.name} from {sender[:6]}... \n' \
                      f'Sender balance: {self.get_balance()} ETH \n' \
                      f'Reason: {self.base_message} \n' \
                      f'Transaction: {self.payload}'
            return message

        def get_balance(self):
            blockchain = BlockchainInterfaceFactory.get_interface()
            balance = blockchain.client.get_balance(account=self.payload['from'])
            return balance

        @property
        def insufficient_eth(self) -> str:
            gas = (self.payload.get('gas', 1) * self.payload['gasPrice'])  # FIXME: If gas is not included...
            cost = gas + self.payload.get('value', 0)
            message = f'{self.payload} from {self.payload["from"][:8]} - {self.base_message}. ' \
                      f'Calculated cost is {cost} but sender only has {self.get_balance()}.'
            return message

    def __init__(self,
                 emitter = None,  # TODO # 1754
                 poa: bool = None,
                 light: bool = False,
                 provider_process=NO_PROVIDER_PROCESS,
                 provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
                 provider: Web3Providers = NO_BLOCKCHAIN_CONNECTION,
                 gas_strategy: Union[str, Callable] = DEFAULT_GAS_STRATEGY):

        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ...Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the solidity compiler, a contract registry, and a collection of
        web3 network providers as a means of interfacing with the ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self._provider_process = provider_process
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION         # type: EthereumClient
        self.transacting_power = READ_ONLY_INTERFACE
        self.is_light = light
        self.gas_strategy = self.get_gas_strategy(gas_strategy)

    def __repr__(self):
        r = '{name}({uri})'.format(name=self.__class__.__name__, uri=self.provider_uri)
        return r

    @classmethod
    def from_dict(cls, payload: dict, **overrides) -> 'BlockchainInterface':
        payload.update({k: v for k, v in overrides.items() if v is not None})
        blockchain = cls(**payload)
        return blockchain

    def to_dict(self) -> dict:
        payload = dict(provider_uri=self.provider_uri, poa=self.poa, light=self.is_light)
        return payload

    @property
    def is_connected(self) -> bool:
        """
        https://web3py.readthedocs.io/en/stable/__provider.html#examples-using-automated-detection
        """
        if self.client is NO_BLOCKCHAIN_CONNECTION:
            return False
        return self.client.is_connected

    @classmethod
    def get_gas_strategy(cls, gas_strategy: Union[str, Callable] = None) -> Callable:
        try:
            gas_strategy = cls.GAS_STRATEGIES[gas_strategy]
        except KeyError:
            if gas_strategy:
                if not callable(gas_strategy):
                    raise ValueError(f"{gas_strategy} must be callable to be a valid gas strategy.")
            else:
                gas_strategy = cls.GAS_STRATEGIES[cls.DEFAULT_GAS_STRATEGY]
        return gas_strategy

    def attach_middleware(self):
        chain_id = int(self.client.chain_id)
        if self.poa is None:  # If POA is not set explicitly, try to autodetect from chain id
            self.poa = chain_id in POA_CHAINS

        self.log.debug(f'Ethereum chain: {self.client.chain_name} (chain_id={chain_id}, poa={self.poa})')

        # For use with Proof-Of-Authority test-blockchains
        if self.poa is True:
            self.log.debug('Injecting POA middleware at layer 0')
            self.client.inject_middleware(geth_poa_middleware, layer=0)

        # Gas Price Strategy:
        # Bundled web3 strategies are too expensive for Infura (it takes ~1 minute to get a price),
        # so we use external gas price oracles, instead (see #2139)
        if isinstance(self.client, InfuraClient):
            gas_strategy = datafeed_fallback_gas_price_strategy
        else:
            gas_strategy = self.gas_strategy
        self.client.set_gas_strategy(gas_strategy=gas_strategy)
        gwei_gas_price = Web3.fromWei(self.client.gas_price_for_transaction(), 'gwei')
        self.log.debug(f"Currently, our gas strategy returns a gas price of {gwei_gas_price} gwei")

        self.client.add_middleware(middleware.time_based_cache_middleware)
        self.client.add_middleware(middleware.latest_block_based_cache_middleware)
        self.client.add_middleware(middleware.simple_cache_middleware)

    def connect(self):

        # Spawn child process
        if self._provider_process:
            self._provider_process.start()
            provider_uri = self._provider_process.provider_uri(scheme='file')
        else:
            provider_uri = self.provider_uri
            self.log.info(f"Using external Web3 Provider '{self.provider_uri}'")

        # Attach Provider
        self._attach_provider(provider=self._provider, provider_uri=provider_uri)
        self.log.info("Connecting to {}".format(self.provider_uri))
        if self._provider is NO_BLOCKCHAIN_CONNECTION:
            raise self.NoProvider("There are no configured blockchain providers")

        # Connect if not connected
        try:
            self.w3 = self.Web3(provider=self._provider)
            self.client = EthereumClient.from_w3(w3=self.w3)
        except requests.ConnectionError:  # RPC
            raise self.ConnectionFailed(f'Connection Failed - {str(self.provider_uri)} - is RPC enabled?')
        except FileNotFoundError:         # IPC File Protocol
            raise self.ConnectionFailed(f'Connection Failed - {str(self.provider_uri)} - is IPC enabled?')
        else:
            self.attach_middleware()

        return self.is_connected

    def sync(self, emitter=None) -> None:

        sync_state = self.client.sync()
        if emitter is not None:

            emitter.echo(f"Syncing: {self.client.chain_name.capitalize()}. Waiting for sync to begin.", verbosity=1)

            while not len(self.client.peers):
                emitter.echo("waiting for peers...", verbosity=1)
                time.sleep(5)

            peer_count = len(self.client.peers)
            emitter.echo(
                f"Found {'an' if peer_count == 1 else peer_count} Ethereum peer{('s' if peer_count > 1 else '')}.",
                verbosity=1)

            try:
                emitter.echo("Beginning sync...", verbosity=1)
                initial_state = next(sync_state)
            except StopIteration:  # will occur if no syncing needs to happen
                emitter.echo("Local blockchain data is already synced.", verbosity=1)
                return

            prior_state = initial_state
            total_blocks_to_sync = int(initial_state.get('highestBlock', 0)) - int(
                initial_state.get('currentBlock', 0))
            with click.progressbar(
                    length=total_blocks_to_sync,
                    label="sync progress",
                    file=emitter.get_stream(verbosity=1)
            ) as bar:
                for syncdata in sync_state:
                    if syncdata:
                        blocks_accomplished = int(syncdata['currentBlock']) - int(
                            prior_state.get('currentBlock', 0))
                        bar.update(blocks_accomplished)
                        prior_state = syncdata
        else:
            try:
                for syncdata in sync_state:
                    self.client.log.info(f"Syncing {syncdata['currentBlock']}/{syncdata['highestBlock']}")
            except TypeError:  # it's already synced
                return
        return

    @property
    def provider(self) -> Union[IPCProvider, WebsocketProvider, HTTPProvider]:
        return self._provider

    def _attach_provider(self,
                         provider: Web3Providers = None,
                         provider_uri: str = None) -> None:
        """
        https://web3py.readthedocs.io/en/latest/providers.html#providers
        """

        if not provider_uri and not provider:
            raise self.NoProvider("No URI or provider instances supplied.")

        if provider_uri and not provider:
            uri_breakdown = urlparse(provider_uri)

            if uri_breakdown.scheme == 'tester':
                providers = {
                    'pyevm': _get_pyevm_test_provider,
                    'geth': _get_test_geth_parity_provider,
                    'parity-ethereum': _get_test_geth_parity_provider,
                    'mock': _get_mock_test_provider
                }
                provider_scheme = uri_breakdown.netloc

            else:
                providers = {
                    'auto': _get_auto_provider,
                    'infura': _get_infura_provider,
                    'ipc': _get_IPC_provider,
                    'file': _get_IPC_provider,
                    'ws': _get_websocket_provider,
                    'wss': _get_websocket_provider,
                    'http': _get_HTTP_provider,
                    'https': _get_HTTP_provider,
                }
                provider_scheme = uri_breakdown.scheme

            # auto-detect for file based ipc
            if not provider_scheme:
                if os.path.exists(provider_uri):
                    # file is available - assume ipc/file scheme
                    provider_scheme = 'file'
                    self.log.info(f"Auto-detected provider scheme as 'file://' for provider {provider_uri}")

            try:
                self._provider = providers[provider_scheme](provider_uri)
            except KeyError:
                raise self.UnsupportedProvider(f"{provider_uri} is an invalid or unsupported blockchain provider URI")
            else:
                self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        else:
            self._provider = provider

    def __transaction_failed(self,
                             exception: Exception,
                             transaction_dict: dict,
                             contract_function: Union[ContractFunction, ContractConstructor]
                             ) -> None:
        """
        Re-raising error handler and context manager for transaction broadcast or
        build failure events at the interface layer. This method is a last line of defense
        against unhandled exceptions caused by transaction failures and must raise an exception.
        # TODO: #1504 - Additional Handling of validation failures (gas limits, invalid fields, etc.)
        """

        try:
            # Assume this error is formatted as an IPC response
            code, message = exception.args[0].values()

        except (ValueError, IndexError, AttributeError):
            # TODO: #1504 - Try even harder to determine if this is insufficient funds causing the issue,
            #               This may be best handled at the agent or actor layer for registry and token interactions.
            # Worst case scenario - raise the exception held in context implicitly
            raise exception

        else:
            if int(code) != self.TransactionFailed.IPC_CODE:
                # Only handle client-specific exceptions
                # https://www.jsonrpc.org/specification Section 5.1
                raise exception
            self.log.critical(message)                     # simple context
            transaction_failed = self.TransactionFailed(message=message,  # rich error (best case)
                                                        contract_function=contract_function,
                                                        transaction_dict=transaction_dict)
            raise transaction_failed from exception

    def __log_transaction(self, transaction_dict: dict, contract_function: ContractFunction):
        """
        Format and log a transaction dict and return the transaction name string.
        This method *must not* mutate the original transaction dict.
        """
        # Do not mutate the original transaction dict
        tx = dict(transaction_dict)

        # Format
        if tx.get('to'):
            tx['to'] = to_checksum_address(contract_function.address)
        try:
            tx['selector'] = contract_function.selector
        except AttributeError:
            pass
        tx['from'] = to_checksum_address(tx['from'])
        tx.update({f: prettify_eth_amount(v) for f, v in tx.items() if f in ('gasPrice', 'value')})
        payload_pprint = ', '.join("{}: {}".format(k, v) for k, v in tx.items())

        # Log
        transaction_name = get_transaction_name(contract_function=contract_function)
        self.log.debug(f"[TX-{transaction_name}] | {payload_pprint}")

    @validate_checksum_address
    def build_payload(self,
                      sender_address: str,
                      payload: dict = None,
                      transaction_gas_limit: int = None,
                      ) -> dict:

        base_payload = {'chainId': int(self.client.chain_id),
                        'nonce': self.client.w3.eth.getTransactionCount(sender_address, 'pending'),
                        'from': sender_address}

        # Aggregate
        if not payload:
            payload = {}
        payload.update(base_payload)
        # Explicit gas override - will skip gas estimation in next operation.
        if transaction_gas_limit:
            payload['gas'] = int(transaction_gas_limit)
        return payload

    @validate_checksum_address
    def build_contract_transaction(self,
                                   contract_function: ContractFunction,
                                   sender_address: str,
                                   payload: dict = None,
                                   transaction_gas_limit: int = None,
                                   ) -> dict:
        payload = self.build_payload(sender_address=sender_address,
                                     payload=payload,
                                     transaction_gas_limit=transaction_gas_limit)
        self.__log_transaction(transaction_dict=payload, contract_function=contract_function)
        try:
            transaction_dict = contract_function.buildTransaction(payload)  # Gas estimation occurs here
        except (TestTransactionFailed, ValidationError, ValueError) as error:
            # Note: Geth raises ValueError in the same condition that pyevm raises ValidationError here.
            # Treat this condition as "Transaction Failed" during gas estimation.
            raise self.__transaction_failed(exception=error, transaction_dict=payload, contract_function=contract_function)
        return transaction_dict

    def sign_and_broadcast_transaction(self,
                                       transaction_dict,
                                       transaction_name: str = "",
                                       confirmations: int = 0
                                       ) -> dict:

        #
        # Setup
        #

        # TODO # 1754 - Move this to singleton - I do not approve... nor does Bogdan?
        if GlobalLoggerSettings._json_ipc:
            emitter = JSONRPCStdoutEmitter()
        else:
            emitter = StdoutEmitter()

        if self.transacting_power is READ_ONLY_INTERFACE:
            raise self.InterfaceError(str(READ_ONLY_INTERFACE))

        #
        # Sign
        #

        # TODO: Show the USD Price:  https://api.coinmarketcap.com/v1/ticker/ethereum/
        price = transaction_dict['gasPrice']
        price_gwei = Web3.fromWei(price, 'gwei')
        cost_wei = price * transaction_dict['gas']
        cost = Web3.fromWei(cost_wei, 'ether')

        if self.transacting_power.is_device:
            emitter.message(f'Confirm transaction {transaction_name} on hardware wallet... '
                            f'({cost} ETH @ {price_gwei} gwei)',
                            color='yellow')
        signed_raw_transaction = self.transacting_power.sign_transaction(transaction_dict)

        #
        # Broadcast
        #

        emitter.message(f'Broadcasting {transaction_name} Transaction ({cost} ETH @ {price_gwei} gwei)...',
                        color='yellow')
        try:
            txhash = self.client.send_raw_transaction(signed_raw_transaction)  # <--- BROADCAST
        except (TestTransactionFailed, ValueError) as error:
            raise  # TODO: Unify with Transaction failed handling

        #
        # Receipt
        #

        try:  # TODO: Handle block confirmation exceptions
            receipt = self.client.wait_for_receipt(txhash, timeout=self.TIMEOUT, confirmations=confirmations)
        except TimeExhausted:
            # TODO: #1504 - Handle transaction timeout
            raise
        else:
            self.log.debug(f"[RECEIPT-{transaction_name}] | txhash: {receipt['transactionHash'].hex()}")

        #
        # Confirmations
        #

        # Primary check
        transaction_status = receipt.get('status', UNKNOWN_TX_STATUS)
        if transaction_status == 0:
            failure = f"Transaction transmitted, but receipt returned status code 0. " \
                      f"Full receipt: \n {pprint.pformat(receipt, indent=2)}"
            raise self.InterfaceError(failure)

        if transaction_status is UNKNOWN_TX_STATUS:
            self.log.info(f"Unknown transaction status for {txhash} (receipt did not contain a status field)")

            # Secondary check
            tx = self.client.get_transaction(txhash)
            if tx["gas"] == receipt["gasUsed"]:
                raise self.InterfaceError(f"Transaction consumed 100% of transaction gas."
                                          f"Full receipt: \n {pprint.pformat(receipt, indent=2)}")

        return receipt

    def get_blocktime(self):
        return self.client.get_blocktime()

    @validate_checksum_address
    def send_transaction(self,
                         contract_function: Union[ContractFunction, ContractConstructor],
                         sender_address: str,
                         payload: dict = None,
                         transaction_gas_limit: int = None,
                         confirmations: int = 0
                         ) -> dict:

        transaction = self.build_contract_transaction(contract_function=contract_function,
                                                      sender_address=sender_address,
                                                      payload=payload,
                                                      transaction_gas_limit=transaction_gas_limit)

        # Get transaction name
        try:
            transaction_name = contract_function.fn_name.upper()
        except AttributeError:
            transaction_name = 'DEPLOY' if isinstance(contract_function, ContractConstructor) else 'UNKNOWN'

        receipt = self.sign_and_broadcast_transaction(transaction_dict=transaction,
                                                      transaction_name=transaction_name,
                                                      confirmations=confirmations)
        return receipt

    def get_contract_by_name(self,
                             registry: BaseContractRegistry,
                             contract_name: str,
                             contract_version: str = None,
                             enrollment_version: Union[int, str] = None,
                             proxy_name: str = None,
                             use_proxy_address: bool = True
                             ) -> VersionedContract:
        """
        Instantiate a deployed contract from registry data,
        and assimilate it with its proxy if it is upgradeable.
        """
        target_contract_records = registry.search(contract_name=contract_name, contract_version=contract_version)

        if not target_contract_records:
            raise self.UnknownContract(f"No contract records found for {contract_name}:{contract_version}.")

        if proxy_name:

            # Lookup proxies; Search for a published proxy that targets this contract record
            proxy_records = registry.search(contract_name=proxy_name)

            results = list()
            for proxy_name, proxy_version, proxy_address, proxy_abi in proxy_records:
                proxy_contract = self.client.w3.eth.contract(abi=proxy_abi,
                                                             address=proxy_address,
                                                             version=proxy_version,
                                                             ContractFactoryClass=self._contract_factory)

                # Read this dispatcher's target address from the blockchain
                proxy_live_target_address = proxy_contract.functions.target().call()
                for target_name, target_version, target_address, target_abi in target_contract_records:

                    if target_address == proxy_live_target_address:
                        if use_proxy_address:
                            triplet = (proxy_address, target_version, target_abi)
                        else:
                            triplet = (target_address, target_version, target_abi)
                    else:
                        continue

                    results.append(triplet)

            if len(results) > 1:
                address, _version, _abi = results[0]
                message = f"Multiple {proxy_name} deployments are targeting {address}"
                raise self.InterfaceError(message)

            else:
                try:
                    selected_address, selected_version, selected_abi = results[0]
                except IndexError:
                    raise self.UnknownContract(
                        f"There are no Dispatcher records targeting '{contract_name}':{contract_version}")

        else:
            # TODO: use_proxy_address doesn't work in this case. Should we raise if used?

            # NOTE: 0 must be allowed as a valid version number
            if len(target_contract_records) != 1:
                if enrollment_version is None:
                    m = f"{len(target_contract_records)} records enrolled " \
                        f"for contract {contract_name}:{contract_version} " \
                        f"and no version index was supplied."
                    raise self.InterfaceError(m)
                enrollment_version = self.__get_enrollment_version_index(name=contract_name,
                                                                         contract_version=contract_version,
                                                                         version_index=enrollment_version,
                                                                         enrollments=len(target_contract_records))

            else:
                enrollment_version = -1  # default

            _contract_name, selected_version, selected_address, selected_abi = target_contract_records[enrollment_version]

        # Create the contract from selected sources
        unified_contract = self.client.w3.eth.contract(abi=selected_abi,
                                                       address=selected_address,
                                                       version=selected_version,
                                                       ContractFactoryClass=self._contract_factory)

        return unified_contract

    @staticmethod
    def __get_enrollment_version_index(version_index: Union[int, str],
                                       enrollments: int,
                                       name: str,
                                       contract_version: str):
        version_names = {'latest': -1, 'earliest': 0}
        try:
            version = version_names[version_index]
        except KeyError:
            try:
                version = int(version_index)
            except ValueError:
                what_is_this = version_index
                raise ValueError(f"'{what_is_this}' is not a valid enrollment version number")
            else:
                if version > enrollments - 1:
                    message = f"Version index '{version}' is larger than the number of enrollments " \
                              f"for {name}:{contract_version}."
                    raise ValueError(message)
        return version
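
A minimal usage sketch of the two public entry points above, get_contract_by_name and send_transaction (not part of the original source; the blockchain/registry objects and the StakingEscrow, Dispatcher, and commitToNextPeriod names are illustrative assumptions):

def commit_via_interface(blockchain, registry, sender_address: str) -> dict:
    # Hypothetical sketch: resolve an upgradeable contract through its Dispatcher
    # proxy, then send a transaction through the interface methods defined above.
    staking_escrow = blockchain.get_contract_by_name(registry=registry,
                                                     contract_name='StakingEscrow',   # assumed name
                                                     proxy_name='Dispatcher',         # assumed proxy
                                                     use_proxy_address=True)

    # Build, sign, broadcast, and wait for the receipt via send_transaction above
    contract_function = staking_escrow.functions.commitToNextPeriod()  # assumed contract method
    receipt = blockchain.send_transaction(contract_function=contract_function,
                                          sender_address=sender_address,
                                          confirmations=1)
    return receipt
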
Example #24
0
class WorkTracker:

    CLOCK = reactor
    INTERVAL_FLOOR = 60 * 15  # fifteen minutes
    INTERVAL_CEIL = 60 * 180  # three hours

    ALLOWED_DEVIATION = 0.5  # i.e., up to +50% from the expected confirmation time

    def __init__(self, worker, *args, **kwargs):

        super().__init__(*args, **kwargs)
        self.log = Logger('stake-tracker')
        self.worker = worker
        self.staking_agent = self.worker.staking_agent
        self.client = self.staking_agent.blockchain.client

        self.gas_strategy = self.staking_agent.blockchain.gas_strategy

        self._tracking_task = task.LoopingCall(self._do_work)
        self._tracking_task.clock = self.CLOCK

        self.__pending = dict()  # TODO: Prime with pending worker transactions
        self.__requirement = None
        self.__current_period = None
        self.__start_time = NOT_STAKING
        self.__uptime_period = NOT_STAKING
        self._abort_on_error = False

    @classmethod
    def random_interval(cls) -> int:
        return random.randint(cls.INTERVAL_FLOOR, cls.INTERVAL_CEIL)

    @property
    def current_period(self):
        return self.__current_period

    def max_confirmation_time(self) -> int:
        expected_time = EXPECTED_CONFIRMATION_TIME_IN_SECONDS[self.gas_strategy]
        result = expected_time * (1 + self.ALLOWED_DEVIATION)
        return result

    def stop(self) -> None:
        if self._tracking_task.running:
            self._tracking_task.stop()
            self.log.info("STOPPED WORK TRACKING")

    def start(self, act_now: bool = True, requirement_func: Callable = None, force: bool = False) -> None:
        """
        High-level stake tracking initialization, this function aims
        to be safely called at any time - For example, it is okay to call
        this function multiple times within the same period.
        """
        if self._tracking_task.running and not force:
            return

        # Add optional confirmation requirement callable
        self.__requirement = requirement_func

        # Record the start time and period
        self.__start_time = maya.now()
        self.__uptime_period = self.staking_agent.get_current_period()
        self.__current_period = self.__uptime_period

        self.log.info("START WORK TRACKING")
        d = self._tracking_task.start(interval=self.random_interval(), now=act_now)
        d.addErrback(self.handle_working_errors)

    def _crash_gracefully(self, failure=None) -> None:
        """
        A facility for crashing more gracefully in the event that
        an exception is unhandled in a different thread.
        """
        self._crashed = failure
        failure.raiseException()

    def handle_working_errors(self, *args, **kwargs) -> None:
        failure = args[0]
        if self._abort_on_error:
            self.log.critical(f'Unhandled error during node work tracking. {failure!r}',
                              failure=failure)
            self.stop()
            reactor.callFromThread(self._crash_gracefully, failure=failure)
        else:
            self.log.warn(f'Unhandled error during work tracking: {failure.getTraceback()!r}',
                          failure=failure)
            self.start()

    def __work_requirement_is_satisfied(self) -> bool:
        # TODO: Check for stake expiration and exit
        if self.__requirement is None:
            return True
        r = self.__requirement()
        if not isinstance(r, bool):
            raise ValueError("'requirement' must return a boolean.")
        return r

    @property
    def pending(self) -> Dict[int, HexBytes]:
        return self.__pending.copy()

    def __commitments_tracker_is_consistent(self) -> bool:
        worker_address = self.worker.worker_address
        tx_count_pending = self.client.get_transaction_count(account=worker_address, pending=True)
        tx_count_latest = self.client.get_transaction_count(account=worker_address, pending=False)
        txs_in_mempool = tx_count_pending - tx_count_latest
        if len(self.__pending) == txs_in_mempool:
            return True  # OK!
        if txs_in_mempool > len(self.__pending):  # We're missing some pending TXs
            return False
        else:  # TODO #2429: What to do when txs_in_mempool < len(self.__pending)? What does this imply?
            return True

    def __track_pending_commitments(self) -> bool:
        # TODO: Keep a purpose-built persistent log of worker transaction history

        unmined_transactions = list()
        pending_transactions = self.pending.items()    # note: this must be performed non-mutatively
        for tx_firing_block_number, txhash in sorted(pending_transactions):
            try:
                confirmed_tx_receipt = self.client.get_transaction_receipt(transaction_hash=txhash)
            except TransactionNotFound:
                unmined_transactions.append(txhash)  # mark as unmined - Keep tracking it for now
                continue
            else:
                confirmation_block_number = confirmed_tx_receipt['blockNumber']
                confirmations = confirmation_block_number - tx_firing_block_number
                self.log.info(f'Commitment transaction {txhash.hex()[:10]} confirmed: {confirmations} confirmations')
                del self.__pending[tx_firing_block_number]

        if unmined_transactions:
            s = "s" if len(unmined_transactions) > 1 else ""
            self.log.info(f'{len(unmined_transactions)} pending commitment transaction{s} detected.')

        inconsistent_tracker = not self.__commitments_tracker_is_consistent()
        if inconsistent_tracker:
            # If we detect there's a mismatch between the number of internally tracked and
            # pending block transactions, create a special pending TX that accounts for this.
            # TODO: Detect if this untracked pending transaction is a commitment transaction at all.
            self.__pending[0] = UNTRACKED_PENDING_TRANSACTION
            return True

        return bool(self.__pending)

    def __fire_replacement_commitment(self, current_block_number: int, tx_firing_block_number: int) -> None:
        replacement_txhash = self.__fire_commitment()  # replace
        self.__pending[current_block_number] = replacement_txhash  # track this transaction
        del self.__pending[tx_firing_block_number]  # assume our original TX is stuck

    def __handle_replacement_commitment(self, current_block_number: int) -> None:
        tx_firing_block_number, txhash = list(sorted(self.pending.items()))[0]
        if txhash is UNTRACKED_PENDING_TRANSACTION:
            # TODO: Detect if this untracked pending transaction is a commitment transaction at all.
            message = f"We have an untracked pending transaction. Issuing a replacement transaction."
        else:
            # If the transaction is still not mined after a max confirmation time
            # (based on current gas strategy) issue a replacement transaction.
            wait_time_in_blocks = current_block_number - tx_firing_block_number
            wait_time_in_seconds = wait_time_in_blocks * AVERAGE_BLOCK_TIME_IN_SECONDS
            if wait_time_in_seconds < self.max_confirmation_time():
                self.log.info(f'Waiting for pending commitment transaction to be mined ({txhash.hex()}).')
                return
            else:
                message = f"We've waited for {wait_time_in_seconds} seconds, but the max confirmation time is " \
                          f"{self.max_confirmation_time()} seconds for the {self.gas_strategy} gas strategy. " \
                          f"Issuing a replacement transaction."

        # Send a replacement transaction
        self.log.info(message)
        self.__fire_replacement_commitment(current_block_number=current_block_number,
                                           tx_firing_block_number=tx_firing_block_number)

    def _do_work(self) -> None:
        """
        Async working task for Ursula  # TODO: Split into multiple async tasks
        """

        # Call once here, and inject later for temporal consistency
        current_block_number = self.client.block_number

        # Update on-chain status
        self.log.info(f"Checking for new period. Current period is {self.__current_period}")
        onchain_period = self.staking_agent.get_current_period()  # <-- Read from contract
        if self.current_period != onchain_period:
            self.__current_period = onchain_period

            # TODO: #1515 and #1517 - Shut down at end of terminal stake
            # This slows down tests substantially and adds additional
            # RPC calls, but might be acceptable in production
            # self.worker.stakes.refresh()

        # Measure working interval
        interval = onchain_period - self.worker.last_committed_period
        if interval < 0:
            return  # No need to commit to this period.  Save the gas.
        if interval > 0:
            # TODO: #1516 Follow-up actions for missed commitments
            self.log.warn(f"MISSED COMMITMENTS - {interval} missed staking commitments detected.")

        # Commitment tracking
        unmined_transactions = self.__track_pending_commitments()
        if unmined_transactions:
            self.__handle_replacement_commitment(current_block_number=current_block_number)
            # while there are known pending transactions, remain in fast interval mode
            self._tracking_task.interval = self.INTERVAL_FLOOR
            return  # This cycle is finished.
        else:
            # Randomize the next task interval over time, within bounds.
            self._tracking_task.interval = self.random_interval()

        # Only perform work this round if the requirements are met
        if not self.__work_requirement_is_satisfied():
            self.log.warn(f'COMMIT PREVENTED (callable: "{self.__requirement.__name__}") - '
                          f'There are unmet commit requirements.')
            # TODO: Follow-up actions for failed requirements
            return

        txhash = self.__fire_commitment()
        self.__pending[current_block_number] = txhash

    def __fire_commitment(self):
        """Makes an initial/replacement worker commitment transaction"""
        transacting_power = self.worker.transacting_power
        with transacting_power:
            txhash = self.worker.commit_to_next_period(fire_and_forget=True)  # <-- blockchain WRITE
        self.log.info(f"Making a commitment to period {self.current_period} - TxHash: {txhash.hex()}")
        return txhash
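
A sketch of how a worker object might drive the tracker above (hypothetical, not from the original source; `ursula` is assumed to expose the attributes WorkTracker reads: staking_agent, transacting_power, worker_address, last_committed_period, and commit_to_next_period):

def start_work_tracking(ursula) -> WorkTracker:
    # Hypothetical wiring sketch under the assumptions stated above.
    def stake_is_active() -> bool:
        # Optional requirement callable: must return a boolean
        # (see __work_requirement_is_satisfied above). `ursula.stakes` is assumed.
        return bool(ursula.stakes)

    tracker = WorkTracker(worker=ursula)
    tracker.start(act_now=True, requirement_func=stake_is_active)  # safe to call repeatedly
    return tracker
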
Example #25
0
    mock_remember_node, mock_rest_app_creation, mock_secret_source,
    mock_verify_node)
from tests.utils.blockchain import TesterBlockchain, token_airdrop
from tests.utils.config import (make_alice_test_configuration,
                                make_bob_test_configuration,
                                make_ursula_test_configuration)
from tests.utils.middleware import MockRestMiddleware, MockRestMiddlewareForLargeFleetTests
from tests.utils.policy import generate_random_label
from tests.utils.ursula import (MOCK_URSULA_STARTING_PORT,
                                make_decentralized_ursulas,
                                make_federated_ursulas,
                                MOCK_KNOWN_URSULAS_CACHE,
                                _mock_ursula_reencrypts)
from constant_sorrow.constants import (FULL, INIT)

test_logger = Logger("test-logger")

# defer.setDebugging(True)

#
# Temporary
#


@pytest.fixture(scope="function")
def tempfile_path():
    fd, path = tempfile.mkstemp()
    yield path
    os.close(fd)
    os.remove(path)
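
A hypothetical example of consuming the fixture above (not part of the original suite); pytest injects it by parameter name and the fixture removes the file after the test:

def test_writes_to_temporary_file(tempfile_path):
    # Write and read back through the temporary path provided by the fixture
    with open(tempfile_path, 'w') as f:
        f.write('scratch data')
    with open(tempfile_path) as f:
        assert f.read() == 'scratch data'
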
Example #26
0
class StakeList(UserList):

    @validate_checksum_address
    def __init__(self,
                 registry: BaseContractRegistry,
                 checksum_address: str = None,
                 *args, **kwargs):

        super().__init__(*args, **kwargs)
        self.log = Logger('stake-tracker')
        self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent, registry=registry)
        from nucypher.blockchain.economics import EconomicsFactory
        self.economics = EconomicsFactory.get_economics(registry=registry)

        self.__initial_period = NOT_STAKING
        self.__terminal_period = NOT_STAKING

        # "load-in":  Read on-chain stakes
        # Allow stake tracker to be initialized as an empty collection.
        if checksum_address:
            if not is_checksum_address(checksum_address):
                raise ValueError(f'{checksum_address} is not a valid EIP-55 checksum address')
        self.checksum_address = checksum_address
        self.__updated = None

    @property
    def updated(self) -> maya.MayaDT:
        return self.__updated

    @property
    def initial_period(self) -> int:
        return self.__initial_period

    @property
    def terminal_period(self) -> int:
        return self.__terminal_period

    @validate_checksum_address
    def refresh(self) -> None:
        """Public staking cache invalidation method"""
        return self.__read_stakes()

    def __read_stakes(self) -> None:
        """Rewrite the local staking cache by reading on-chain stakes"""

        existing_records = len(self)

        # Candidate replacement cache values
        current_period = self.staking_agent.get_current_period()
        onchain_stakes, initial_period, terminal_period = list(), 0, current_period

        # Read from blockchain
        stakes_reader = self.staking_agent.get_all_stakes(staker_address=self.checksum_address)
        for onchain_index, stake_info in enumerate(stakes_reader):

            if not stake_info:
                onchain_stake = EMPTY_STAKING_SLOT

            else:
                onchain_stake = Stake.from_stake_info(checksum_address=self.checksum_address,
                                                      stake_info=stake_info,
                                                      staking_agent=self.staking_agent,
                                                      index=onchain_index,
                                                      economics=self.economics)

                # Track the earliest initial period
                if onchain_stake.first_locked_period:
                    if onchain_stake.first_locked_period < initial_period:
                        initial_period = onchain_stake.first_locked_period

                # Track the latest terminal period
                if onchain_stake.final_locked_period > terminal_period:
                    terminal_period = onchain_stake.final_locked_period

            # Store the replacement stake
            onchain_stakes.append(onchain_stake)

        # Commit the new stake and terminal values to the cache
        self.data = onchain_stakes
        if onchain_stakes:
            self.__initial_period = initial_period
            self.__terminal_period = terminal_period
            changed_records = abs(existing_records - len(onchain_stakes))
            self.log.debug(f"Updated {changed_records} local staking cache entries.")

        # Record most recent cache update
        self.__updated = maya.now()
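
A minimal sketch of populating the cache above for a single staker (hypothetical; `registry` and `staker_address` are assumed inputs supplied by the caller):

def load_stakes(registry: BaseContractRegistry, staker_address: str) -> StakeList:
    # Hypothetical usage of StakeList: build the tracker, then read on-chain
    # stakes into the local cache via the public refresh() method.
    stake_list = StakeList(registry=registry, checksum_address=staker_address)
    stake_list.refresh()
    return stake_list
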
Example #27
0
    def __init__(
            self,
            emitter=None,  # TODO # 1754
            poa: bool = None,
            light: bool = False,
            provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
            provider: BaseProvider = NO_BLOCKCHAIN_CONNECTION,
            gas_strategy: Optional[Union[str, Callable]] = None,
            max_gas_price: Optional[int] = None):
        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ... (Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the Solidity compiler, a contract registry, and a collection of
        web3 network providers, serving as the means of interfacing with the Ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION
        self.is_light = light

        # TODO: Not ready to give users total flexibility. Let's stick for the moment to known values. See #2447
        if gas_strategy not in ('slow', 'medium', 'fast', 'free', None):  # FIXME: What is 'None' doing here?
            raise ValueError(f"'{gas_strategy}' is an invalid gas strategy")
        self.gas_strategy = gas_strategy or self.DEFAULT_GAS_STRATEGY
        self.max_gas_price = max_gas_price
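
A hedged instantiation sketch for the constructor above, assuming the enclosing class is nucypher's BlockchainInterface and that a separate connect() step establishes the w3/client attributes (that step is not shown in this excerpt; the import path and provider URI are assumptions):

from nucypher.blockchain.eth.interfaces import BlockchainInterface  # assumed import path

interface = BlockchainInterface(provider_uri='http://localhost:8545',  # HTTP provider (JSON-RPC 2.0)
                                poa=False,
                                light=False,
                                gas_strategy='medium')  # one of: 'slow', 'medium', 'fast', 'free'
interface.connect()  # assumed follow-up step; binds self.w3 / self.client from provider_uri
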
Example #28
0
class RestMiddleware:
    log = Logger()

    _client_class = NucypherMiddlewareClient

    class UnexpectedResponse(Exception):
        def __init__(self, message, status, *args, **kwargs):
            super().__init__(message, *args, **kwargs)
            self.status = status

    class NotFound(UnexpectedResponse):
        def __init__(self, *args, **kwargs):
            super().__init__(status=404, *args, **kwargs)

    class BadRequest(UnexpectedResponse):
        def __init__(self, reason, *args, **kwargs):
            self.reason = reason
            super().__init__(message=reason, status=400, *args, **kwargs)

    def __init__(self, registry=None):
        self.client = self._client_class(registry)

    def get_certificate(self,
                        host,
                        port,
                        timeout=3,
                        retry_attempts: int = 3,
                        retry_rate: int = 2,
                        current_attempt: int = 0):

        socket.setdefaulttimeout(timeout)  # Set Socket Timeout

        try:
            self.log.info(f"Fetching seednode {host}:{port} TLS certificate")
            seednode_certificate = ssl.get_server_certificate(addr=(host,
                                                                    port))

        except socket.timeout:
            if current_attempt == retry_attempts:
                message = f"No Response from seednode {host}:{port} after {retry_attempts} attempts"
                self.log.info(message)
                raise ConnectionRefusedError("No response from {}:{}".format(
                    host, port))
            self.log.info(
                f"No Response from seednode {host}:{port}. Retrying in {retry_rate} seconds..."
            )
            time.sleep(retry_rate)
            return self.get_certificate(host, port, timeout, retry_attempts,
                                        retry_rate, current_attempt + 1)

        except OSError:
            raise  # TODO: #1835

        else:
            certificate = x509.load_pem_x509_certificate(
                seednode_certificate.encode(), backend=default_backend())
            return certificate

    def consider_arrangement(self, arrangement):
        node = arrangement.ursula
        response = self.client.post(node_or_sprout=node,
                                    path="consider_arrangement",
                                    data=bytes(arrangement),
                                    timeout=2)
        return response

    def enact_policy(self, ursula, kfrag_id, payload):
        response = self.client.post(node_or_sprout=ursula,
                                    path=f'kFrag/{kfrag_id.hex()}',
                                    data=payload,
                                    timeout=2)
        return response

    def reencrypt(self, work_order):
        ursula_rest_response = self.send_work_order_payload_to_ursula(
            work_order)
        splitter = BytestringSplitter((CapsuleFrag, VariableLengthBytestring),
                                      Signature)
        cfrags_and_signatures = splitter.repeat(ursula_rest_response.content)
        return cfrags_and_signatures

    def revoke_arrangement(self, ursula, revocation):
        # TODO: Implement revocation confirmations
        response = self.client.delete(
            node_or_sprout=ursula,
            path=f"kFrag/{revocation.arrangement_id.hex()}",
            data=bytes(revocation),
        )
        return response

    def get_competitive_rate(self):
        return NotImplemented

    def get_treasure_map_from_node(self, node, map_id):
        response = self.client.get(node_or_sprout=node,
                                   path=f"treasure_map/{map_id}",
                                   timeout=2)
        return response

    def put_treasure_map_on_node(self, node, map_id, map_payload):
        response = self.client.post(node_or_sprout=node,
                                    path=f"treasure_map/{map_id}",
                                    data=map_payload,
                                    timeout=2)
        return response

    def send_work_order_payload_to_ursula(self, work_order):
        payload = work_order.payload()
        id_as_hex = work_order.arrangement_id.hex()
        response = self.client.post(node_or_sprout=work_order.ursula,
                                    path=f"kFrag/{id_as_hex}/reencrypt",
                                    data=payload,
                                    timeout=2)
        return response

    def check_rest_availability(self, initiator, responder):
        response = self.client.post(
            node_or_sprout=responder,
            data=bytes(initiator),
            path="ping",
            timeout=6,  # Two round trips are expected
        )
        return response

    def get_nodes_via_rest(self,
                           node,
                           announce_nodes=None,
                           nodes_i_need=None,
                           fleet_checksum=None):
        if nodes_i_need:
            # TODO: This needs to actually do something.  NRN
            # Include node_ids in the request; if the teacher node doesn't know about the
            # nodes matching these ids, then it will ask other nodes.
            pass

        if fleet_checksum:
            params = {'fleet': fleet_checksum}
        else:
            params = {}

        if announce_nodes:
            payload = bytes().join(
                bytes(VariableLengthBytestring(n)) for n in announce_nodes)
            response = self.client.post(
                node_or_sprout=node,
                path="node_metadata",
                params=params,
                data=payload,
            )
        else:
            response = self.client.get(node_or_sprout=node,
                                       path="node_metadata",
                                       params=params)

        return response
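
A small usage sketch of the certificate helper above (not from the original source; the host and port values are placeholders):

middleware = RestMiddleware()
certificate = middleware.get_certificate(host='198.51.100.7', port=9151, timeout=3)  # placeholder seednode
print(certificate.not_valid_after)  # cryptography x509.Certificate expiry
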
Example #29
0
def test_collect_rewards_integration(
        click_runner, testerchain, agency_local_registry,
        stakeholder_configuration_file_location, blockchain_alice,
        blockchain_bob, random_policy_label, manual_staker, manual_worker,
        token_economics, mock_transacting_power_activation, policy_value,
        policy_rate):

    half_stake_time = token_economics.minimum_locked_periods // 2  # Test setup
    logger = Logger("Test-CLI")  # Logger for test progress output
    current_period = 0  # Initial period, incremented as the test advances time

    staker_address = manual_staker
    worker_address = manual_worker

    staker = Staker(is_me=True,
                    checksum_address=staker_address,
                    registry=agency_local_registry)
    staker.refresh_stakes()

    # The staker is staking.
    assert staker.is_staking
    assert staker.stakes
    assert staker.worker_address == worker_address

    ursula_port = select_test_port()
    ursula = Ursula(is_me=True,
                    checksum_address=staker_address,
                    worker_address=worker_address,
                    registry=agency_local_registry,
                    rest_host='127.0.0.1',
                    rest_port=ursula_port,
                    network_middleware=MockRestMiddleware(),
                    db_filepath=tempfile.mkdtemp())

    MOCK_KNOWN_URSULAS_CACHE[ursula_port] = ursula
    assert ursula.worker_address == worker_address
    assert ursula.checksum_address == staker_address

    mock_transacting_power_activation(account=worker_address,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Make a commitment for half the first stake duration
    for _ in range(half_stake_time):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    # Alice creates a policy and grants Bob access
    blockchain_alice.selection_buffer = 1

    M, N = 1, 1
    days = 3
    now = testerchain.w3.eth.getBlock(block_identifier='latest').timestamp
    expiration = maya.MayaDT(now).add(days=days - 1)
    blockchain_policy = blockchain_alice.grant(bob=blockchain_bob,
                                               label=random_policy_label,
                                               m=M,
                                               n=N,
                                               value=policy_value,
                                               expiration=expiration,
                                               handpicked_ursulas={ursula})

    # Ensure that the handpicked Ursula was selected for the policy
    arrangement = list(blockchain_policy._accepted_arrangements)[0]
    assert arrangement.ursula == ursula

    # Bob learns about the new staker and joins the policy
    blockchain_bob.start_learning_loop()
    blockchain_bob.remember_node(node=ursula)
    blockchain_bob.join_policy(random_policy_label,
                               bytes(blockchain_alice.stamp))

    # Enrico Encrypts (of course)
    enrico = Enrico(policy_encrypting_key=blockchain_policy.public_key,
                    network_middleware=MockRestMiddleware())

    verifying_key = blockchain_alice.stamp.as_umbral_pubkey()

    for index in range(half_stake_time - 5):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()

        # Encrypt
        random_data = os.urandom(random.randrange(20, 100))
        ciphertext, signature = enrico.encrypt_message(plaintext=random_data)

        # Decrypt
        cleartexts = blockchain_bob.retrieve(ciphertext,
                                             enrico=enrico,
                                             alice_verifying_key=verifying_key,
                                             label=random_policy_label)
        assert random_data == cleartexts[0]

        # Ursula Staying online and the clock advancing
        testerchain.time_travel(periods=1)
        current_period += 1

    # Finish the passage of time for the first Stake
    for _ in range(5):  # plus the extended periods from stake division
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    #
    # WHERES THE MONEY URSULA?? - Collecting Rewards
    #

    # The address the client wants Ursula to send rewards to
    burner_wallet = testerchain.w3.eth.account.create(
        INSECURE_DEVELOPMENT_PASSWORD)

    # The rewards wallet is initially empty, because it is freshly created
    assert testerchain.client.get_balance(burner_wallet.address) == 0

    # Rewards will be unlocked after the
    # final committed period has passed (+1).
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
    testerchain.time_travel(periods=1)
    current_period += 1
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")

    # At least half of the tokens are unlocked (restaking was enabled for some prior periods)
    assert staker.locked_tokens() >= token_economics.minimum_allowed_locked

    # Since we are mocking the blockchain connection, manually consume the transacting power of the Staker.
    mock_transacting_power_activation(account=staker_address,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Collect Policy Fee
    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location, '--policy-fee',
                       '--no-staking-reward', '--staking-address',
                       staker_address, '--withdraw-address',
                       burner_wallet.address)
    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # Policy Fee
    collected_policy_fee = testerchain.client.get_balance(
        burner_wallet.address)
    expected_collection = policy_rate * 30
    assert collected_policy_fee == expected_collection

    # Finish the passage of time... once and for all
    # Extended periods from stake division
    for _ in range(9):
        ursula.commit_to_next_period()
        current_period += 1
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        testerchain.time_travel(periods=1)

    #
    # Collect Staking Reward
    #

    balance_before_collecting = staker.token_agent.get_balance(
        address=staker_address)

    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location,
                       '--no-policy-fee', '--staking-reward',
                       '--staking-address', staker_address, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # The staker has withdrawn her staking rewards
    assert staker.token_agent.get_balance(
        address=staker_address) > balance_before_collecting
Example #30
0
RequestErrors = (
    # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
    ConnectionError,
    TimeoutError,
    RequestException,
    HTTPError
)

RESERVED_IP_ADDRESSES = (
    '0.0.0.0',
    '127.0.0.1',
    '1.2.3.4'
)

IP_DETECTION_LOGGER = Logger('external-ip-detection')



def validate_worker_ip(worker_ip: str) -> None:
    if worker_ip in RESERVED_IP_ADDRESSES:
        raise InvalidWorkerIP(f'{worker_ip} is not a valid or permitted worker IP address. '
                              f'Verify that rest_host is set to the external IPv4 address.')
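
A tiny illustrative call of the guard above (hypothetical; assumes InvalidWorkerIP is importable from this module, and relies on the loopback address being in the reserved list):

try:
    validate_worker_ip(worker_ip='127.0.0.1')
except InvalidWorkerIP as error:
    print(error)  # loopback is reserved, so the guard raises
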


def _request(url: str, certificate=None) -> Union[str, None]:
    """
    Utility function to send a GET request to a URL, returning its
    text content or None, suppressing all errors. A certificate is
    needed if the remote URL source is self-signed.
    """