Example #1
class EthereumClient:
    is_local = False

    GETH = 'Geth'
    PARITY = 'Parity'
    ALT_PARITY = 'Parity-Ethereum'
    GANACHE = 'EthereumJS TestRPC'

    ETHEREUM_TESTER = 'EthereumTester'  # (PyEVM)
    CLEF = 'Clef'  # Signer-only

    BLOCK_CONFIRMATIONS_POLLING_TIME = 3  # seconds
    TRANSACTION_POLLING_TIME = 0.5  # seconds
    COOLING_TIME = 5  # seconds
    STALECHECK_ALLOWABLE_DELAY = 30  # seconds

    class ConnectionNotEstablished(RuntimeError):
        pass

    class SyncTimeout(RuntimeError):
        pass

    class UnknownAccount(ValueError):
        pass

    class TransactionBroadcastError(RuntimeError):
        pass

    class NotEnoughConfirmations(TransactionBroadcastError):
        pass

    class TransactionTimeout(TransactionBroadcastError):
        pass

    class ChainReorganizationDetected(TransactionBroadcastError):
        """Raised when block confirmations logic detects that a TX was lost due to a chain reorganization"""

        error_message = (
            "Chain re-organization detected: Transaction {transaction_hash} was reported to be in "
            "block {block_hash}, but it's not there anymore")

        def __init__(self, receipt):
            self.receipt = receipt
            self.message = self.error_message.format(
                transaction_hash=Web3.toHex(receipt['transactionHash']),
                block_hash=Web3.toHex(receipt['blockHash']))
            super().__init__(self.message)

    def __init__(self, w3, node_technology: str, version: str, platform: str,
                 backend: str):

        self.w3 = w3
        self.node_technology = node_technology
        self.node_version = version
        self.platform = platform
        self.backend = backend
        self.log = Logger(self.__class__.__name__)

        self._add_default_middleware()

    def _add_default_middleware(self):
        # default retry functionality
        self.log.debug('Adding RPC retry middleware to client')
        self.add_middleware(RetryRequestMiddleware)

    @classmethod
    def _get_variant(cls, w3):
        return cls

    @classmethod
    def from_w3(cls, w3: Web3) -> 'EthereumClient':
        """

        Client version strings:

        Geth    -> 'Geth/v1.4.11-stable-fed692f6/darwin/go1.7'
        Parity  -> 'Parity-Ethereum/v2.5.1-beta-e0141f8-20190510/x86_64-linux-gnu/rustc1.34.1'
        Ganache -> 'EthereumJS TestRPC/v2.1.5/ethereum-js'
        PyEVM   -> 'EthereumTester/0.1.0b39/linux/python3.6.7'
        """
        clients = {

            # Geth
            cls.GETH: GethClient,

            # Parity
            cls.PARITY: ParityClient,
            cls.ALT_PARITY: ParityClient,

            # Test Clients
            cls.GANACHE: GanacheClient,
            cls.ETHEREUM_TESTER: EthereumTesterClient,
        }

        try:
            client_data = w3.clientVersion.split('/')
            node_technology = client_data[0]
            ClientSubclass = clients[node_technology]

        except (ValueError, IndexError):
            raise ValueError(
                f"Invalid client version string. Got '{w3.clientVersion}'")

        except KeyError:
            raise NotImplementedError(
                f'{w3.clientVersion} is not a supported ethereum client')

        client_kwargs = {
            'node_technology': node_technology,
            'version': client_data[1],
            'backend': client_data[-1],
            'platform': client_data[2]
            if len(client_data) == 4 else None  # Platform is optional
        }

        instance = ClientSubclass._get_variant(w3)(w3, **client_kwargs)
        return instance

    @property
    def peers(self):
        raise NotImplementedError

    @property
    def chain_name(self) -> str:
        chain_inventory = LOCAL_CHAINS if self.is_local else PUBLIC_CHAINS
        name = chain_inventory.get(self.chain_id, UNKNOWN_DEVELOPMENT_CHAIN_ID)
        return name

    def lock_account(self, account) -> bool:
        if self.is_local:
            return True
        return NotImplemented

    def unlock_account(self, account, password, duration=None) -> bool:
        if self.is_local:
            return True
        return NotImplemented

    @property
    def is_connected(self):
        return self.w3.isConnected()

    @property
    def etherbase(self) -> str:
        return self.w3.eth.accounts[0]

    @property
    def accounts(self):
        return self.w3.eth.accounts

    def get_balance(self, account):
        return self.w3.eth.getBalance(account)

    def inject_middleware(self, middleware, **kwargs):
        self.w3.middleware_onion.inject(middleware, **kwargs)

    def add_middleware(self, middleware):
        self.w3.middleware_onion.add(middleware)

    def set_gas_strategy(self, gas_strategy):
        self.w3.eth.setGasPriceStrategy(gas_strategy)

    @property
    def chain_id(self) -> int:
        try:
            # chainId given as a hex string (e.g. '0x1')
            return int(self.w3.eth.chainId, 16)
        except TypeError:
            # chainId already returned as an int
            return int(self.w3.eth.chainId)

    @property
    def net_version(self) -> int:
        return int(self.w3.net.version)

    def get_contract(self, **kwargs) -> Contract:
        return self.w3.eth.contract(**kwargs)

    @property
    def gas_price(self) -> Wei:
        """
        Returns client's gas price. Underneath, it uses the eth_gasPrice JSON-RPC method
        """
        return self.w3.eth.gasPrice

    def gas_price_for_transaction(self, transaction=None) -> Wei:
        """
        Obtains a gas price via the current gas strategy, if any; otherwise, it resorts to the client's gas price.
        This method mirrors the behavior of web3._utils.transactions when building transactions.
        """
        return self.w3.eth.generateGasPrice(transaction) or self.gas_price

    @property
    def block_number(self) -> BlockNumber:
        return self.w3.eth.blockNumber

    @property
    def coinbase(self) -> ChecksumAddress:
        return self.w3.eth.coinbase

    def wait_for_receipt(self,
                         transaction_hash: str,
                         timeout: float,
                         confirmations: int = 0) -> TxReceipt:
        receipt: TxReceipt = None
        if confirmations:
            # If we're waiting for confirmations, give the chain a head start before we begin polling
            time.sleep(self.COOLING_TIME)

            # We'll keep trying to get receipts until there are enough confirmations or the timeout expires
            with Timeout(seconds=timeout,
                         exception=self.TransactionTimeout) as timeout_context:
                while not receipt:
                    try:
                        receipt = self.block_until_enough_confirmations(
                            transaction_hash=transaction_hash,
                            timeout=timeout,
                            confirmations=confirmations)
                    except (self.ChainReorganizationDetected,
                            self.NotEnoughConfirmations, TimeExhausted):
                        timeout_context.sleep(
                            self.BLOCK_CONFIRMATIONS_POLLING_TIME)
                        continue

        else:
            # If not asking for confirmations, just use web3 and assume the returned receipt is final
            try:
                receipt = self.w3.eth.waitForTransactionReceipt(
                    transaction_hash=transaction_hash,
                    timeout=timeout,
                    poll_latency=self.TRANSACTION_POLLING_TIME)
            except TimeExhausted:
                raise  # TODO: #1504 - Handle transaction timeout

        return receipt

    def block_until_enough_confirmations(self, transaction_hash: str,
                                         timeout: float,
                                         confirmations: int) -> dict:

        receipt: TxReceipt = self.w3.eth.waitForTransactionReceipt(
            transaction_hash=transaction_hash,
            timeout=timeout,
            poll_latency=self.TRANSACTION_POLLING_TIME)

        preliminary_block_hash = Web3.toHex(receipt['blockHash'])
        tx_block_number = Web3.toInt(receipt['blockNumber'])
        self.log.info(
            f"Transaction {Web3.toHex(transaction_hash)} is preliminarily included in "
            f"block {preliminary_block_hash}")

        confirmations_timeout = self._calculate_confirmations_timeout(
            confirmations)
        confirmations_so_far = 0
        with Timeout(seconds=confirmations_timeout,
                     exception=self.NotEnoughConfirmations) as timeout_context:
            while confirmations_so_far < confirmations:
                timeout_context.sleep(self.BLOCK_CONFIRMATIONS_POLLING_TIME)
                self.check_transaction_is_on_chain(receipt=receipt)
                confirmations_so_far = self.block_number - tx_block_number
                self.log.info(
                    f"We have {confirmations_so_far} confirmations. "
                    f"Waiting for {confirmations - confirmations_so_far} more."
                )
            return receipt

    @staticmethod
    def _calculate_confirmations_timeout(confirmations):
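        # Heuristic: allow roughly three average block times per required confirmation.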
        confirmations_timeout = 3 * AVERAGE_BLOCK_TIME_IN_SECONDS * confirmations
        return confirmations_timeout

    def check_transaction_is_on_chain(self, receipt: TxReceipt) -> bool:
        transaction_hash = Web3.toHex(receipt['transactionHash'])
        try:
            new_receipt = self.w3.eth.getTransactionReceipt(transaction_hash)
        except TransactionNotFound:
            reorg_detected = True
        else:
            reorg_detected = receipt['blockHash'] != new_receipt['blockHash']

        if reorg_detected:
            exception = self.ChainReorganizationDetected(receipt=receipt)
            self.log.info(exception.message)
            raise exception
            # TODO: Consider adding an optional param in this exception to include extra info (e.g. new block)
        return True

    def sign_transaction(self, transaction_dict: dict) -> bytes:
        raise NotImplementedError

    def get_transaction(self, transaction_hash) -> dict:
        return self.w3.eth.getTransaction(transaction_hash)

    def get_transaction_receipt(self, transaction_hash) -> Union[dict, None]:
        return self.w3.eth.getTransactionReceipt(transaction_hash)

    def get_transaction_count(self, account: str, pending: bool) -> int:
        block_identifier = 'pending' if pending else 'latest'
        return self.w3.eth.getTransactionCount(account, block_identifier)

    def send_transaction(self, transaction_dict: dict) -> str:
        return self.w3.eth.sendTransaction(transaction_dict)

    def send_raw_transaction(self, transaction_bytes: bytes) -> str:
        return self.w3.eth.sendRawTransaction(transaction_bytes)

    def sign_message(self, account: str, message: bytes) -> str:
        """
        Calls the appropriate signing function for the specified account on the
        backend. If the backend is based on eth-tester, then it uses the
        eth-tester signing interface to do so.
        """
        return self.w3.eth.sign(account, data=message)

    def get_blocktime(self):
        highest_block = self.w3.eth.getBlock('latest')
        now = highest_block['timestamp']
        return now

    def _has_latest_block(self) -> bool:
        # TODO: Investigate using `web3.middleware.make_stalecheck_middleware` #2060
        # check that our local chain data is up to date
        return (time.time() -
                self.get_blocktime()) < self.STALECHECK_ALLOWABLE_DELAY

    def parse_transaction_data(self, transaction):
        return transaction.input
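
A minimal usage sketch for the client above (not part of the class). It assumes a reachable JSON-RPC endpoint with unlocked, funded accounts; the localhost URI is a placeholder, and the same web3.py-era camelCase API the class itself uses is assumed. from_w3 picks the concrete subclass (GethClient, ParityClient, GanacheClient or EthereumTesterClient) from the reported client version string.

from web3 import Web3

w3 = Web3(Web3.HTTPProvider('http://localhost:8545'))  # placeholder endpoint
client = EthereumClient.from_w3(w3)
print(client.chain_name, client.chain_id, client.block_number)

# Send a trivial transfer and wait for the receipt plus two extra block
# confirmations, giving up after two minutes.
tx_hash = client.send_transaction({'from': client.etherbase,
                                   'to': client.accounts[1],
                                   'value': 1})
receipt = client.wait_for_receipt(tx_hash, timeout=120, confirmations=2)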
Example #2
class CharacterConfiguration(BaseConfiguration):
    """
    'Sideways Engagement' of Character classes; a reflection of input parameters.
    """

    VERSION = 2  # bump when static payload scheme changes

    CHARACTER_CLASS = NotImplemented
    DEFAULT_CONTROLLER_PORT = NotImplemented
    DEFAULT_DOMAIN = NetworksInventory.DEFAULT
    DEFAULT_NETWORK_MIDDLEWARE = RestMiddleware
    TEMP_CONFIGURATION_DIR_PREFIX = 'tmp-nucypher'

    # When we begin to support other threshold schemes, this will be one of the concepts that makes us want a factory.  #571
    known_node_class = Ursula

    # Gas
    DEFAULT_GAS_STRATEGY = 'fast'

    def __init__(
            self,

            # Base
            emitter=None,
            config_root: str = None,
            filepath: str = None,

            # Mode
            dev_mode: bool = False,
            federated_only: bool = False,

            # Identity
            checksum_address: str = None,
            crypto_power: CryptoPower = None,

            # Keyring
            keyring: NucypherKeyring = None,
            keyring_root: str = None,

            # Learner
            learn_on_same_thread: bool = False,
            abort_on_learning_error: bool = False,
            start_learning_now: bool = True,

            # Network
            controller_port: int = None,
            domain: str = DEFAULT_DOMAIN,
            interface_signature: Signature = None,
            network_middleware: RestMiddleware = None,
            lonely: bool = False,

            # Node Storage
            known_nodes: set = None,
            node_storage: NodeStorage = None,
            reload_metadata: bool = True,
            save_metadata: bool = True,

            # Blockchain
            poa: bool = None,
            light: bool = False,
            sync: bool = False,
            provider_uri: str = None,
            provider_process=None,
            gas_strategy: Union[Callable, str] = DEFAULT_GAS_STRATEGY,
            signer_uri: str = None,

            # Registry
            registry: BaseContractRegistry = None,
            registry_filepath: str = None,

            # Deployed Workers
            worker_data: dict = None):

        self.log = Logger(self.__class__.__name__)
        UNINITIALIZED_CONFIGURATION.bool_value(False)

        # Identity
        # NOTE: NodeConfigurations can only be used with Self-Characters
        self.is_me = True
        self.checksum_address = checksum_address

        # Keyring
        self.crypto_power = crypto_power
        self.keyring = keyring or NO_KEYRING_ATTACHED
        self.keyring_root = keyring_root or UNINITIALIZED_CONFIGURATION

        # Contract Registry
        if registry and registry_filepath:
            if registry.filepath != registry_filepath:
                error = f"Inconsistent registry filepaths for '{registry.filepath}' and '{registry_filepath}'."
                raise ValueError(error)
            else:
                self.log.warn(
                    f"Registry and registry filepath were both passed.")
        self.registry = registry or NO_BLOCKCHAIN_CONNECTION.bool_value(False)
        self.registry_filepath = registry_filepath or UNINITIALIZED_CONFIGURATION

        # Blockchain
        self.poa = poa
        self.is_light = light
        self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        self.provider_process = provider_process or NO_BLOCKCHAIN_CONNECTION
        self.signer_uri = signer_uri or None

        # Learner
        self.federated_only = federated_only
        self.domain = domain
        self.learn_on_same_thread = learn_on_same_thread
        self.abort_on_learning_error = abort_on_learning_error
        self.start_learning_now = start_learning_now
        self.save_metadata = save_metadata
        self.reload_metadata = reload_metadata
        self.known_nodes = known_nodes or set()  # handpicked
        self.lonely = lonely

        # Configuration
        self.__dev_mode = dev_mode
        self.config_file_location = filepath or UNINITIALIZED_CONFIGURATION
        self.config_root = UNINITIALIZED_CONFIGURATION

        # Deployed Workers
        self.worker_data = worker_data

        #
        # Federated vs. Blockchain arguments consistency
        #

        #
        # Federated
        #

        if self.federated_only:
            # Check for incompatible values
            blockchain_args = {
                'filepath': registry_filepath,
                'poa': poa,
                'provider_process': provider_process,
                'provider_uri': provider_uri,
                'gas_strategy': gas_strategy
            }
            if any(blockchain_args.values()):
                bad_args = ", ".join(f"{arg}={val}"
                                     for arg, val in blockchain_args.items()
                                     if val)
                self.log.warn(
                    f"Arguments {bad_args} are incompatible with federated_only. "
                    f"Overriding them with sane defaults.")

                # Clear decentralized attributes to ensure consistency with a
                # federated configuration.
                self.poa = False
                self.is_light = False
                self.provider_uri = None
                self.provider_process = None
                self.registry_filepath = None
                self.gas_strategy = None

        #
        # Decentralized
        #

        else:
            self.gas_strategy = gas_strategy
            is_initialized = BlockchainInterfaceFactory.is_interface_initialized(
                provider_uri=self.provider_uri)
            if not is_initialized and provider_uri:
                BlockchainInterfaceFactory.initialize_interface(
                    provider_uri=self.provider_uri,
                    poa=self.poa,
                    light=self.is_light,
                    provider_process=self.provider_process,
                    sync=sync,
                    emitter=emitter,
                    gas_strategy=gas_strategy)
            else:
                self.log.warn(
                    f"Using existing blockchain interface connection ({self.provider_uri})."
                )

            if not self.registry:
                # TODO: These two code blocks are untested.
                if not self.registry_filepath:  # TODO: Registry URI  (goerli://speedynet.json) :-)
                    self.log.info(f"Fetching latest registry from source.")
                    self.registry = InMemoryContractRegistry.from_latest_publication(
                        network=self.domain)
                else:
                    self.registry = LocalContractRegistry(
                        filepath=self.registry_filepath)
                    self.log.info(f"Using local registry ({self.registry}).")

        if dev_mode:
            self.__temp_dir = UNINITIALIZED_CONFIGURATION
            self.__setup_node_storage()
            self.initialize(password=DEVELOPMENT_CONFIGURATION)
        else:
            self.__temp_dir = LIVE_CONFIGURATION
            self.config_root = config_root or self.DEFAULT_CONFIG_ROOT
            self._cache_runtime_filepaths()
            self.__setup_node_storage(node_storage=node_storage)

        # Network
        self.controller_port = controller_port or self.DEFAULT_CONTROLLER_PORT
        self.network_middleware = network_middleware or self.DEFAULT_NETWORK_MIDDLEWARE(
            registry=self.registry)
        self.interface_signature = interface_signature

        super().__init__(filepath=self.config_file_location,
                         config_root=self.config_root)

    def __call__(self, **character_kwargs):
        return self.produce(**character_kwargs)

    @classmethod
    def checksum_address_from_filepath(cls, filepath: str) -> str:

        pattern = re.compile(
            r'''
                             (^\w+)-            # Character name, followed by a dash
                             (0x                # Then a '0x' prefix, exactly once
                             [0-9a-fA-F]{40})   # Followed by exactly 40 hex chars
                             ''', re.VERBOSE)

        filename = os.path.basename(filepath)
        match = pattern.match(filename)

        if match:
            character_name, checksum_address = match.groups()

        else:
            # Extract from default by "peeking" inside the configuration file.
            default_name = cls.generate_filename()
            if filename == default_name:
                checksum_address = cls.peek(filepath=filepath,
                                            field='checksum_address')

                ###########
                # TODO: Cleanup and deprecate worker_address in config files, leaving only checksum_address
                from nucypher.config.characters import UrsulaConfiguration
                if issubclass(cls, UrsulaConfiguration):
                    federated = bool(
                        cls.peek(filepath=filepath, field='federated_only'))
                    if not federated:
                        checksum_address = cls.peek(filepath=filepath,
                                                    field='worker_address')
                ###########

            else:
                raise ValueError(
                    f"Cannot extract checksum from filepath '{filepath}'")

        if not is_checksum_address(checksum_address):
            raise RuntimeError(
                f"Invalid checksum address detected in configuration file at '{filepath}'."
            )
        return checksum_address

    def update(self, **kwargs) -> None:
        """
        A facility for updating existing attributes on existing configuration instances.

        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        return super().update(modifier=self.checksum_address,
                              filepath=self.config_file_location,
                              **kwargs)

    @classmethod
    def generate(cls, password: str, *args, **kwargs):
        """Shortcut: Hook-up a new initial installation and write configuration file to the disk"""
        node_config = cls(dev_mode=False, *args, **kwargs)
        node_config.initialize(password=password)
        node_config.to_configuration_file()
        return node_config

    def cleanup(self) -> None:
        if self.__dev_mode:
            self.__temp_dir.cleanup()

    @property
    def dev_mode(self) -> bool:
        return self.__dev_mode

    def __setup_node_storage(self, node_storage=None) -> None:
        if self.dev_mode:
            node_storage = ForgetfulNodeStorage(
                registry=self.registry, federated_only=self.federated_only)
        elif not node_storage:
            node_storage = LocalFileBasedNodeStorage(
                registry=self.registry,
                config_root=self.config_root,
                federated_only=self.federated_only)
        self.node_storage = node_storage

    def forget_nodes(self) -> None:
        self.node_storage.clear()
        message = "Removed all stored node node metadata and certificates"
        self.log.debug(message)

    def destroy(self) -> None:
        """Parse a node configuration and remove all associated files from the filesystem"""
        self.attach_keyring()
        self.keyring.destroy()
        os.remove(self.config_file_location)

    def generate_parameters(self, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        merged_parameters = {
            **self.static_payload(),
            **self.dynamic_payload,
            **overrides
        }
        non_init_params = ('config_root', 'poa', 'light', 'provider_uri',
                           'registry_filepath', 'gas_strategy', 'signer_uri')
        character_init_params = filter(lambda t: t[0] not in non_init_params,
                                       merged_parameters.items())
        return dict(character_init_params)

    def produce(self, **overrides) -> CHARACTER_CLASS:
        """Initialize a new character instance and return it."""
        merged_parameters = self.generate_parameters(**overrides)
        character = self.CHARACTER_CLASS(**merged_parameters)
        return character

    @classmethod
    def assemble(cls, filepath: str = None, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        payload = cls._read_configuration_file(filepath=filepath)
        node_storage = cls.load_node_storage(
            storage_payload=payload['node_storage'],
            federated_only=payload['federated_only'])
        domain = payload['domain']

        # Assemble
        payload.update(dict(node_storage=node_storage, domain=domain))
        # Filter out None values from **overrides to detect, well, overrides...
        # Acts as a shim for optional CLI flags.
        overrides = {k: v for k, v in overrides.items() if v is not None}
        payload = {**payload, **overrides}
        return payload

    @classmethod
    def from_configuration_file(
            cls,
            filepath: str = None,
            provider_process=None,
            **overrides  # < ---- Inlet for CLI Flags
    ) -> 'CharacterConfiguration':
        """Initialize a CharacterConfiguration from a JSON file."""
        filepath = filepath or cls.default_filepath()
        assembled_params = cls.assemble(filepath=filepath, **overrides)
        node_configuration = cls(filepath=filepath,
                                 provider_process=provider_process,
                                 **assembled_params)
        return node_configuration

    def validate(self) -> bool:

        # Top-level
        if not os.path.exists(self.config_root):
            raise self.ConfigurationError(
                f'No configuration directory found at {self.config_root}.')

        # Sub-paths
        filepaths = self.runtime_filepaths
        for field, path in filepaths.items():
            if path and not os.path.exists(path):
                message = 'Missing configuration file or directory: {}.'
                if 'registry' in path:
                    message += ' Did you mean to pass --federated-only?'
                raise CharacterConfiguration.InvalidConfiguration(
                    message.format(path))
        return True

    def static_payload(self) -> dict:
        """Exported static configuration values for initializing Ursula"""

        payload = dict(

            # Identity
            federated_only=self.federated_only,
            checksum_address=self.checksum_address,
            keyring_root=self.keyring_root,

            # Behavior
            domain=self.domain,
            learn_on_same_thread=self.learn_on_same_thread,
            abort_on_learning_error=self.abort_on_learning_error,
            start_learning_now=self.start_learning_now,
            save_metadata=self.save_metadata,
            node_storage=self.node_storage.payload(),
            lonely=self.lonely,
        )

        # Optional values (mode)
        if not self.federated_only:
            if self.provider_uri:
                if not self.signer_uri:
                    self.signer_uri = self.provider_uri
                payload.update(
                    dict(provider_uri=self.provider_uri,
                         poa=self.poa,
                         light=self.is_light,
                         signer_uri=self.signer_uri))
            if self.registry_filepath:
                payload.update(dict(registry_filepath=self.registry_filepath))

            # Gas Price
            payload.update(dict(gas_strategy=self.gas_strategy))

        # Merge with base payload
        base_payload = super().static_payload()
        base_payload.update(payload)

        return base_payload

    @property  # TODO: Graduate to a method and "derive" dynamic from static payload.
    def dynamic_payload(self) -> dict:
        """Exported dynamic configuration values for initializing Ursula"""
        payload = dict()
        if not self.federated_only:
            testnet = self.domain != NetworksInventory.MAINNET
            signer = Signer.from_signer_uri(self.signer_uri, testnet=testnet)
            payload.update(dict(registry=self.registry, signer=signer))

        payload.update(
            dict(network_middleware=self.network_middleware
                 or self.DEFAULT_NETWORK_MIDDLEWARE(),
                 known_nodes=self.known_nodes,
                 node_storage=self.node_storage,
                 crypto_power_ups=self.derive_node_power_ups()))
        return payload

    def generate_filepath(self,
                          filepath: str = None,
                          modifier: str = None,
                          override: bool = False) -> str:
        modifier = modifier or self.checksum_address
        filepath = super().generate_filepath(filepath=filepath,
                                             modifier=modifier,
                                             override=override)
        return filepath

    @property
    def runtime_filepaths(self) -> dict:
        filepaths = dict(config_root=self.config_root,
                         keyring_root=self.keyring_root,
                         registry_filepath=self.registry_filepath)
        return filepaths

    @classmethod
    def generate_runtime_filepaths(cls, config_root: str) -> dict:
        """Dynamically generate paths based on configuration root directory"""
        filepaths = dict(config_root=config_root,
                         config_file_location=os.path.join(
                             config_root, cls.generate_filename()),
                         keyring_root=os.path.join(config_root, 'keyring'))
        return filepaths

    def _cache_runtime_filepaths(self) -> None:
        """Generate runtime filepaths and cache them on the config object"""
        filepaths = self.generate_runtime_filepaths(
            config_root=self.config_root)
        for field, filepath in filepaths.items():
            if getattr(self, field) is UNINITIALIZED_CONFIGURATION:
                setattr(self, field, filepath)

    def attach_keyring(self,
                       checksum_address: str = None,
                       *args,
                       **kwargs) -> None:
        account = checksum_address or self.checksum_address
        if not account:
            raise self.ConfigurationError(
                "No account specified to unlock keyring")
        if self.keyring is not NO_KEYRING_ATTACHED:
            if self.keyring.checksum_address != account:
                raise self.ConfigurationError(
                    "There is already a keyring attached to this configuration."
                )
            return
        self.keyring = NucypherKeyring(keyring_root=self.keyring_root,
                                       account=account,
                                       *args,
                                       **kwargs)

    def derive_node_power_ups(self) -> List[CryptoPowerUp]:
        power_ups = list()
        if self.is_me and not self.dev_mode:
            for power_class in self.CHARACTER_CLASS._default_crypto_powerups:
                power_up = self.keyring.derive_crypto_power(power_class)
                power_ups.append(power_up)
        return power_ups

    def initialize(self, password: str) -> str:
        """Initialize a new configuration and write installation files to disk."""

        # Development
        if self.dev_mode:
            self.__temp_dir = TemporaryDirectory(
                prefix=self.TEMP_CONFIGURATION_DIR_PREFIX)
            self.config_root = self.__temp_dir.name

        # Persistent
        else:
            self._ensure_config_root_exists()
            self.write_keyring(password=password)

        self._cache_runtime_filepaths()
        self.node_storage.initialize()

        # Validate
        if not self.__dev_mode:
            self.validate()

        # Success
        message = "Created nucypher installation files at {}".format(
            self.config_root)
        self.log.debug(message)
        return self.config_root

    def write_keyring(self,
                      password: str,
                      checksum_address: str = None,
                      **generation_kwargs) -> NucypherKeyring:

        if self.federated_only:
            checksum_address = FEDERATED_ADDRESS

        elif not checksum_address:

            # Note: It is assumed the blockchain interface is not yet connected.
            if self.provider_process:

                # Generate Geth's "datadir"
                if not os.path.exists(self.provider_process.data_dir):
                    os.mkdir(self.provider_process.data_dir)

                # Get or create wallet address
                if not self.checksum_address:
                    self.checksum_address = self.provider_process.ensure_account_exists(
                        password=password)
                elif self.checksum_address not in self.provider_process.accounts(
                ):
                    raise self.ConfigurationError(
                        f'Unknown Account {self.checksum_address}')

            elif not self.checksum_address:
                raise self.ConfigurationError(
                    f'No checksum address provided for decentralized configuration.'
                )

            checksum_address = self.checksum_address

        self.keyring = NucypherKeyring.generate(
            password=password,
            keyring_root=self.keyring_root,
            checksum_address=checksum_address,
            **generation_kwargs)

        if self.federated_only:
            self.checksum_address = self.keyring.checksum_address

        return self.keyring

    @classmethod
    def load_node_storage(cls, storage_payload: dict, federated_only: bool):
        from nucypher.config.storages import NodeStorage
        node_storage_subclasses = {
            storage._name: storage
            for storage in NodeStorage.__subclasses__()
        }
        storage_type = storage_payload[NodeStorage._TYPE_LABEL]
        storage_class = node_storage_subclasses[storage_type]
        node_storage = storage_class.from_payload(
            payload=storage_payload, federated_only=federated_only)
        return node_storage
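
For orientation, a sketch of how a concrete subclass is meant to pin down the NotImplemented class attributes before produce() can build a character. The subclass name, port numbers and the extra Ursula kwargs below are illustrative assumptions, not definitions taken from this file.

class ExampleUrsulaConfiguration(CharacterConfiguration):
    CHARACTER_CLASS = Ursula              # Ursula is already referenced above as known_node_class
    DEFAULT_CONTROLLER_PORT = 9151        # hypothetical default port

# Ephemeral, federated-only setup: the configuration lives in a temporary directory.
config = ExampleUrsulaConfiguration(dev_mode=True, federated_only=True)
ursula = config.produce(rest_host='127.0.0.1', rest_port=11500)  # character-specific kwargs go in as overrides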
Example #3
class AvailabilityTracker:

    FAST_INTERVAL = 15  # Seconds
    SLOW_INTERVAL = 60 * 2
    SEEDING_DURATION = 60
    MAXIMUM_ALONE_TIME = 120

    MAXIMUM_SCORE = 10.0  # Score
    SAMPLE_SIZE = 1  # Ursulas
    SENSITIVITY = 0.5  # Threshold
    CHARGE_RATE = 0.9  # Measurement Multiplier

    class Unreachable(RuntimeError):
        pass

    class Solitary(Unreachable):
        message = "Cannot connect to any teacher nodes."

    class Lonely(Unreachable):
        message = "Cannot connect to enough teacher nodes."

    def __init__(self, ursula, enforce_loneliness: bool = True):

        self.log = Logger(self.__class__.__name__)
        self._ursula = ursula
        self.enforce_loneliness = enforce_loneliness

        self.__excuses = dict()  # Maps failure timestamps to reasons
        self.__score = 10  # 10 == Perfect Score
        self.warnings = {
            9: self.mild_warning,
            7: self.medium_warning,
            2: self.severe_warning,
            1: self.shutdown_everything  # 0 is unobtainable
        }

        self._start_time = None
        self.__active_measurement = False
        self.__task = LoopingCall(self.maintain)
        self.responders = set()

    @property
    def excuses(self):
        return self.__excuses

    def mild_warning(self) -> None:
        self.log.info(
            f'[UNREACHABLE NOTICE (SCORE {self.score})] This node was recently reported as unreachable.'
        )

    def medium_warning(self) -> None:
        self.log.warn(
            f'[UNREACHABLE CAUTION (SCORE {self.score})] This node is reporting as unreachable. '
            f'Please check your network and firewall configuration.')

    def severe_warning(self) -> None:
        self.log.warn(
            f'[UNREACHABLE WARNING (SCORE {self.score})] '
            f'Please check your network and firewall configuration. '
            f'Auto-shutdown will commence soon if the services do not become available.'
        )

    def shutdown_everything(self, reason=None, halt_reactor=False):
        self.log.warn(
            f'[NODE IS UNREACHABLE (SCORE {self.score})] Commencing auto-shutdown sequence...'
        )
        self._ursula.stop(halt_reactor=False)
        try:
            if reason:
                raise reason(reason.message)
            raise self.Unreachable(
                f'{self._ursula} is unreachable (scored {self.score}).')
        finally:
            if halt_reactor:
                self._halt_reactor()

    @staticmethod
    def _halt_reactor() -> None:
        if reactor.running:
            reactor.stop()

    # NOTE: Twisted errbacks pass the Failure positionally, so it must land in *args.
    def handle_measurement_errors(self,
                                  *args,
                                  crash_on_error: bool = False,
                                  **kwargs) -> None:

        if args:
            failure = args[0]
            cleaned_traceback = failure.getTraceback().replace(
                '{', '').replace('}', '')  # FIXME: Amazing.
            self.log.warn(
                "Unhandled error during availability check: {}".format(
                    cleaned_traceback))
            if crash_on_error:
                failure.raiseException()
        else:
            # Restart on failure
            if not self.running:
                self.log.debug(f"Availability check crashed, restarting...")
                self.start(now=True)

    def status(self) -> bool:
        """Returns current indication of availability"""
        result = self.score > (self.SENSITIVITY * self.MAXIMUM_SCORE)
        if not result:
            for time, reason in self.__excuses.items():
                self.log.info(f'[{time}] - {reason["error"]}')
        return result

    @property
    def running(self) -> bool:
        return self.__task.running

    def start(self, now: bool = False):
        if not self.running:
            self._start_time = maya.now()
            d = self.__task.start(interval=self.FAST_INTERVAL, now=now)
            d.addErrback(self.handle_measurement_errors)

    def stop(self) -> None:
        if self.running:
            self.__task.stop()

    def maintain(self) -> None:
        known_nodes_is_smaller_than_sample_size = len(
            self._ursula.known_nodes) < self.SAMPLE_SIZE

        # If there are no known nodes or too few known nodes, skip this round...
        # ... but not for longer than the maximum allotted alone time
        if known_nodes_is_smaller_than_sample_size:
            if not self._ursula.lonely and self.enforce_loneliness:
                now = maya.now().epoch
                delta = now - self._start_time.epoch
                if delta >= self.MAXIMUM_ALONE_TIME:
                    self.severe_warning()
                    reason = self.Solitary if not self._ursula.known_nodes else self.Lonely
                    self.shutdown_everything(reason=reason)
            return

        if self.__task.interval == self.FAST_INTERVAL:
            now = maya.now().epoch
            delta = now - self._start_time.epoch
            if delta >= self.SEEDING_DURATION:
                # Slow down
                self.__task.interval = self.SLOW_INTERVAL
                return

        if self.__active_measurement:
            self.log.debug(
                f"Availability check already in progress - skipping this round (Score: {self.score}). "
            )
            return  # Abort
        else:
            self.log.debug(
                f"Continuing to measure availability (Score: {self.score}).")
            self.__active_measurement = True

        try:
            self.measure_sample()
        finally:
            self.__active_measurement = False

        delta = maya.now() - self._start_time
        self.log.info(
            f"Current availability score is {self.score} measured since {delta}"
        )
        self.issue_warnings()

    def issue_warnings(self, cascade: bool = True) -> None:
        warnings = sorted(self.warnings.items(), key=lambda t: t[0])
        for threshold, action in warnings:
            if self.score <= threshold:
                action()
                if not cascade:
                    # Exit after the first active warning is issued
                    return

    def sample(self, quantity: int) -> list:
        population = tuple(self._ursula.known_nodes.values())
        ursulas = random.sample(population=population, k=quantity)
        return ursulas

    @property
    def score(self) -> float:
        return self.__score

    def record(self, result: bool = None, reason: dict = None) -> None:
        """Score the result and cache it."""
        if (not result) and reason:
            self.__excuses[maya.now().epoch] = reason
        if result is None:
            return  # Never mind, don't score this one...
        score = int(result) + self.CHARGE_RATE * self.__score
        if score >= self.MAXIMUM_SCORE:
            self.__score = self.MAXIMUM_SCORE
        else:
            self.__score = score
        self.log.debug(f"Recorded new uptime score ({self.score})")

    def measure_sample(self, ursulas: list = None) -> None:
        """
        Measure self-availability from a sample of Ursulas or automatically from known nodes.
        Handle the possibility of unreachable or invalid remote nodes in the sample.
        """

        # TODO: Relocate?
        Unreachable = (*NodeSeemsToBeDown, self._ursula.NotStaking,
                       self._ursula.network_middleware.UnexpectedResponse)

        if not ursulas:
            ursulas = self.sample(quantity=self.SAMPLE_SIZE)

        for ursula_or_sprout in ursulas:
            try:
                self.measure(ursula_or_sprout=ursula_or_sprout)
            except self._ursula.network_middleware.NotFound:
                # Ignore this measurement and move on because the remote node is not compatible.
                self.record(None,
                            reason={
                                "error":
                                "Remote node did not support 'ping' endpoint."
                            })
            except Unreachable as e:
                # This node is either not an Ursula, not available, does not support uptime checks, or is not staking...
                # ...do nothing and move on without changing the score.
                self.log.debug(
                    f'{ursula_or_sprout} responded to uptime check with {e.__class__.__name__}'
                )
                continue

    def measure(self, ursula_or_sprout: Union['Ursula', NodeSprout]) -> None:
        """Measure self-availability from a single remote node that participates uptime checks."""
        try:
            response = self._ursula.network_middleware.check_availability(
                initiator=self._ursula, responder=ursula_or_sprout)
        except RestMiddleware.BadRequest as e:
            self.responders.add(ursula_or_sprout.checksum_address)
            self.record(False, reason=e.reason)
        else:
            # Record response
            self.responders.add(ursula_or_sprout.checksum_address)
            if response.status_code == 200:
                self.record(True)
            elif response.status_code == 400:
                self.record(
                    False,
                    reason={
                        'failed':
                        f"{ursula_or_sprout.checksum_address} reported unavailability."
                    })
            else:
                self.record(
                    None,
                    reason={
                        "error":
                        f"{ursula_or_sprout.checksum_address} returned {response.status_code} from 'ping' endpoint."
                    })
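
The score maintained by record() decays geometrically: each measurement multiplies the previous score by CHARGE_RATE (0.9) and adds 1 for a success or 0 for a failure, capped at MAXIMUM_SCORE. A small standalone sketch (constants copied from the class above) shows how many consecutive failures it takes to trip each warning threshold registered in __init__:

CHARGE_RATE, MAXIMUM_SCORE = 0.9, 10.0

score, failures = MAXIMUM_SCORE, 0
for threshold, label in [(9, 'mild'), (7, 'medium'), (2, 'severe'), (1, 'shutdown')]:
    while score > threshold:
        score = 0 + CHARGE_RATE * score  # record(False): int(False) contributes 0
        failures += 1
    print(f'{label} warning after {failures} consecutive failures (score={score:.2f})')

With these constants, status() also starts reporting the node as unavailable once the score falls to SENSITIVITY * MAXIMUM_SCORE = 5.0 or below, i.e. after roughly seven straight failures.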
Example #4
class StakeList(UserList):

    @validate_checksum_address
    def __init__(self,
                 registry: BaseContractRegistry,
                 checksum_address: ChecksumAddress = None,  # allow for lazy setting
                 *args, **kwargs):

        super().__init__(*args, **kwargs)
        self.log = Logger('stake-tracker')
        self.staking_agent = ContractAgency.get_agent(StakingEscrowAgent, registry=registry)
        from nucypher.blockchain.economics import EconomicsFactory
        self.economics = EconomicsFactory.get_economics(registry=registry)

        self.__initial_period = NOT_STAKING
        self.__terminal_period = NOT_STAKING

        # "load-in" Read on-chain stakes
        self.checksum_address = checksum_address
        self.__updated = None

    @property
    def updated(self) -> maya.MayaDT:
        return self.__updated

    @property
    def initial_period(self) -> int:
        return self.__initial_period

    @property
    def terminal_period(self) -> int:
        return self.__terminal_period

    @validate_checksum_address
    def refresh(self) -> None:
        """Public staking cache invalidation method"""
        return self.__read_stakes()

    def __read_stakes(self) -> None:
        """Rewrite the local staking cache by reading on-chain stakes"""

        existing_records = len(self)

        # Candidate replacement cache values
        current_period = self.staking_agent.get_current_period()
        onchain_stakes, initial_period, terminal_period = list(), current_period, current_period

        # Read from blockchain
        stakes_reader = self.staking_agent.get_all_stakes(staker_address=self.checksum_address)
        inactive_substakes = []
        for onchain_index, stake_info in enumerate(stakes_reader):

            if not stake_info:
                onchain_stake = EMPTY_STAKING_SLOT

            else:
                onchain_stake = Stake.from_stake_info(checksum_address=self.checksum_address,
                                                      stake_info=stake_info,
                                                      staking_agent=self.staking_agent,
                                                      index=onchain_index,
                                                      economics=self.economics)

                # Track the earliest initial period
                if onchain_stake.first_locked_period:
                    if onchain_stake.first_locked_period < initial_period:
                        initial_period = onchain_stake.first_locked_period

                # Track the latest terminal period
                if onchain_stake.final_locked_period > terminal_period:
                    terminal_period = onchain_stake.final_locked_period

                if onchain_stake.status().is_child(Stake.Status.INACTIVE):
                    inactive_substakes.append(onchain_index)

            # Store the replacement stake
            onchain_stakes.append(onchain_stake)

        # Commit the new stake and terminal values to the cache
        self.data = onchain_stakes
        if onchain_stakes:
            self.__initial_period = initial_period
            self.__terminal_period = terminal_period
            changed_records = abs(existing_records - len(onchain_stakes))
            self.log.debug(f"Updated {changed_records} local staking cache entries.")
            if inactive_substakes:
                self.log.debug(f"The following sub-stakes are inactive: {inactive_substakes}")

        # Record most recent cache update
        self.__updated = maya.now()
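
A minimal read-only usage sketch for the list above. The registry construction mirrors the one used in CharacterConfiguration; the network name and staker address are placeholders, and EMPTY_STAKING_SLOT is the sentinel __read_stakes uses for empty slots.

staker = '0x0000000000000000000000000000000000000000'  # placeholder staker address

registry = InMemoryContractRegistry.from_latest_publication(network='mainnet')
stakes = StakeList(registry=registry, checksum_address=staker)

stakes.refresh()  # reads every sub-stake slot from the StakingEscrow contract
print(f'{len(stakes)} slots, periods {stakes.initial_period}-{stakes.terminal_period}, '
      f'last updated {stakes.updated}')
for stake in stakes:
    if stake is not EMPTY_STAKING_SLOT:
        print(stake)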
Example #5
class Felix(Character, NucypherTokenActor):
    """
    A NuCypher ERC20 faucet / Airdrop scheduler.

    Felix is a web application that gives NuCypher *testnet* tokens to registered addresses
    with a scheduled reduction of disbursement amounts, and an HTTP endpoint
    for handling new address registration.

    The main goal of Felix is to provide a source of testnet tokens for
    research and the development of production-ready nucypher dApps.
    """

    _default_crypto_powerups = [SigningPower]

    # Intervals
    DISTRIBUTION_INTERVAL = 60  # seconds
    DISBURSEMENT_INTERVAL = 24 * 365  # hours; distribute tokens to the same address at most once each YEAR.
    STAGING_DELAY = 10  # seconds

    # Disbursement
    BATCH_SIZE = 10  # transactions
    MULTIPLIER = Decimal('0.9')  # Each disbursement is 10% smaller than the previous one;
    # this only becomes relevant once the year-long interval above has passed.
    MINIMUM_DISBURSEMENT = int(1e18)  # NuNits (1 NU)
    ETHER_AIRDROP_AMOUNT = int(1e17)  # Wei (.1 ether)
    MAX_INDIVIDUAL_REGISTRATIONS = 3  # Registration Limit

    # Node Discovery
    LEARNING_TIMEOUT = 30  # seconds
    _SHORT_LEARNING_DELAY = 60  # seconds
    _LONG_LEARNING_DELAY = 120  # seconds
    _ROUNDS_WITHOUT_NODES_AFTER_WHICH_TO_SLOW_DOWN = 1

    # Twisted
    _CLOCK = reactor
    _AIRDROP_QUEUE = dict()

    class NoDatabase(RuntimeError):
        pass

    def __init__(self,
                 db_filepath: str,
                 rest_host: str,
                 rest_port: int,
                 client_password: str = None,
                 crash_on_error: bool = False,
                 distribute_ether: bool = True,
                 registry: BaseContractRegistry = None,
                 *args,
                 **kwargs):

        # Character
        super().__init__(registry=registry, *args, **kwargs)
        self.log = Logger(f"felix-{self.checksum_address[-6::]}")

        # Network
        self.rest_port = rest_port
        self.rest_host = rest_host
        self.rest_app = NOT_RUNNING
        self.crash_on_error = crash_on_error

        # Database
        self.db_filepath = db_filepath
        self.db = NO_DATABASE_AVAILABLE
        self.db_engine = create_engine(f'sqlite:///{self.db_filepath}',
                                       convert_unicode=True)

        # Blockchain
        transacting_power = TransactingPower(password=client_password,
                                             account=self.checksum_address,
                                             signer=self.signer,
                                             cache=True)
        self._crypto_power.consume_power_up(transacting_power)

        self.token_agent = ContractAgency.get_agent(NucypherTokenAgent,
                                                    registry=registry)
        self.blockchain = self.token_agent.blockchain
        self.reserved_addresses = [self.checksum_address, NULL_ADDRESS]

        # Update reserved addresses with deployed contracts
        existing_entries = list(registry.enrolled_addresses)
        self.reserved_addresses.extend(existing_entries)

        # Distribution
        self.__distributed = 0  # Track NU Output
        self.__airdrop = 0  # Track Batch
        self.__disbursement = 0  # Track Quantity
        self._distribution_task = LoopingCall(f=self.airdrop_tokens)
        self._distribution_task.clock = self._CLOCK
        self.start_time = NOT_RUNNING

        self.economics = EconomicsFactory.get_economics(registry=registry)
        self.MAXIMUM_DISBURSEMENT = self.economics.maximum_allowed_locked
        self.INITIAL_DISBURSEMENT = self.economics.minimum_allowed_locked * 3

        # Optionally send ether with each token transaction
        self.distribute_ether = distribute_ether
        # Banner
        self.log.info(FELIX_BANNER.format(self.checksum_address))

    def __repr__(self):
        class_name = self.__class__.__name__
        r = f'{class_name}(checksum_address={self.checksum_address}, db_filepath={self.db_filepath})'
        return r

    def make_web_app(self):
        from flask import request
        from flask_sqlalchemy import SQLAlchemy

        # WSGI/Flask Service
        short_name = bytes(self.stamp).hex()[:6]
        self.rest_app = Flask(f"faucet-{short_name}",
                              template_folder=TEMPLATES_DIR)
        self.rest_app.config[
            'SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{self.db_filepath}'
        self.rest_app.config['MAX_CONTENT_LENGTH'] = MAX_UPLOAD_CONTENT_LENGTH

        try:
            self.rest_app.secret_key = sha256(
                os.environ['NUCYPHER_FELIX_DB_SECRET'].encode())  # uses envvar
        except KeyError:
            raise OSError(
                "The 'NUCYPHER_FELIX_DB_SECRET' environment variable is not set. "
                "Export your application secret and try again.")

        # Database
        self.db = SQLAlchemy(self.rest_app)

        # Database Tables
        class Recipient(self.db.Model):
            """
            The one and only table in Felix's database; Used to track recipients and airdrop metadata.
            """

            __tablename__ = 'recipient'

            id = self.db.Column(self.db.Integer, primary_key=True)
            address = self.db.Column(self.db.String, nullable=False)
            joined = self.db.Column(self.db.DateTime,
                                    nullable=False,
                                    default=datetime.utcnow)
            total_received = self.db.Column(self.db.String,
                                            default='0',
                                            nullable=False)
            last_disbursement_amount = self.db.Column(self.db.String,
                                                      nullable=False,
                                                      default=0)
            last_disbursement_time = self.db.Column(self.db.DateTime,
                                                    nullable=True,
                                                    default=None)
            is_staking = self.db.Column(self.db.Boolean,
                                        nullable=False,
                                        default=False)

            def __repr__(self):
                return f'{self.__class__.__name__}(id={self.id})'

        self.Recipient = Recipient  # Bind to outer class

        # Flask decorators
        rest_app = self.rest_app

        #
        # REST Routes
        #
        @rest_app.route("/status", methods=['GET'])
        def status():
            with ThreadedSession(self.db_engine) as session:
                total_recipients = session.query(self.Recipient).count()
                last_recipient = session.query(self.Recipient).filter(
                    self.Recipient.last_disbursement_time.isnot(
                        None)).order_by('last_disbursement_time').first()

                last_address = last_recipient.address if last_recipient else None
                last_transaction_date = last_recipient.last_disbursement_time.isoformat(
                ) if last_recipient else None

                unfunded = session.query(self.Recipient).filter(
                    self.Recipient.last_disbursement_time.is_(None)).count()

                return json.dumps({
                    "total_recipients": total_recipients,
                    "latest_recipient": last_address,
                    "latest_disburse_date": last_transaction_date,
                    "unfunded_recipients": unfunded,
                    "state": {
                        "eth": str(self.eth_balance),
                        "NU": str(self.token_balance),
                        "address": self.checksum_address,
                        "contract_address": self.token_agent.contract_address,
                    }
                })

        @rest_app.route("/register", methods=['POST'])
        def register():
            """Handle new recipient registration via POST request."""

            new_address = (request.form.get('address')
                           or request.get_json().get('address'))

            if not new_address:
                return Response(response="no address was supplied", status=411)

            if not eth_utils.is_address(new_address):
                return Response(
                    response=
                    "an invalid ethereum address was supplied.  please ensure the address is a proper checksum.",
                    status=400)
            else:
                new_address = eth_utils.to_checksum_address(new_address)

            if new_address in self.reserved_addresses:
                return Response(
                    response=
                    "sorry, that address is reserved and cannot receive funds.",
                    status=403)

            try:
                with ThreadedSession(self.db_engine) as session:

                    existing = Recipient.query.filter_by(
                        address=new_address).all()
                    if len(existing) >= self.MAX_INDIVIDUAL_REGISTRATIONS:
                        # Address already exists; Abort
                        self.log.debug(
                            f"{new_address} is already enrolled {self.MAX_INDIVIDUAL_REGISTRATIONS} times."
                        )
                        return Response(
                            response=
                            f"{new_address} requested too many times  -  Please use another address.",
                            status=409)

                    # Create the record
                    recipient = Recipient(address=new_address,
                                          joined=datetime.now())
                    session.add(recipient)
                    session.commit()

            except Exception as e:
                # Pass along exceptions to the logger
                self.log.critical(str(e))
                raise

            else:
                return Response(status=200)  # TODO

        return rest_app

    def create_tables(self) -> None:
        self.make_web_app()
        return self.db.create_all(app=self.rest_app)

    def start(self,
              host: str,
              port: int,
              web_services: bool = True,
              distribution: bool = True,
              crash_on_error: bool = False):

        self.crash_on_error = crash_on_error

        if self.start_time is not NOT_RUNNING:
            raise RuntimeError("Felix is already running.")

        self.start_time = maya.now()
        payload = {"wsgi": self.rest_app, "http_port": port}
        deployer = HendrixDeploy(action="start", options=payload)

        if distribution is True:
            self.start_distribution()

        if web_services is True:
            deployer.run()  # <-- Blocking call (Reactor)

    def start_distribution(self, now: bool = True) -> bool:
        """Start token distribution"""
        self.log.info(NU_BANNER)
        self.log.info("Starting NU Token Distribution | START")
        if self.token_balance == NU.ZERO():
            raise self.ActorError(
                f"Felix address {self.checksum_address} has 0 NU tokens.")
        self._distribution_task.start(interval=self.DISTRIBUTION_INTERVAL,
                                      now=now)
        return True

    def stop_distribution(self) -> bool:
        """Start token distribution"""
        self.log.info("Stopping NU Token Distribution | STOP")
        self._distribution_task.stop()
        return True

    def __calculate_disbursement(self, recipient) -> int:
        """Calculate the next reward for a recipient once the are selected for distribution"""

        # Initial Reward - sets the future rates
        if recipient.last_disbursement_time is None:
            amount = self.INITIAL_DISBURSEMENT

        # Cap reached; we'll continue to leak the minimum disbursement
        elif int(recipient.total_received) >= self.MAXIMUM_DISBURSEMENT:
            amount = self.MINIMUM_DISBURSEMENT

        # Calculate the next disbursement
        else:
            amount = math.ceil(
                int(recipient.last_disbursement_amount) * self.MULTIPLIER)
            if amount < self.MINIMUM_DISBURSEMENT:
                amount = self.MINIMUM_DISBURSEMENT

        return int(amount)

    def __transfer(self, disbursement: int, recipient_address: str) -> str:
        """Perform a single token transfer transaction from one account to another."""

        # Re-unlock from cache
        self.blockchain.transacting_power.activate()

        self.__disbursement += 1
        receipt = self.token_agent.transfer(
            amount=disbursement,
            target_address=recipient_address,
            sender_address=self.checksum_address)
        txhash = receipt['transactionHash']
        if self.distribute_ether:
            ether = self.ETHER_AIRDROP_AMOUNT
            transaction = {
                'to': recipient_address,
                'from': self.checksum_address,
                'value': ether,
                'gasPrice': self.blockchain.client.gas_price_for_transaction()
            }

            transaction_dict = self.blockchain.build_payload(
                sender_address=self.checksum_address,
                payload=transaction,
                transaction_gas_limit=22000)
            _receipt = self.blockchain.sign_and_broadcast_transaction(
                transaction_dict=transaction_dict, transaction_name='transfer')
            self.log.info(
                f"Disbursement #{self.__disbursement} OK | TX {txhash.hex()[-6:]} "
                f"({str(NU(disbursement, 'NuNit'))} + {self.ETHER_AIRDROP_AMOUNT} wei) -> {recipient_address}"
            )
        else:
            self.log.info(
                f"Disbursement #{self.__disbursement} OK "
                f"({str(NU(disbursement, 'NuNit'))}) -> {recipient_address}")

        return txhash

    def airdrop_tokens(self):
        """
        Calculate airdrop eligibility via faucet registration
        and transfer tokens to selected recipients.
        """

        with ThreadedSession(self.db_engine) as session:
            population = session.query(self.Recipient).count()

        message = f"{population} registered faucet recipients; " \
                  f"Distributed {str(NU(self.__distributed, 'NuNit'))} since {self.start_time.slang_time()}."
        self.log.debug(message)
        if population == 0:
            return  # Abort - no recipients are registered.

        # For filtration
        since = datetime.now() - timedelta(hours=self.DISBURSEMENT_INTERVAL)

        datetime_filter = or_(self.Recipient.last_disbursement_time <= since,
                              self.Recipient.last_disbursement_time ==
                              None)  # This must be `==` not `is`

        with ThreadedSession(self.db_engine) as session:
            candidates = session.query(
                self.Recipient).filter(datetime_filter).all()
            if not candidates:
                self.log.info("No eligible recipients this round.")
                return

        # Discard invalid addresses, in-depth
        invalid_addresses = list()

        def siphon_invalid_entries(candidate):
            address_is_valid = eth_utils.is_checksum_address(candidate.address)
            if not address_is_valid:
                invalid_addresses.append(candidate.address)
            return address_is_valid

        candidates = list(filter(siphon_invalid_entries, candidates))

        if invalid_addresses:
            self.log.info(
                f"{len(invalid_addresses)} invalid entries detected. Pruning database."
            )

            # TODO: Is this needed? - Invalid entries are rejected at the endpoint view.
            # Prune database of invalid records
            # with ThreadedSession(self.db_engine) as session:
            #     bad_eggs = session.query(self.Recipient).filter(self.Recipient.address in invalid_addresses).all()
            #     for egg in bad_eggs:
            #         session.delete(egg.id)
            #     session.commit()

        if not candidates:
            self.log.info("No eligible recipients this round.")
            return

        d = threads.deferToThread(self.__do_airdrop, candidates=candidates)
        self._AIRDROP_QUEUE[self.__airdrop] = d
        return d

    def __do_airdrop(self, candidates: list):

        self.log.info(f"Staging Airdrop #{self.__airdrop}.")

        # Staging
        staged_disbursements = [(r, self.__calculate_disbursement(recipient=r))
                                for r in candidates]
        batches = list(
            staged_disbursements[index:index + self.BATCH_SIZE]
            for index in range(0, len(staged_disbursements), self.BATCH_SIZE))
        total_batches = len(batches)

        self.log.info("====== Staged Airdrop ======")
        for recipient, disbursement in staged_disbursements:
            self.log.info(f"{recipient.address} ... {str(disbursement)[:-18]}")
        self.log.info("==========================")

        # Staging Delay
        self.log.info(
            f"Airdrop will commence in {self.STAGING_DELAY} seconds...")
        if self.STAGING_DELAY > 3:
            time.sleep(self.STAGING_DELAY - 3)
        for i in range(3):
            time.sleep(1)
            self.log.info(f"NU Token airdrop starting in {3 - i} seconds...")

        # Slowly, in series...
        for batch, staged_disbursement in enumerate(batches, start=1):
            self.log.info(f"======= Batch #{batch} ========")

            for recipient, disbursement in staged_disbursement:

                # Perform the transfer... leaky faucet.
                self.__transfer(disbursement=disbursement,
                                recipient_address=recipient.address)
                self.__distributed += disbursement

                # Update the database record
                recipient.last_disbursement_amount = str(disbursement)
                recipient.total_received = str(
                    int(recipient.total_received) + disbursement)
                recipient.last_disbursement_time = datetime.now()

                self.db.session.add(recipient)
                self.db.session.commit()

            # end inner loop
            self.log.info(
                f"Completed Airdrop #{self.__airdrop} Batch #{batch} of {total_batches}."
            )

        # end outer loop
        now = maya.now()
        next_interval_slang = now.add(
            seconds=self.DISTRIBUTION_INTERVAL).slang_time()
        self.log.info(
            f"Completed Airdrop #{self.__airdrop}; Next airdrop is {next_interval_slang}."
        )

        del self._AIRDROP_QUEUE[self.__airdrop]
        self.__airdrop += 1
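
A minimal client-side sketch for the two REST routes defined above. This is not part of the original example: it assumes a Felix instance is already running and that the requests library is installed; the faucet URL and the recipient address are hypothetical placeholders for whatever was passed to Felix.start().

import requests

FAUCET = 'http://localhost:6151'  # hypothetical host/port

# Register a recipient; the /register view accepts form data or JSON with an 'address' key.
recipient = '0x' + '00' * 19 + '01'  # placeholder address, for illustration only
response = requests.post(f'{FAUCET}/register', data={'address': recipient})
print(response.status_code)  # 200 on success; 400/403/409 on the error paths shown above

# Inspect faucet state via the /status view, which returns a JSON document.
print(requests.get(f'{FAUCET}/status').json())
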
def test_collect_rewards_integration(
        click_runner, testerchain, agency_local_registry,
        stakeholder_configuration_file_location, blockchain_alice,
        blockchain_bob, random_policy_label, manual_staker, manual_worker,
        token_economics, policy_value):

    half_stake_time = 2 * token_economics.minimum_locked_periods  # Test setup
    logger = Logger("Test-CLI")  # Enter the Teacher's Logger, and
    current_period = 0  # State the initial period for incrementing

    staker_address = manual_staker
    worker_address = manual_worker

    staker = Staker(domain=TEMPORARY_DOMAIN,
                    checksum_address=staker_address,
                    registry=agency_local_registry)
    staker.refresh_stakes()

    # The staker is staking.
    assert staker.is_staking
    assert staker.stakes
    assert staker.worker_address == worker_address

    ursula_port = select_test_port()
    ursula = Ursula(is_me=True,
                    checksum_address=staker_address,
                    signer=Web3Signer(testerchain.client),
                    worker_address=worker_address,
                    registry=agency_local_registry,
                    rest_host=LOOPBACK_ADDRESS,
                    rest_port=ursula_port,
                    provider_uri=TEST_PROVIDER_URI,
                    network_middleware=MockRestMiddleware(),
                    db_filepath=tempfile.mkdtemp(),
                    domain=TEMPORARY_DOMAIN)

    MOCK_KNOWN_URSULAS_CACHE[ursula_port] = ursula
    assert ursula.worker_address == worker_address
    assert ursula.checksum_address == staker_address

    # Make a commitment for half the first stake duration
    testerchain.time_travel(periods=1)
    for _ in range(half_stake_time):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    # Alice creates a policy and grants Bob access
    blockchain_alice.selection_buffer = 1

    M, N = 1, 1
    duration_in_periods = 3
    days = (duration_in_periods - 1) * (token_economics.hours_per_period // 24)
    now = testerchain.w3.eth.getBlock('latest').timestamp
    expiration = maya.MayaDT(now).add(days=days)
    blockchain_policy = blockchain_alice.grant(bob=blockchain_bob,
                                               label=random_policy_label,
                                               m=M,
                                               n=N,
                                               value=policy_value,
                                               expiration=expiration,
                                               handpicked_ursulas={ursula})

    # Ensure that the handpicked Ursula was selected for the policy
    assert ursula.checksum_address in blockchain_policy.treasure_map.destinations

    # Bob learns about the new staker and joins the policy
    blockchain_bob.start_learning_loop()
    blockchain_bob.remember_node(node=ursula)
    blockchain_bob.join_policy(random_policy_label,
                               bytes(blockchain_alice.stamp))

    # Enrico Encrypts (of course)
    enrico = Enrico(policy_encrypting_key=blockchain_policy.public_key,
                    network_middleware=MockRestMiddleware())

    verifying_key = blockchain_alice.stamp.as_umbral_pubkey()

    for index in range(half_stake_time - 5):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()

        # Encrypt
        random_data = os.urandom(random.randrange(20, 100))
        ciphertext, signature = enrico.encrypt_message(plaintext=random_data)

        # Decrypt
        cleartexts = blockchain_bob.retrieve(ciphertext,
                                             enrico=enrico,
                                             alice_verifying_key=verifying_key,
                                             label=random_policy_label)
        assert random_data == cleartexts[0]

        # Ursula Staying online and the clock advancing
        testerchain.time_travel(periods=1)
        current_period += 1

    # Finish the passage of time for the first Stake
    for _ in range(5):  # plus the extended periods from stake division
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    #
    # WHERE'S THE MONEY, URSULA?? - Collecting Rewards
    #

    # The address the client wants Ursula to send rewards to
    burner_wallet = testerchain.w3.eth.account.create(
        INSECURE_DEVELOPMENT_PASSWORD)

    # The rewards wallet is initially empty, because it is freshly created
    assert testerchain.client.get_balance(burner_wallet.address) == 0

    # Rewards will be unlocked after the
    # final committed period has passed (+1).
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
    testerchain.time_travel(periods=1)
    current_period += 1
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")

    # At least half of the tokens are unlocked (restaking was enabled for some prior periods)
    assert staker.locked_tokens() >= token_economics.minimum_allowed_locked

    # Collect Policy Fee
    collection_args = ('stake', 'rewards', 'withdraw', '--config-file',
                       stakeholder_configuration_file_location, '--fees',
                       '--no-tokens', '--staking-address', staker_address,
                       '--withdraw-address', burner_wallet.address)
    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # Policy Fee
    collected_policy_fee = testerchain.client.get_balance(
        burner_wallet.address)
    expected_collection = policy_value
    assert collected_policy_fee == expected_collection

    # Finish the passage of time... once and for all
    # Extended periods from stake division
    for _ in range(9):
        ursula.commit_to_next_period()
        current_period += 1
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        testerchain.time_travel(periods=1)

    #
    # Collect Staking Reward
    #

    balance_before_collecting = staker.token_agent.get_balance(
        address=staker_address)

    collection_args = ('stake', 'rewards', 'withdraw', '--config-file',
                       stakeholder_configuration_file_location, '--no-fees',
                       '--tokens', '--staking-address', staker_address,
                       '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # The staker has withdrawn her staking rewards
    assert staker.token_agent.get_balance(
        address=staker_address) > balance_before_collecting
Example #7
class CharacterConfiguration(BaseConfiguration):
    """
    'Sideways Engagement' of Character classes; a reflection of input parameters.
    """

    VERSION = 4  # bump when static payload scheme changes

    CHARACTER_CLASS = NotImplemented
    DEFAULT_CONTROLLER_PORT = NotImplemented
    MNEMONIC_KEYSTORE = False
    DEFAULT_DOMAIN = NetworksInventory.DEFAULT
    DEFAULT_NETWORK_MIDDLEWARE = RestMiddleware
    TEMP_CONFIGURATION_DIR_PREFIX = 'tmp-nucypher'
    SIGNER_ENVVAR = None

    # When we begin to support other threshold schemes,
    # this will be one of the concepts that makes us want a factory.  #571
    known_node_class = Ursula

    # Gas
    DEFAULT_GAS_STRATEGY = 'fast'

    # Payments
    DEFAULT_PAYMENT_METHOD = 'SubscriptionManager'
    DEFAULT_PAYMENT_NETWORK = 'polygon'
    DEFAULT_FEDERATED_PAYMENT_METHOD = 'Free'

    # Fields specified here are *not* passed into the Character's constructor
    # and can be understood as configuration fields only.
    _CONFIG_FIELDS = (
        'config_root',
        'poa',
        'light',
        'registry_filepath',
        'gas_strategy',
        'max_gas_price',  # gwei
        'signer_uri',
        'keystore_path',
        'payment_provider',
        'payment_network')

    def __init__(
            self,

            # Base
            emitter=None,
            config_root: Optional[Path] = None,
            filepath: Optional[Path] = None,

            # Mode
            dev_mode: bool = False,
            federated_only: bool = False,

            # Identity
            checksum_address: str = None,
            crypto_power: CryptoPower = None,

            # Keystore
            keystore: Keystore = None,
            keystore_path: Optional[Path] = None,

            # Learner
            learn_on_same_thread: bool = False,
            abort_on_learning_error: bool = False,
            start_learning_now: bool = True,

            # Network
            controller_port: int = None,
            domain: str = DEFAULT_DOMAIN,
            network_middleware: RestMiddleware = None,
            lonely: bool = False,

            # Node Storage
            known_nodes: set = None,
            node_storage: NodeStorage = None,
            reload_metadata: bool = True,
            save_metadata: bool = True,

            # Blockchain
            poa: bool = None,
            light: bool = False,
            eth_provider_uri: str = None,
            gas_strategy: Union[Callable, str] = DEFAULT_GAS_STRATEGY,
            max_gas_price: Optional[int] = None,
            signer_uri: str = None,

            # Payments
            # TODO: Resolve code prefixing below, possibly with the use of nested configuration fields
            payment_method: str = None,
            payment_provider: str = None,
            payment_network: str = None,

            # Registries
            registry: BaseContractRegistry = None,
            registry_filepath: Optional[Path] = None,
            policy_registry: BaseContractRegistry = None,
            policy_registry_filepath: Optional[Path] = None,

            # Deployed Operators
            worker_data: dict = None):

        self.log = Logger(self.__class__.__name__)

        # This constant is used to signal that a path can be generated if one is not provided.
        UNINITIALIZED_CONFIGURATION.bool_value(False)

        # Identity
        # NOTE: NodeConfigurations can only be used with Self-Characters
        self.is_me = True
        self.checksum_address = checksum_address

        # Keystore
        self.crypto_power = crypto_power
        if keystore_path and not keystore:
            keystore = Keystore(keystore_path=keystore_path)
        self.__keystore = keystore or NO_KEYSTORE_ATTACHED.bool_value(False)
        self.keystore_dir = Path(
            keystore.keystore_path
        ).parent if keystore else UNINITIALIZED_CONFIGURATION

        # Contract Registry
        if registry and registry_filepath:
            if registry.filepath != registry_filepath:
                error = f"Inconsistent registry filepaths for '{registry.filepath.absolute()}'" \
                        f" and '{registry_filepath.absolute()}'."
                raise ValueError(error)
            else:
                self.log.warn(
                    f"Registry and registry filepath were both passed.")
        self.registry = registry or NO_BLOCKCHAIN_CONNECTION.bool_value(False)
        self.registry_filepath = registry_filepath or UNINITIALIZED_CONFIGURATION

        self.policy_registry = policy_registry or NO_BLOCKCHAIN_CONNECTION.bool_value(
            False)
        self.policy_registry_filepath = policy_registry_filepath or UNINITIALIZED_CONFIGURATION

        # Blockchain
        self.poa = poa
        self.is_light = light
        self.eth_provider_uri = eth_provider_uri or NO_BLOCKCHAIN_CONNECTION
        self.signer_uri = signer_uri or None

        # Learner
        self.federated_only = federated_only
        self.domain = domain
        self.learn_on_same_thread = learn_on_same_thread
        self.abort_on_learning_error = abort_on_learning_error
        self.start_learning_now = start_learning_now
        self.save_metadata = save_metadata
        self.reload_metadata = reload_metadata
        self.known_nodes = known_nodes or set()  # handpicked
        self.lonely = lonely

        # Configuration
        self.__dev_mode = dev_mode
        self.config_file_location = filepath or UNINITIALIZED_CONFIGURATION
        self.config_root = UNINITIALIZED_CONFIGURATION

        # Deployed Operators
        self.worker_data = worker_data

        #
        # Federated vs. Blockchain arguments consistency
        #

        #
        # Federated
        #

        if self.federated_only:
            # Check for incompatible values
            blockchain_args = {
                'filepath': registry_filepath,
                'poa': poa,
                'eth_provider_uri': eth_provider_uri,
                'payment_provider': payment_provider,
                'gas_strategy': gas_strategy,
                'max_gas_price': max_gas_price
            }
            if any(blockchain_args.values()):
                bad_args = ", ".join(f"{arg}={val}"
                                     for arg, val in blockchain_args.items()
                                     if val)
                self.log.warn(
                    f"Arguments {bad_args} are incompatible with federated_only. "
                    f"Overridden with a sane default.")

                # Clear decentralized attributes to ensure consistency with a
                # federated configuration.
                self.poa = False
                self.is_light = False
                self.eth_provider_uri = None
                self.registry_filepath = None
                self.policy_registry_filepath = None
                self.gas_strategy = None
                self.max_gas_price = None

            # Federated Payments
            self.payment_method = payment_method or self.DEFAULT_FEDERATED_PAYMENT_METHOD
            self.payment_network = payment_network
            self.payment_provider = payment_provider

        #
        # Decentralized
        #

        else:
            self.gas_strategy = gas_strategy
            self.max_gas_price = max_gas_price  # gwei
            is_initialized = BlockchainInterfaceFactory.is_interface_initialized(
                eth_provider_uri=self.eth_provider_uri)
            if not is_initialized and eth_provider_uri:
                BlockchainInterfaceFactory.initialize_interface(
                    eth_provider_uri=self.eth_provider_uri,
                    poa=self.poa,
                    light=self.is_light,
                    emitter=emitter,
                    gas_strategy=self.gas_strategy,
                    max_gas_price=self.max_gas_price)
            else:
                self.log.warn(
                    f"Using existing blockchain interface connection ({self.eth_provider_uri})."
                )

            if not self.registry:
                # TODO: These two code blocks are untested.
                if not self.registry_filepath:  # TODO: Registry URI  (goerli://speedynet.json) :-)
                    self.log.info(f"Fetching latest registry from source.")
                    self.registry = InMemoryContractRegistry.from_latest_publication(
                        network=self.domain)
                else:
                    self.registry = LocalContractRegistry(
                        filepath=self.registry_filepath)
                    self.log.info(f"Using local registry ({self.registry}).")

            self.testnet = self.domain != NetworksInventory.MAINNET
            self.signer = Signer.from_signer_uri(self.signer_uri,
                                                 testnet=self.testnet)

            #
            # Onchain Payments & Policies
            #

            # FIXME: Enforce this for Ursula/Alice but not Bob?
            from nucypher.config.characters import BobConfiguration
            if not isinstance(self, BobConfiguration):
                # if not payment_provider:
                #     raise self.ConfigurationError("payment provider is required.")
                self.payment_method = payment_method or self.DEFAULT_PAYMENT_METHOD
                self.payment_network = payment_network or self.DEFAULT_PAYMENT_NETWORK
                self.payment_provider = payment_provider or (
                    self.eth_provider_uri or None)  # default to L1 payments

                # TODO: Dedupe
                if not self.policy_registry:
                    if not self.policy_registry_filepath:
                        self.log.info(
                            f"Fetching latest policy registry from source.")
                        self.policy_registry = InMemoryContractRegistry.from_latest_publication(
                            network=self.payment_network)
                    else:
                        self.policy_registry = LocalContractRegistry(
                            filepath=self.policy_registry_filepath)
                        self.log.info(
                            f"Using local policy registry ({self.policy_registry})."
                        )

        if dev_mode:
            self.__temp_dir = UNINITIALIZED_CONFIGURATION
            self._setup_node_storage()
            self.initialize(password=DEVELOPMENT_CONFIGURATION)
        else:
            self.__temp_dir = LIVE_CONFIGURATION
            self.config_root = config_root or self.DEFAULT_CONFIG_ROOT
            self._cache_runtime_filepaths()
            self._setup_node_storage(node_storage=node_storage)

        # Network
        self.controller_port = controller_port or self.DEFAULT_CONTROLLER_PORT
        self.network_middleware = network_middleware or self.DEFAULT_NETWORK_MIDDLEWARE(
            registry=self.registry)

        super().__init__(filepath=self.config_file_location,
                         config_root=self.config_root)

    def __call__(self, **character_kwargs):
        return self.produce(**character_kwargs)

    @property
    def keystore(self) -> Keystore:
        return self.__keystore

    def attach_keystore(self, keystore: Keystore) -> None:
        self.__keystore = keystore

    @classmethod
    def checksum_address_from_filepath(cls, filepath: Path) -> str:
        pattern = re.compile(
            r'''
                             (^\w+)-
                             (0x{1}           # Then '0x', exactly once
                             [0-9a-fA-F]{40}) # Followed by exactly 40 hex chars
                             ''', re.VERBOSE)

        filename = filepath.name
        match = pattern.match(filename)

        if match:
            character_name, checksum_address = match.groups()
        else:
            # Extract from default by "peeking" inside the configuration file.
            default_name = cls.generate_filename()
            if filename == default_name:
                checksum_address = cls.peek(filepath=filepath,
                                            field='checksum_address')
            else:
                raise ValueError(
                    f"Cannot extract checksum from filepath '{filepath}'")

        if not is_checksum_address(checksum_address):
            raise RuntimeError(
                f"Invalid checksum address detected in configuration file at '{filepath}'."
            )
        return checksum_address

    def update(self, **kwargs) -> None:
        """
        A facility for updating existing attributes on existing configuration instances.

        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        # config file should exist and we override -> no need for modifier
        return super().update(filepath=self.config_file_location, **kwargs)

    @classmethod
    def generate(cls,
                 password: str,
                 key_material: Optional[bytes] = None,
                 *args,
                 **kwargs):
        """Shortcut: Hook-up a new initial installation and configuration."""
        node_config = cls(dev_mode=False, *args, **kwargs)
        node_config.initialize(key_material=key_material, password=password)
        return node_config

    def cleanup(self) -> None:
        if self.__dev_mode:
            self.__temp_dir.cleanup()

    @property
    def dev_mode(self) -> bool:
        return self.__dev_mode

    def _setup_node_storage(self, node_storage=None) -> None:
        # TODO: Disables node metadata persistence..
        # if self.dev_mode:
        #     node_storage = ForgetfulNodeStorage(registry=self.registry, federated_only=self.federated_only)

        # TODO: Forcibly clears the filesystem of any stored node metadata and certificates...
        local_node_storage = LocalFileBasedNodeStorage(
            registry=self.registry,
            config_root=self.config_root,
            federated_only=self.federated_only)
        local_node_storage.clear()
        self.log.info(
            f'Cleared peer metadata from {local_node_storage.root_dir}')

        # TODO: Always sets up nodes for in-memory node metadata storage
        node_storage = ForgetfulNodeStorage(registry=self.registry,
                                            federated_only=self.federated_only)
        self.node_storage = node_storage

    def forget_nodes(self) -> None:
        self.node_storage.clear()
        message = "Removed all stored node node metadata and certificates"
        self.log.debug(message)

    def destroy(self) -> None:
        """Parse a node configuration and remove all associated files from the filesystem"""
        self.config_file_location.unlink()

    def generate_parameters(self, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        merged_parameters = {
            **self.static_payload(),
            **self.dynamic_payload,
            **overrides
        }
        character_init_params = filter(
            lambda t: t[0] not in self._CONFIG_FIELDS,
            merged_parameters.items())
        return dict(character_init_params)

    def produce(self, **overrides) -> CHARACTER_CLASS:
        """Initialize a new character instance and return it."""
        merged_parameters = self.generate_parameters(**overrides)
        character = self.CHARACTER_CLASS(**merged_parameters)
        return character

    @classmethod
    def assemble(cls, filepath: Optional[Path] = None, **overrides) -> dict:
        """
        Warning: This method allows mutation and may result in an inconsistent configuration.
        """
        payload = cls._read_configuration_file(filepath=filepath)
        node_storage = cls.load_node_storage(
            storage_payload=payload['node_storage'],
            federated_only=payload['federated_only'])
        max_gas_price = payload.get('max_gas_price')  # gwei
        if max_gas_price:
            max_gas_price = Decimal(max_gas_price)

        # Assemble
        payload.update(
            dict(node_storage=node_storage, max_gas_price=max_gas_price))
        payload = cast_paths_from(cls, payload)

        # Filter out None values from **overrides to detect, well, overrides...
        # Acts as a shim for optional CLI flags.
        overrides = {k: v for k, v in overrides.items() if v is not None}
        payload = {**payload, **overrides}
        return payload

    @classmethod
    def from_configuration_file(
            cls,
            filepath: Optional[Path] = None,
            **overrides  # < ---- Inlet for CLI Flags
    ) -> 'CharacterConfiguration':
        """Initialize a CharacterConfiguration from a JSON file."""
        filepath = filepath or cls.default_filepath()
        assembled_params = cls.assemble(filepath=filepath, **overrides)
        node_configuration = cls(filepath=filepath, **assembled_params)
        return node_configuration

    def validate(self) -> bool:

        # Top-level
        if not self.config_root.exists():
            raise self.ConfigurationError(
                f'No configuration directory found at {self.config_root}.')

        # Sub-paths
        filepaths = self.runtime_filepaths
        for field, path in filepaths.items():
            if path and not path.exists():
                message = 'Missing configuration file or directory: {}.'
                if 'registry' in str(path):
                    message += ' Did you mean to pass --federated-only?'
                raise CharacterConfiguration.InvalidConfiguration(
                    message.format(path))
        return True

    def static_payload(self) -> dict:
        """JSON-Exported static configuration values for initializing Ursula"""
        keystore_path = str(
            self.keystore.keystore_path) if self.keystore else None
        payload = dict(

            # Identity
            federated_only=self.federated_only,
            checksum_address=self.checksum_address,
            keystore_path=keystore_path,

            # Behavior
            domain=self.domain,
            learn_on_same_thread=self.learn_on_same_thread,
            abort_on_learning_error=self.abort_on_learning_error,
            start_learning_now=self.start_learning_now,
            save_metadata=self.save_metadata,
            node_storage=self.node_storage.payload(),
            lonely=self.lonely,
        )

        # Optional values (mode)
        if not self.federated_only:
            if self.eth_provider_uri:
                if not self.signer_uri:
                    self.signer_uri = self.eth_provider_uri
                payload.update(
                    dict(eth_provider_uri=self.eth_provider_uri,
                         poa=self.poa,
                         light=self.is_light,
                         signer_uri=self.signer_uri))
            if self.registry_filepath:
                payload.update(dict(registry_filepath=self.registry_filepath))

            # Gas Price
            __max_price = str(
                self.max_gas_price) if self.max_gas_price else None
            payload.update(
                dict(gas_strategy=self.gas_strategy,
                     max_gas_price=__max_price))

        # Merge with the base payload
        base_payload = super().static_payload()
        base_payload.update(payload)

        return base_payload

    @property
    def dynamic_payload(self) -> dict:
        """
        Exported dynamic configuration values for initializing Ursula.
        These values are used to init a character instance but are *not*
        saved to the JSON configuration.
        """
        payload = dict()
        if not self.federated_only:
            payload.update(dict(registry=self.registry, signer=self.signer))

        payload.update(
            dict(network_middleware=self.network_middleware
                 or self.DEFAULT_NETWORK_MIDDLEWARE(),
                 known_nodes=self.known_nodes,
                 node_storage=self.node_storage,
                 keystore=self.keystore,
                 crypto_power_ups=self.derive_node_power_ups()))

        return payload

    def generate_filepath(self,
                          filepath: Optional[Path] = None,
                          modifier: str = None,
                          override: bool = False) -> Path:
        modifier = modifier or self.checksum_address
        filepath = super().generate_filepath(filepath=filepath,
                                             modifier=modifier,
                                             override=override)
        return filepath

    @property
    def runtime_filepaths(self) -> dict:
        filepaths = dict(config_root=self.config_root,
                         keystore_dir=self.keystore_dir,
                         registry_filepath=self.registry_filepath)
        return filepaths

    @classmethod
    def generate_runtime_filepaths(cls, config_root: Path) -> dict:
        """Dynamically generate paths based on configuration root directory"""
        filepaths = dict(config_root=config_root,
                         config_file_location=config_root /
                         cls.generate_filename(),
                         keystore_dir=config_root / 'keystore')
        return filepaths

    def _cache_runtime_filepaths(self) -> None:
        """Generate runtime filepaths and cache them on the config object"""
        filepaths = self.generate_runtime_filepaths(
            config_root=self.config_root)
        for field, filepath in filepaths.items():
            if getattr(self, field) is UNINITIALIZED_CONFIGURATION:
                setattr(self, field, filepath)

    def derive_node_power_ups(self) -> List[CryptoPowerUp]:
        power_ups = list()
        if self.is_me and not self.dev_mode:
            for power_class in self.CHARACTER_CLASS._default_crypto_powerups:
                power_up = self.keystore.derive_crypto_power(power_class)
                power_ups.append(power_up)
        return power_ups

    def initialize(self,
                   password: str,
                   key_material: Optional[bytes] = None) -> Path:
        """Initialize a new configuration and write installation files to disk."""

        # Development
        if self.dev_mode:
            self.__temp_dir = TemporaryDirectory(
                prefix=self.TEMP_CONFIGURATION_DIR_PREFIX)
            self.config_root = Path(self.__temp_dir.name)

        # Persistent
        else:
            self._ensure_config_root_exists()
            self.write_keystore(key_material=key_material,
                                password=password,
                                interactive=self.MNEMONIC_KEYSTORE)

        self._cache_runtime_filepaths()
        self.node_storage.initialize()

        # Validate
        if not self.__dev_mode:
            self.validate()

        # Success
        message = "Created nucypher installation files at {}".format(
            self.config_root)
        self.log.debug(message)
        return Path(self.config_root)

    def write_keystore(self,
                       password: str,
                       key_material: Optional[bytes] = None,
                       interactive: bool = True) -> Keystore:
        if key_material:
            self.__keystore = Keystore.import_secure(
                key_material=key_material,
                password=password,
                keystore_dir=self.keystore_dir)
        else:
            if interactive:
                self.__keystore = Keystore.generate(
                    password=password,
                    keystore_dir=self.keystore_dir,
                    interactive=interactive)
            else:
                self.__keystore, _ = Keystore.generate(
                    password=password,
                    keystore_dir=self.keystore_dir,
                    interactive=interactive)

        return self.keystore

    @classmethod
    def load_node_storage(cls, storage_payload: dict, federated_only: bool):
        from nucypher.config.storages import NodeStorage
        node_storage_subclasses = {
            storage._name: storage
            for storage in NodeStorage.__subclasses__()
        }
        storage_type = storage_payload[NodeStorage._TYPE_LABEL]
        storage_class = node_storage_subclasses[storage_type]
        node_storage = storage_class.from_payload(
            payload=storage_payload, federated_only=federated_only)
        return node_storage

    def configure_payment_method(self):
        # TODO: finalize config fields
        # Strategy-Based (current implementation, inflexible & hardcoded)
        # 'payment_strategy': 'SubscriptionManager'
        # 'payment_network': 'matic'
        # 'payment_provider': 'https:///matic.infura.io....'
        # Contract-Targeted (alternative implementation, flexible & generic)
        # 'payment': {
        #     'contract': '0xdeadbeef'
        #     'abi': '/home/abi/sm.json'
        #     'function': 'isPolicyActive'
        #     'provider': 'https:///matic.infura.io....'
        # }

        try:
            payment_class = PAYMENT_METHODS[self.payment_method]
        except KeyError:
            raise KeyError(f'Unknown payment verifier "{self.payment_method}"')

        if payment_class.ONCHAIN:
            # on-chain payment strategies require a blockchain connection
            payment_strategy = payment_class(
                network=self.payment_network,
                eth_provider=self.payment_provider,
                registry=self.policy_registry)
        else:
            payment_strategy = payment_class()
        return payment_strategy
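
As a brief usage note (a sketch, not taken from the example above): CharacterConfiguration itself is abstract (CHARACTER_CLASS is NotImplemented), so in practice a concrete subclass such as BobConfiguration, which the constructor imports above, is loaded from a JSON file and then asked to produce a character. The file path and the override value below are hypothetical.

from pathlib import Path
from nucypher.config.characters import BobConfiguration  # concrete subclass referenced in __init__ above

config = BobConfiguration.from_configuration_file(
    filepath=Path('/path/to/bob.json'),  # hypothetical configuration file
    federated_only=True)                 # CLI-style override; assemble() drops None-valued overrides
bob = config.produce()                   # equivalent to calling config() directly
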
Example #8
class RetryRequestMiddleware:
    """
    Automatically retries rpc requests whenever a 429 status code is returned.
    """
    def __init__(self,
                 make_request: Callable[[RPCEndpoint, Any], RPCResponse],
                 w3: Web3,
                 retries: int = 3,
                 exponential_backoff: bool = True):
        self.w3 = w3
        self.make_request = make_request
        self.retries = retries
        self.exponential_backoff = exponential_backoff
        self.logger = Logger(self.__class__.__name__)

    def is_request_result_retry(self, result: Union[RPCResponse,
                                                    Exception]) -> bool:
        # default retry functionality - look for 429 codes
        # override for additional checks
        if isinstance(result, HTTPError):
            # HTTPError 429
            status_code = result.response.status_code
            if status_code == 429:
                return True
        elif not isinstance(result, Exception):
            # must be RPCResponse
            if 'error' in result:
                error = result['error']
                # either instance of RPCError or str
                if not isinstance(error, str) and error.get('code') == 429:
                    return True

        # not retry result
        return False

    def __call__(self, method, params):
        result = None
        num_iterations = 1 + self.retries  # initial call and subsequent retries
        for i in range(num_iterations):
            try:
                response = self.make_request(method, params)
            except Exception as e:  # type: ignore
                result = e
            else:
                result = response

            # completed request
            if not self.is_request_result_retry(result):
                if i > 0:
                    # not initial call and retry was actually performed
                    self.logger.debug(
                        f'Retried rpc request completed after {i} retries')
                break

            # max retries with no completion
            if i == self.retries:
                self.logger.warn(
                    f'RPC request retried {self.retries} times but was not completed'
                )
                break

            # backoff before next call
            if self.exponential_backoff:
                time.sleep(2 ** (i + 1))  # exponential back-off: 2^(retry number)

        if isinstance(result, Exception):
            raise result
        else:
            # RPCResponse
            return result
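
A hedged usage sketch (not part of the example above): because the constructor matches web3.py's middleware factory signature of (make_request, w3), the class can be registered on a Web3 instance as-is, and functools.partial can pre-bind the optional settings. The provider URL below is a placeholder.

from functools import partial
from web3 import Web3, HTTPProvider

w3 = Web3(HTTPProvider('https://example-rpc.invalid'))  # placeholder RPC endpoint

# The class itself satisfies the (make_request, w3) factory signature, so it can be
# registered directly; partial() pre-binds the optional retry settings.
w3.middleware_onion.add(partial(RetryRequestMiddleware, retries=5), name='retry_rpc')
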
Example #9
class LocalFileBasedNodeStorage(NodeStorage):
    _name = 'local'
    __METADATA_FILENAME_TEMPLATE = '{}.node'

    class NoNodeMetadataFileFound(FileNotFoundError, NodeStorage.UnknownNode):
        pass

    def __init__(self,
                 config_root: str = None,
                 storage_root: str = None,
                 metadata_dir: str = None,
                 certificates_dir: str = None,
                 *args,
                 **kwargs) -> None:

        super().__init__(*args, **kwargs)
        self.log = Logger(self.__class__.__name__)

        self.root_dir = storage_root
        self.metadata_dir = metadata_dir
        self.certificates_dir = certificates_dir
        self._cache_storage_filepaths(config_root=config_root)

    @property
    def source(self) -> str:
        """Human readable source string"""
        return self.root_dir

    @staticmethod
    def _generate_storage_filepaths(config_root: str = None,
                                    storage_root: str = None,
                                    metadata_dir: str = None,
                                    certificates_dir: str = None):

        storage_root = storage_root or os.path.join(
            config_root or DEFAULT_CONFIG_ROOT, 'known_nodes')
        metadata_dir = metadata_dir or os.path.join(storage_root, 'metadata')
        certificates_dir = certificates_dir or os.path.join(
            storage_root, 'certificates')

        payload = {
            'storage_root': storage_root,
            'metadata_dir': metadata_dir,
            'certificates_dir': certificates_dir
        }

        return payload

    def _cache_storage_filepaths(self, config_root: str = None):
        filepaths = self._generate_storage_filepaths(
            config_root=config_root,
            storage_root=self.root_dir,
            metadata_dir=self.metadata_dir,
            certificates_dir=self.certificates_dir)
        self.root_dir = filepaths['storage_root']
        self.metadata_dir = filepaths['metadata_dir']
        self.certificates_dir = filepaths['certificates_dir']

    #
    # Certificates
    #

    @validate_checksum_address
    def __get_certificate_filename(self, checksum_address: str):
        return '{}.{}'.format(checksum_address, Encoding.PEM.name.lower())

    def __get_certificate_filepath(self, certificate_filename: str) -> str:
        return os.path.join(self.certificates_dir, certificate_filename)

    @validate_checksum_address
    def generate_certificate_filepath(self, checksum_address: str) -> str:
        certificate_filename = self.__get_certificate_filename(
            checksum_address)
        certificate_filepath = self.__get_certificate_filepath(
            certificate_filename=certificate_filename)
        return certificate_filepath

    @validate_checksum_address
    def __read_tls_public_certificate(
            self,
            filepath: str = None,
            checksum_address: str = None) -> Certificate:
        """Deserialize an X509 certificate from a filepath"""
        if not bool(filepath) ^ bool(checksum_address):
            raise ValueError(
                "Pass exactly one of filepath or checksum_address (not both, not neither).")

        if not filepath and checksum_address is not None:
            filepath = self.generate_certificate_filepath(checksum_address)

        try:
            with open(filepath, 'rb') as certificate_file:
                cert = x509.load_pem_x509_certificate(
                    certificate_file.read(), backend=default_backend())
                return cert
        except FileNotFoundError:
            raise FileNotFoundError(
                "No SSL certificate found at {}".format(filepath))

    #
    # Metadata
    #

    @validate_checksum_address
    def __generate_metadata_filepath(self,
                                     checksum_address: str,
                                     metadata_dir: str = None) -> str:
        metadata_path = os.path.join(
            metadata_dir or self.metadata_dir,
            self.__METADATA_FILENAME_TEMPLATE.format(checksum_address))
        return metadata_path

    def __read_metadata(self, filepath: str, federated_only: bool):

        from nucypher.characters.lawful import Ursula

        try:
            with open(filepath, "rb") as seed_file:
                seed_file.seek(0)
                node_bytes = self.deserializer(seed_file.read())
                node = Ursula.from_bytes(node_bytes)
        except FileNotFoundError:
            raise self.UnknownNode
        return node

    def __write_metadata(self, filepath: str, node):
        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        with open(filepath, "wb") as f:
            f.write(self.serializer(bytes(node)))
        self.log.info(
            "Wrote new node metadata to filesystem {}".format(filepath))
        return filepath

    #
    # API
    #
    def all(self,
            federated_only: bool,
            certificates_only: bool = False) -> Set[Union[Any, Certificate]]:
        filenames = os.listdir(
            self.certificates_dir if certificates_only else self.metadata_dir)
        self.log.info("Found {} known node metadata files at {}".format(
            len(filenames), self.metadata_dir))

        known_certificates = set()
        if certificates_only:
            for filename in filenames:
                certificate = self.__read_tls_public_certificate(
                    os.path.join(self.certificates_dir, filename))
                known_certificates.add(certificate)
            return known_certificates

        else:
            known_nodes = set()
            for filename in filenames:
                metadata_path = os.path.join(self.metadata_dir, filename)
                node = self.__read_metadata(
                    filepath=metadata_path,
                    federated_only=federated_only)  # TODO: 466
                known_nodes.add(node)
            return known_nodes

    @validate_checksum_address
    def get(self,
            checksum_address: str,
            federated_only: bool,
            certificate_only: bool = False):
        if certificate_only is True:
            certificate = self.__read_tls_public_certificate(
                checksum_address=checksum_address)
            return certificate
        metadata_path = self.__generate_metadata_filepath(
            checksum_address=checksum_address)
        node = self.__read_metadata(filepath=metadata_path,
                                    federated_only=federated_only)  # TODO: 466
        return node

    def store_node_certificate(self,
                               certificate: Certificate,
                               force: bool = True):
        certificate_filepath = self._write_tls_certificate(
            certificate=certificate, force=force)
        return certificate_filepath

    def store_node_metadata(self, node, filepath: str = None) -> str:
        address = node.checksum_address
        filepath = self.__generate_metadata_filepath(checksum_address=address,
                                                     metadata_dir=filepath)
        self.__write_metadata(filepath=filepath, node=node)
        return filepath

    def save_node(self, node, force) -> Tuple[str, str]:
        certificate_filepath = self.store_node_certificate(
            certificate=node.certificate, force=force)
        metadata_filepath = self.store_node_metadata(node=node)
        return metadata_filepath, certificate_filepath

    @validate_checksum_address
    def remove(self,
               checksum_address: str,
               metadata: bool = True,
               certificate: bool = True) -> None:

        if metadata is True:
            metadata_filepath = self.__generate_metadata_filepath(
                checksum_address=checksum_address)
            os.remove(metadata_filepath)
            self.log.debug(
                "Deleted {} from the filesystem".format(checksum_address))

        if certificate is True:
            certificate_filepath = self.generate_certificate_filepath(
                checksum_address=checksum_address)
            os.remove(certificate_filepath)
            self.log.debug(
                "Deleted {} from the filesystem".format(checksum_address))

        return

    def clear(self, metadata: bool = True, certificates: bool = True) -> None:
        """Forget all stored nodes and certificates"""
        def __destroy_dir_contents(path) -> None:
            try:
                paths_to_remove = os.listdir(path)
            except FileNotFoundError:
                return
            else:
                for file in paths_to_remove:
                    file_path = os.path.join(path, file)
                    if os.path.isfile(file_path):
                        os.unlink(file_path)

        if metadata is True:
            __destroy_dir_contents(self.metadata_dir)
        if certificates is True:
            __destroy_dir_contents(self.certificates_dir)

        return

    def payload(self) -> dict:
        payload = {
            'storage_type': self._name,
            'storage_root': self.root_dir,
            'metadata_dir': self.metadata_dir,
            'certificates_dir': self.certificates_dir
        }
        return payload

    @classmethod
    def from_payload(cls, payload: dict, *args,
                     **kwargs) -> 'LocalFileBasedNodeStorage':
        storage_type = payload[cls._TYPE_LABEL]
        if storage_type != cls._name:
            raise cls.NodeStorageError(
                "Wrong storage type. Got {}.".format(storage_type))
        del payload['storage_type']

        return cls(*args, **payload, **kwargs)

    def initialize(self) -> bool:
        storage_dirs = (self.root_dir, self.metadata_dir,
                        self.certificates_dir)
        for storage_dir in storage_dirs:
            try:
                os.mkdir(storage_dir, mode=0o755)
            except FileExistsError:
                message = "There are pre-existing files at {}".format(
                    self.root_dir)
                self.log.info(message)
            except FileNotFoundError:
                raise self.NodeStorageError(
                    "There is no existing configuration at {}".format(
                        self.root_dir))

        return bool(
            all(
                map(os.path.isdir, (self.root_dir, self.metadata_dir,
                                    self.certificates_dir))))
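
A minimal usage sketch (assumptions: a node object named `ursula` already exists, and a throwaway config root is acceptable; neither comes from the example above).

import tempfile

storage = LocalFileBasedNodeStorage(
    config_root=tempfile.mkdtemp(),  # known_nodes/, metadata/ and certificates/ are derived from this
    federated_only=True)             # required by the NodeStorage base class (Example #10 below)
storage.initialize()                 # create the storage directories

metadata_path = storage.store_node_metadata(node=ursula)  # `ursula` is assumed to exist
restored = storage.get(checksum_address=ursula.checksum_address,
                       federated_only=True)
storage.clear()                      # forget all stored metadata and certificates again
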
Example #10
class NodeStorage(ABC):
    _name = NotImplemented
    _TYPE_LABEL = 'storage_type'
    NODE_SERIALIZER = binascii.hexlify
    NODE_DESERIALIZER = binascii.unhexlify
    TLS_CERTIFICATE_ENCODING = Encoding.PEM
    TLS_CERTIFICATE_EXTENSION = '.{}'.format(
        TLS_CERTIFICATE_ENCODING.name.lower())

    class NodeStorageError(Exception):
        pass

    class UnknownNode(NodeStorageError):
        pass

    class InvalidNodeCertificate(RuntimeError):
        """Raised when a TLS certificate is not a valid Teacher certificate."""

    def __init__(
        self,
        federated_only: bool,  # TODO: 466
        character_class=None,
        serializer: Callable = NODE_SERIALIZER,
        deserializer: Callable = NODE_DESERIALIZER,
        registry: BaseContractRegistry = None,
    ) -> None:

        from nucypher.characters.lawful import Ursula

        self.log = Logger(self.__class__.__name__)
        self.registry = registry
        self.serializer = serializer
        self.deserializer = deserializer
        self.federated_only = federated_only
        self.character_class = character_class or Ursula

    def __getitem__(self, item):
        return self.get(checksum_address=item,
                        federated_only=self.federated_only)

    def __setitem__(self, key, value):
        return self.store_node_metadata(node=value)

    def __delitem__(self, key):
        self.remove(checksum_address=key)

    def __iter__(self):
        return self.all(federated_only=self.federated_only)

    @property
    @abstractmethod
    def source(self) -> str:
        """Human readable source string"""
        return NotImplemented

    def _read_common_name(self, certificate: Certificate):
        x509 = OpenSSL.crypto.X509.from_cryptography(certificate)
        subject_components = x509.get_subject().get_components()
        common_name_as_bytes = subject_components[0][1]
        common_name_from_cert = common_name_as_bytes.decode()
        return common_name_from_cert

    def _write_tls_certificate(self,
                               certificate: Certificate,
                               host: str = None,
                               force: bool = True) -> str:

        # Read
        x509 = OpenSSL.crypto.X509.from_cryptography(certificate)
        subject_components = x509.get_subject().get_components()
        common_name_as_bytes = subject_components[0][1]
        common_name_on_certificate = common_name_as_bytes.decode()
        if not host:
            host = common_name_on_certificate

        try:
            pseudonym = certificate.subject.get_attributes_for_oid(
                NameOID.PSEUDONYM)[0]
        except IndexError:
            raise self.InvalidNodeCertificate(
                f"Missing checksum address on certificate for host '{host}'. "
                f"Does this certificate belong to an Ursula?")
        else:
            checksum_address = pseudonym.value

        if not is_checksum_address(checksum_address):
            raise self.InvalidNodeCertificate(
                "Invalid certificate wallet address encountered: {}".format(
                    checksum_address))

        # Validate
        # TODO: It's better for us to have checked this a while ago so that this situation is impossible.  #443
        if host and (host != common_name_on_certificate):
            raise ValueError(
                f"You passed a hostname ('{host}') that does not match the certificate's common name."
            )

        certificate_filepath = self.generate_certificate_filepath(
            checksum_address=checksum_address)
        certificate_already_exists = os.path.isfile(certificate_filepath)
        if force is False and certificate_already_exists:
            raise FileExistsError(
                'A TLS certificate already exists at {}.'.format(
                    certificate_filepath))

        # Write
        os.makedirs(os.path.dirname(certificate_filepath), exist_ok=True)
        with open(certificate_filepath, 'wb') as certificate_file:
            public_pem_bytes = certificate.public_bytes(
                self.TLS_CERTIFICATE_ENCODING)
            certificate_file.write(public_pem_bytes)

        self.log.debug(
            f"Saved TLS certificate for {checksum_address}: {certificate_filepath}"
        )

        return certificate_filepath

    @abstractmethod
    def store_node_certificate(self, certificate: Certificate) -> str:
        raise NotImplementedError

    @abstractmethod
    def store_node_metadata(self, node, filepath: str = None) -> str:
        """Save a single node's metadata and tls certificate"""
        raise NotImplementedError

    @abstractmethod
    def generate_certificate_filepath(self, checksum_address: str) -> str:
        raise NotImplementedError

    @abstractmethod
    def payload(self) -> dict:
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def from_payload(self, data: dict, *args, **kwargs) -> 'NodeStorage':
        """Instantiate a storage object from a dictionary"""
        raise NotImplementedError

    @abstractmethod
    def initialize(self):
        """One-time initialization steps to establish a node storage backend"""
        raise NotImplementedError

    @abstractmethod
    def all(self,
            federated_only: bool,
            certificates_only: bool = False) -> set:
        """Return a set of all stored nodes"""
        raise NotImplementedError

    @abstractmethod
    def get(self, checksum_address: str, federated_only: bool):
        """Retrieve a single stored node"""
        raise NotImplementedError

    @abstractmethod
    def remove(self, checksum_address: str) -> bool:
        """Remove a single stored node"""
        raise NotImplementedError

    @abstractmethod
    def clear(self) -> bool:
        """Remove all stored nodes"""
        raise NotImplementedError
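
Because the base class wires __getitem__, __setitem__, __delitem__ and __iter__ to get(), store_node_metadata(), remove() and all(), any concrete subclass behaves like a mapping keyed by checksum address. A minimal sketch, assuming 'storage' is an initialized concrete subclass (for instance the file-based storage in the previous example) and 'ursula' is a node object exposing a checksum_address attribute:

storage[ursula.checksum_address] = ursula        # -> store_node_metadata(node=ursula)
same_node = storage[ursula.checksum_address]     # -> get(checksum_address=..., federated_only=...)
for node in storage:                             # -> all(federated_only=storage.federated_only)
    print(node)
del storage[ursula.checksum_address]             # -> remove(checksum_address=...)
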
Example #11
0
class BlockchainInterface:
    """
    Interacts with a solidity compiler and a registry in order to instantiate compiled
    ethereum contracts with the given web3 provider backend.
    """

    TIMEOUT = 600  # seconds  # TODO: Correlate with the gas strategy - #2070

    DEFAULT_GAS_STRATEGY = 'fast'
    GAS_STRATEGIES = WEB3_GAS_STRATEGIES

    Web3 = Web3  # TODO: This is name-shadowing the actual Web3. Is this intentional?

    _CONTRACT_FACTORY = VersionedContract

    class InterfaceError(Exception):
        pass

    class NoProvider(InterfaceError):
        pass

    class UnsupportedProvider(InterfaceError):
        pass

    class ConnectionFailed(InterfaceError):
        pass

    class UnknownContract(InterfaceError):
        pass

    REASONS = {
        INSUFFICIENT_ETH: 'insufficient funds for gas * price + value',
    }

    class TransactionFailed(InterfaceError):

        IPC_CODE = -32000

        def __init__(self, message: str, transaction_dict: dict,
                     contract_function: Union[ContractFunction,
                                              ContractConstructor], *args):

            self.base_message = message
            self.name = get_transaction_name(
                contract_function=contract_function)
            self.payload = transaction_dict
            self.contract_function = contract_function
            self.failures = {
                BlockchainInterface.REASONS[INSUFFICIENT_ETH]:
                self.insufficient_eth
            }
            self.message = self.failures.get(self.base_message, self.default)
            super().__init__(self.message, *args)

        @property
        def default(self) -> str:
            sender = self.payload["from"]
            message = f'{self.name} from {sender[:6]}... \n' \
                      f'Sender balance: {prettify_eth_amount(self.get_balance())} \n' \
                      f'Reason: {self.base_message} \n' \
                      f'Transaction: {self.payload}'
            return message

        def get_balance(self):
            blockchain = BlockchainInterfaceFactory.get_interface()
            balance = blockchain.client.get_balance(
                account=self.payload['from'])
            return balance

        @property
        def insufficient_eth(self) -> str:
            try:
                transaction_fee = self.payload['gas'] * self.payload['gasPrice']
            except KeyError:
                return self.default
            else:
                cost = transaction_fee + self.payload.get('value', 0)
                message = f'{self.name} from {self.payload["from"][:8]} - {self.base_message}. ' \
                          f'Calculated cost is {prettify_eth_amount(cost)}, ' \
                          f'but sender only has {prettify_eth_amount(self.get_balance())}.'
            return message

    def __init__(
            self,
            emitter=None,  # TODO # 1754
            poa: bool = None,
            light: bool = False,
            provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
            provider: BaseProvider = NO_BLOCKCHAIN_CONNECTION,
            gas_strategy: Optional[Union[str, Callable]] = None,
            max_gas_price: Optional[int] = None):
        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ... (Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the solidity compiler, a contract registry, and a collection of
        web3 network providers as a means of interfacing with the ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION
        self.is_light = light

        # TODO: Not ready to give users total flexibility. Let's stick for the moment to known values. See #2447
        if gas_strategy not in ('slow', 'medium', 'fast', 'free',
                                None):  # FIXME: What is 'None' doing here?
            raise ValueError(f"'{gas_strategy}' is an invalid gas strategy")
        self.gas_strategy = gas_strategy or self.DEFAULT_GAS_STRATEGY
        self.max_gas_price = max_gas_price

    def __repr__(self):
        r = '{name}({uri})'.format(name=self.__class__.__name__,
                                   uri=self.provider_uri)
        return r

    def get_blocktime(self):
        return self.client.get_blocktime()

    @property
    def is_connected(self) -> bool:
        """
        https://web3py.readthedocs.io/en/stable/__provider.html#examples-using-automated-detection
        """
        if self.client is NO_BLOCKCHAIN_CONNECTION:
            return False
        return self.client.is_connected

    @classmethod
    def get_gas_strategy(cls,
                         gas_strategy: Union[str,
                                             Callable] = None) -> Callable:
        try:
            gas_strategy = cls.GAS_STRATEGIES[gas_strategy]
        except KeyError:
            if gas_strategy:
                if not callable(gas_strategy):
                    raise ValueError(
                        f"{gas_strategy} must be callable to be a valid gas strategy."
                    )
            else:
                gas_strategy = cls.GAS_STRATEGIES[cls.DEFAULT_GAS_STRATEGY]
        return gas_strategy

    def attach_middleware(self):
        chain_id = int(self.client.chain_id)
        if self.poa is None:  # If POA is not set explicitly, try to autodetect from chain id
            self.poa = chain_id in POA_CHAINS

        self.log.debug(
            f'Ethereum chain: {self.client.chain_name} (chain_id={chain_id}, poa={self.poa})'
        )

        # For use with Proof-Of-Authority test-blockchains
        if self.poa is True:
            self.log.debug('Injecting POA middleware at layer 0')
            self.client.inject_middleware(geth_poa_middleware, layer=0)

        self.client.add_middleware(middleware.time_based_cache_middleware)
        # self.client.add_middleware(middleware.latest_block_based_cache_middleware)  # TODO: This line causes failed tests and nonce reuse in tests. See #2348.
        self.client.add_middleware(middleware.simple_cache_middleware)

        self.configure_gas_strategy()

    def configure_gas_strategy(self,
                               gas_strategy: Optional[Callable] = None
                               ) -> None:

        if gas_strategy:
            reported_gas_strategy = f"fixed/{gas_strategy.name}"

        elif isinstance(self.client, InfuraClient):
            gas_strategy = construct_datafeed_median_strategy(
                speed=self.gas_strategy)
            reported_gas_strategy = f"datafeed/{self.gas_strategy}"

        else:
            reported_gas_strategy = f"web3/{self.gas_strategy}"
            gas_strategy = self.get_gas_strategy(self.gas_strategy)

        configuration_message = f"Using gas strategy '{reported_gas_strategy}'"

        if self.max_gas_price:
            __price = Web3.toWei(self.max_gas_price,
                                 'gwei')  # from gwei to wei
            gas_strategy = max_price_gas_strategy_wrapper(
                gas_strategy=gas_strategy, max_gas_price_wei=__price)
            configuration_message += f", with a max price of {self.max_gas_price} gwei."

        self.client.set_gas_strategy(gas_strategy=gas_strategy)

        # TODO: This line must not be called prior to establishing a connection
        #        Move it down to a lower layer, near the client.
        # gwei_gas_price = Web3.fromWei(self.client.gas_price_for_transaction(), 'gwei')

        self.log.info(configuration_message)
        # self.log.debug(f"Gas strategy currently reports a gas price of {gwei_gas_price} gwei.")
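
    # A commented sketch (not part of the original example) of how get_gas_strategy(),
    # configure_gas_strategy() and max_gas_price fit together; the endpoint and numbers
    # below are hypothetical:
    #
    #   interface = BlockchainInterface(provider_uri='http://localhost:8545',
    #                                   gas_strategy='fast',
    #                                   max_gas_price=200)   # gwei
    #   interface.connect()
    #
    # During attach_middleware() -> configure_gas_strategy(), 'fast' resolves through
    # GAS_STRATEGIES (or a datafeed strategy when an InfuraClient is detected), and
    # because max_gas_price was supplied, the resolved strategy is wrapped by
    # max_price_gas_strategy_wrapper so quoted prices never exceed 200 gwei
    # (converted to wei above).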

    def connect(self):

        provider_uri = self.provider_uri
        self.log.info(f"Using external Web3 Provider '{self.provider_uri}'")

        # Attach Provider
        self._attach_provider(provider=self._provider,
                              provider_uri=provider_uri)
        self.log.info("Connecting to {}".format(self.provider_uri))
        if self._provider is NO_BLOCKCHAIN_CONNECTION:
            raise self.NoProvider(
                "There are no configured blockchain providers")

        # Connect if not connected
        try:
            self.w3 = self.Web3(provider=self._provider)
            self.client = EthereumClient.from_w3(w3=self.w3)
        except requests.ConnectionError:  # RPC
            raise self.ConnectionFailed(
                f'Connection Failed - {str(self.provider_uri)} - is RPC enabled?'
            )
        except FileNotFoundError:  # IPC File Protocol
            raise self.ConnectionFailed(
                f'Connection Failed - {str(self.provider_uri)} - is IPC enabled?'
            )
        else:
            self.attach_middleware()

        return self.is_connected

    @property
    def provider(self) -> BaseProvider:
        return self._provider

    def _attach_provider(self,
                         provider: Optional[BaseProvider] = None,
                         provider_uri: str = None) -> None:
        """
        https://web3py.readthedocs.io/en/latest/providers.html#providers
        """

        if not provider_uri and not provider:
            raise self.NoProvider("No URI or provider instances supplied.")

        if provider_uri and not provider:
            uri_breakdown = urlparse(provider_uri)

            if uri_breakdown.scheme == 'tester':
                providers = {
                    'pyevm': _get_pyevm_test_provider,
                    'mock': _get_mock_test_provider
                }
                provider_scheme = uri_breakdown.netloc

            else:
                providers = {
                    'auto': _get_auto_provider,
                    'ipc': _get_IPC_provider,
                    'file': _get_IPC_provider,
                    'ws': _get_websocket_provider,
                    'wss': _get_websocket_provider,
                    'http': _get_HTTP_provider,
                    'https': _get_HTTP_provider,
                }
                provider_scheme = uri_breakdown.scheme

            # auto-detect for file based ipc
            if not provider_scheme:
                if os.path.exists(provider_uri):
                    # file is available - assume ipc/file scheme
                    provider_scheme = 'file'
                    self.log.info(
                        f"Auto-detected provider scheme as 'file://' for provider {provider_uri}"
                    )

            try:
                self._provider = providers[provider_scheme](provider_uri)
            except KeyError:
                raise self.UnsupportedProvider(
                    f"{provider_uri} is an invalid or unsupported blockchain provider URI"
                )
            else:
                self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        else:
            self._provider = provider

    @classmethod
    def _handle_failed_transaction(cls,
                                   exception: Exception,
                                   transaction_dict: dict,
                                   contract_function: Union[
                                       ContractFunction, ContractConstructor],
                                   logger: Logger = None) -> None:
        """
        Re-raising error handler and context manager for transaction broadcast or
        build failure events at the interface layer. This method is a last line of defense
        against unhandled exceptions caused by transaction failures and must raise an exception.
        # TODO: #1504 - Additional Handling of validation failures (gas limits, invalid fields, etc.)
        """

        response = exception.args[0]

        # Assume this error is formatted as an RPC response
        try:
            code = int(response['code'])
            message = response['message']
        except Exception:
            # TODO: #1504 - Try even harder to determine if this is insufficient funds causing the issue,
            #               This may be best handled at the agent or actor layer for registry and token interactions.
            # Worst case scenario - raise the exception held in context implicitly
            raise exception

        if code != cls.TransactionFailed.IPC_CODE:
            # Only handle client-specific exceptions
            # https://www.jsonrpc.org/specification Section 5.1
            raise exception

        if logger:
            logger.critical(message)  # simple context

        transaction_failed = cls.TransactionFailed(
            message=message,  # rich error (best case)
            contract_function=contract_function,
            transaction_dict=transaction_dict)
        raise transaction_failed from exception

    def __log_transaction(self, transaction_dict: dict,
                          contract_function: ContractFunction):
        """
        Format and log a transaction dict and return the transaction name string.
        This method *must not* mutate the original transaction dict.
        """
        # Do not mutate the original transaction dict
        tx = dict(transaction_dict).copy()

        # Format
        if tx.get('to'):
            tx['to'] = to_checksum_address(contract_function.address)
        try:
            tx['selector'] = contract_function.selector
        except AttributeError:
            pass
        tx['from'] = to_checksum_address(tx['from'])
        tx.update({
            f: prettify_eth_amount(v)
            for f, v in tx.items() if f in ('gasPrice', 'value')
        })
        payload_pprint = ', '.join("{}: {}".format(k, v)
                                   for k, v in tx.items())

        # Log
        transaction_name = get_transaction_name(
            contract_function=contract_function)
        self.log.debug(f"[TX-{transaction_name}] | {payload_pprint}")

    @validate_checksum_address
    def build_payload(
        self,
        sender_address: str,
        payload: dict = None,
        transaction_gas_limit: int = None,
        use_pending_nonce: bool = True,
    ) -> dict:

        nonce = self.client.get_transaction_count(account=sender_address,
                                                  pending=use_pending_nonce)
        base_payload = {
            'chainId': int(self.client.chain_id),
            'nonce': nonce,
            'from': sender_address
        }

        # Aggregate
        if not payload:
            payload = {}
        payload.update(base_payload)
        # Explicit gas override - will skip gas estimation in next operation.
        if transaction_gas_limit:
            payload['gas'] = int(transaction_gas_limit)
        return payload

    @validate_checksum_address
    def build_contract_transaction(
        self,
        contract_function: ContractFunction,
        sender_address: str,
        payload: dict = None,
        transaction_gas_limit: Optional[int] = None,
        gas_estimation_multiplier: Optional[float] = None,
        use_pending_nonce: Optional[bool] = None,
    ) -> dict:

        # Sanity checks for the gas estimation multiplier
        if gas_estimation_multiplier is not None:
            if not 1 <= gas_estimation_multiplier <= 3:  # TODO: Arbitrary upper bound.
                raise ValueError(
                    f"The gas estimation multiplier should be a float between 1 and 3, "
                    f"but we received {gas_estimation_multiplier}.")
            elif transaction_gas_limit is not None:
                raise ValueError(
                    "'transaction_gas_limit' and 'gas_estimation_multiplier' can't be used together."
                )

        payload = self.build_payload(
            sender_address=sender_address,
            payload=payload,
            transaction_gas_limit=transaction_gas_limit,
            use_pending_nonce=use_pending_nonce)
        self.__log_transaction(transaction_dict=payload,
                               contract_function=contract_function)
        try:
            if 'gas' not in payload:
                # web3's buildTransaction() estimates gas with block identifier "pending" by default,
                # so explicitly estimate gas here with block identifier 'latest' if not otherwise specified,
                # since a pending transaction can cause gas estimation to fail (notably for worklock refunds).
                payload['gas'] = contract_function.estimateGas(
                    payload, block_identifier='latest')
            transaction_dict = contract_function.buildTransaction(payload)
        except (TestTransactionFailed, ValidationError, ValueError) as error:
            # Note: Geth (1.9.15) raises ValueError in the same condition that pyevm raises ValidationError here.
            # Treat this condition as "Transaction Failed" during gas estimation.
            raise self._handle_failed_transaction(
                exception=error,
                transaction_dict=payload,
                contract_function=contract_function,
                logger=self.log)

        # Overestimate the transaction gas limit according to the gas estimation multiplier, if any
        if gas_estimation_multiplier:
            gas_estimation = transaction_dict['gas']
            overestimation = int(
                math.ceil(gas_estimation * gas_estimation_multiplier))
            self.log.debug(
                f"Gas limit for this TX was increased from {gas_estimation} to {overestimation}, "
                f"using a multiplier of {gas_estimation_multiplier}.")
            transaction_dict['gas'] = overestimation
            # TODO: What if we're going over the block limit? Not likely, but perhaps worth checking (NRN)

        return transaction_dict

    def sign_and_broadcast_transaction(
            self,
            transacting_power: TransactingPower,
            transaction_dict: TransactionDict,
            transaction_name: str = "",
            confirmations: int = 0,
            fire_and_forget: bool = False) -> Union[TxReceipt, HexBytes]:
        """
        Takes a transaction dictionary, signs it with the given transacting power, then broadcasts the signed
        transaction using the ethereum provider's eth_sendRawTransaction RPC endpoint.
        Blocking behaviour is controlled by the 'confirmations' and 'fire_and_forget' flags.

        If 'fire_and_forget' is True, this method returns the transaction hash only, without waiting for a
        receipt; otherwise it returns the transaction receipt.

        """
        #
        # Setup
        #

        # TODO # 1754 - Move this to singleton - I do not approve... nor does Bogdan?
        if GlobalLoggerSettings._json_ipc:
            emitter = JSONRPCStdoutEmitter()
        else:
            emitter = StdoutEmitter()

        #
        # Sign
        #

        # TODO: Show the USD Price:  https://api.coinmarketcap.com/v1/ticker/ethereum/
        price = transaction_dict['gasPrice']
        price_gwei = Web3.fromWei(price, 'gwei')
        cost_wei = price * transaction_dict['gas']
        cost = Web3.fromWei(cost_wei, 'ether')

        if transacting_power.is_device:
            emitter.message(
                f'Confirm transaction {transaction_name} on hardware wallet... '
                f'({cost} ETH @ {price_gwei} gwei)',
                color='yellow')
        signed_raw_transaction = transacting_power.sign_transaction(
            transaction_dict)

        #
        # Broadcast
        #
        emitter.message(
            f'Broadcasting {transaction_name} Transaction ({cost} ETH @ {price_gwei} gwei)',
            color='yellow')
        try:
            txhash = self.client.send_raw_transaction(
                signed_raw_transaction)  # <--- BROADCAST
            emitter.message(f'TXHASH {txhash.hex()}', color='yellow')
        except (TestTransactionFailed, ValueError):
            raise  # TODO: Unify with Transaction failed handling -- Entry point for _handle_failed_transaction
        else:
            if fire_and_forget:
                return txhash

        #
        # Receipt
        #

        try:  # TODO: Handle block confirmation exceptions
            waiting_for = 'receipt'
            if confirmations:
                waiting_for = f'{confirmations} confirmations'
            emitter.message(
                f'Waiting {self.TIMEOUT} seconds for {waiting_for}',
                color='yellow')
            receipt = self.client.wait_for_receipt(txhash,
                                                   timeout=self.TIMEOUT,
                                                   confirmations=confirmations)
        except TimeExhausted:
            # TODO: #1504 - Handle transaction timeout
            raise
        else:
            self.log.debug(
                f"[RECEIPT-{transaction_name}] | txhash: {receipt['transactionHash'].hex()}"
            )

        #
        # Confirmations
        #

        # Primary check
        transaction_status = receipt.get('status', UNKNOWN_TX_STATUS)
        if transaction_status == 0:
            failure = f"Transaction transmitted, but receipt returned status code 0. " \
                      f"Full receipt: \n {pprint.pformat(receipt, indent=2)}"
            raise self.InterfaceError(failure)

        if transaction_status is UNKNOWN_TX_STATUS:
            self.log.info(
                f"Unknown transaction status for {txhash} (receipt did not contain a status field)"
            )

            # Secondary check
            tx = self.client.get_transaction(txhash)
            if tx["gas"] == receipt["gasUsed"]:
                raise self.InterfaceError(
                    f"Transaction consumed 100% of transaction gas. "
                    f"Full receipt: \n {pprint.pformat(receipt, indent=2)}")

        return receipt
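
    # A commented usage sketch for sign_and_broadcast_transaction() above; 'interface',
    # 'power' and 'tx' are assumed to already exist (a connected BlockchainInterface,
    # a TransactingPower, and a built transaction dict):
    #
    #   # Blocking call: waits up to TIMEOUT seconds and returns a TxReceipt.
    #   receipt = interface.sign_and_broadcast_transaction(
    #       transacting_power=power, transaction_dict=tx, confirmations=1)
    #
    #   # Fire-and-forget: returns the HexBytes transaction hash immediately, without
    #   # waiting for a receipt (send_transaction() below forbids combining this with
    #   # confirmations > 0).
    #   txhash = interface.sign_and_broadcast_transaction(
    #       transacting_power=power, transaction_dict=tx, fire_and_forget=True)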

    @validate_checksum_address
    def send_transaction(
        self,
        contract_function: Union[ContractFunction, ContractConstructor],
        transacting_power: TransactingPower,
        payload: dict = None,
        transaction_gas_limit: Optional[int] = None,
        gas_estimation_multiplier: Optional[float] = None,
        confirmations: int = 0,
        fire_and_forget: bool = False,  # do not wait for receipt.  See #2385
        replace: bool = False,
    ) -> Union[TxReceipt, HexBytes]:

        if fire_and_forget:
            if confirmations > 0:
                raise ValueError(
                    "Transaction Prevented: "
                    "Cannot use 'confirmations' and 'fire_and_forget' options together."
                )

            use_pending_nonce = False  # TODO: #2385
        else:
            use_pending_nonce = replace  # TODO: #2385

        transaction = self.build_contract_transaction(
            contract_function=contract_function,
            sender_address=transacting_power.account,
            payload=payload,
            transaction_gas_limit=transaction_gas_limit,
            gas_estimation_multiplier=gas_estimation_multiplier,
            use_pending_nonce=use_pending_nonce)

        # Get transaction name
        try:
            transaction_name = contract_function.fn_name.upper()
        except AttributeError:
            transaction_name = 'DEPLOY' if isinstance(
                contract_function, ContractConstructor) else 'UNKNOWN'

        txhash_or_receipt = self.sign_and_broadcast_transaction(
            transacting_power=transacting_power,
            transaction_dict=transaction,
            transaction_name=transaction_name,
            confirmations=confirmations,
            fire_and_forget=fire_and_forget)
        return txhash_or_receipt

    def get_contract_by_name(
            self,
            registry: BaseContractRegistry,
            contract_name: str,
            contract_version: str = None,
            enrollment_version: Union[int, str] = None,
            proxy_name: str = None,
            use_proxy_address: bool = True) -> VersionedContract:
        """
        Instantiate a deployed contract from registry data,
        and assimilate it with its proxy if it is upgradeable.
        """
        target_contract_records = registry.search(
            contract_name=contract_name, contract_version=contract_version)

        if not target_contract_records:
            raise self.UnknownContract(
                f"No such contract records with name {contract_name}:{contract_version}."
            )

        if proxy_name:

            # Lookup proxies; Search for a published proxy that targets this contract record
            proxy_records = registry.search(contract_name=proxy_name)

            results = list()
            for proxy_name, proxy_version, proxy_address, proxy_abi in proxy_records:
                proxy_contract = self.client.w3.eth.contract(
                    abi=proxy_abi,
                    address=proxy_address,
                    version=proxy_version,
                    ContractFactoryClass=self._CONTRACT_FACTORY)

                # Read this dispatcher's target address from the blockchain
                proxy_live_target_address = proxy_contract.functions.target(
                ).call()
                for target_name, target_version, target_address, target_abi in target_contract_records:

                    if target_address == proxy_live_target_address:
                        if use_proxy_address:
                            triplet = (proxy_address, target_version,
                                       target_abi)
                        else:
                            triplet = (target_address, target_version,
                                       target_abi)
                    else:
                        continue

                    results.append(triplet)

            if len(results) > 1:
                address, _version, _abi = results[0]
                message = "Multiple {} deployments are targeting {}".format(
                    proxy_name, address)
                raise self.InterfaceError(message)

            else:
                try:
                    selected_address, selected_version, selected_abi = results[
                        0]
                except IndexError:
                    raise self.UnknownContract(
                        f"There are no Dispatcher records targeting '{contract_name}':{contract_version}"
                    )

        else:
            # TODO: use_proxy_address doesn't work in this case. Should we raise if used?

            # NOTE: 0 must be allowed as a valid version number
            if len(target_contract_records) != 1:
                if enrollment_version is None:
                    m = f"{len(target_contract_records)} records enrolled " \
                        f"for contract {contract_name}:{contract_version} " \
                        f"and no version index was supplied."
                    raise self.InterfaceError(m)
                enrollment_version = self.__get_enrollment_version_index(
                    name=contract_name,
                    contract_version=contract_version,
                    version_index=enrollment_version,
                    enrollments=len(target_contract_records))

            else:
                enrollment_version = -1  # default

            _contract_name, selected_version, selected_address, selected_abi = target_contract_records[
                enrollment_version]

        # Create the contract from selected sources
        unified_contract = self.client.w3.eth.contract(
            abi=selected_abi,
            address=selected_address,
            version=selected_version,
            ContractFactoryClass=self._CONTRACT_FACTORY)

        return unified_contract

    @staticmethod
    def __get_enrollment_version_index(version_index: Union[int, str],
                                       enrollments: int, name: str,
                                       contract_version: str):
        version_names = {'latest': -1, 'earliest': 0}
        try:
            version = version_names[version_index]
        except KeyError:
            try:
                version = int(version_index)
            except ValueError:
                what_is_this = version_index
                raise ValueError(
                    f"'{what_is_this}' is not a valid enrollment version number"
                )
            else:
                if version > enrollments - 1:
                    message = f"Version index '{version}' is larger than the number of enrollments " \
                              f"for {name}:{contract_version}."
                    raise ValueError(message)
        return version
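
Putting this example together end to end: construct the interface with a provider URI, connect(), resolve a deployed contract through the registry, and send a transaction. The sketch below is illustrative only; the endpoint, the 'registry' (a BaseContractRegistry), the funded 'transacting_power' (a TransactingPower), and the 'Hypothetical' contract with its doSomething() function are placeholders rather than parts of the example above.

interface = BlockchainInterface(provider_uri='http://localhost:8545',   # placeholder endpoint
                                gas_strategy='fast')
assert interface.connect()                          # attaches the provider, client and middleware

contract = interface.get_contract_by_name(registry=registry,            # assumed registry instance
                                           contract_name='Hypothetical')
receipt = interface.send_transaction(
    contract_function=contract.functions.doSomething(42),               # placeholder contract call
    transacting_power=transacting_power,                                # assumed funded signer
    confirmations=1)                                                    # block until one confirmation
print(receipt['status'])
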
Example #12
0
class BlockchainInterface:
    """
    Interacts with a solidity compiler and a registry in order to instantiate compiled
    ethereum contracts with the given web3 provider backend.
    """

    TIMEOUT = 600  # seconds  # TODO: Correlate with the gas strategy - #2070

    DEFAULT_GAS_STRATEGY = 'fast'
    GAS_STRATEGIES = {'glacial': time_based.glacial_gas_price_strategy,     # 24h
                      'slow': time_based.slow_gas_price_strategy,           # 1h
                      'medium': time_based.medium_gas_price_strategy,       # 5m
                      'fast': time_based.fast_gas_price_strategy            # 60s
                      }

    process = NO_PROVIDER_PROCESS.bool_value(False)
    Web3 = Web3

    _contract_factory = VersionedContract

    class InterfaceError(Exception):
        pass

    class NoProvider(InterfaceError):
        pass

    class UnsupportedProvider(InterfaceError):
        pass

    class ConnectionFailed(InterfaceError):
        pass

    class UnknownContract(InterfaceError):
        pass

    REASONS = {
        INSUFFICIENT_ETH: 'insufficient funds for gas * price + value',
    }

    class TransactionFailed(InterfaceError):

        IPC_CODE = -32000  # (geth)

        def __init__(self,
                     message: str,
                     transaction_dict: dict,
                     contract_function: Union[ContractFunction, ContractConstructor],
                     *args):

            self.base_message = message
            self.name = get_transaction_name(contract_function=contract_function)
            self.payload = transaction_dict
            self.contract_function = contract_function
            self.failures = {
                BlockchainInterface.REASONS[INSUFFICIENT_ETH]: self.insufficient_eth
            }
            self.message = self.failures.get(self.base_message, self.default)
            super().__init__(self.message, *args)

        @property
        def default(self) -> str:
            sender = self.payload["from"]
            message = f'{self.name} from {sender[:6]}... \n' \
                      f'Sender balance: {self.get_balance()} ETH \n' \
                      f'Reason: {self.base_message} \n' \
                      f'Transaction: {self.payload}'
            return message

        def get_balance(self):
            blockchain = BlockchainInterfaceFactory.get_interface()
            balance = blockchain.client.get_balance(account=self.payload['from'])
            return balance

        @property
        def insufficient_eth(self) -> str:
            gas = (self.payload.get('gas', 1) * self.payload['gasPrice'])  # FIXME: If gas is not included...
            cost = gas + self.payload.get('value', 0)
            message = f'{self.name} from {self.payload["from"][:8]} - {self.base_message}. ' \
                      f'Calculated cost is {cost} but sender only has {self.get_balance()}.'
            return message

    def __init__(self,
                 emitter = None,  # TODO # 1754
                 poa: bool = None,
                 light: bool = False,
                 provider_process=NO_PROVIDER_PROCESS,
                 provider_uri: str = NO_BLOCKCHAIN_CONNECTION,
                 provider: Web3Providers = NO_BLOCKCHAIN_CONNECTION,
                 gas_strategy: Union[str, Callable] = DEFAULT_GAS_STRATEGY):

        """
        TODO: #1502 - Move to API docs.

         Filesystem          Configuration           Node              Client                  EVM
        ================ ====================== =============== =====================  ===========================

         Solidity Files -- SolidityCompiler -                      --- HTTPProvider ------ ...
                                            |                    |
                                            |                    |
                                            |                    |
                                            - *BlockchainInterface* -- IPCProvider ----- External EVM (geth, parity...)
                                                       |         |
                                                       |         |
                                                 TestProvider ----- EthereumTester -------------
                                                                                                |
                                                                                                |
                                                                                        PyEVM (Development Chain)

         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

         Runtime Files --                 --BlockchainInterface ----> Registry
                        |                |             ^
                        |                |             |
                        |                |             |
         Key Files ------ CharacterConfiguration     Agent                          ... (Contract API)
                        |                |             ^
                        |                |             |
                        |                |             |
                        |                |           Actor                          ... (Blockchain-Character API)
                        |                |             ^
                        |                |             |
                        |                |             |
         Config File ---                  --------- Character                       ... (Public API)
                                                       ^
                                                       |
                                                     Human


        The Blockchain is the junction of the solidity compiler, a contract registry, and a collection of
        web3 network providers as a means of interfacing with the ethereum blockchain to execute
        or deploy contract code on the network.


        Compiler and Registry Usage
        -----------------------------

        Contracts are freshly re-compiled if an instance of SolidityCompiler is passed; otherwise,
        the registry will read contract data saved to disk, which is used to retrieve contract addresses and op-codes.
        Optionally, a registry instance can be passed instead.


        Provider Usage
        ---------------
        https://github.com/ethereum/eth-tester#available-backends


        * HTTP Provider - Web3 HTTP provider, typically JSON RPC 2.0 over HTTP
        * Websocket Provider - Web3 WS provider, typically JSON RPC 2.0 over WS, supply endpoint uri and websocket=True
        * IPC Provider - Web3 File based IPC provider transported over standard I/O
        * Custom Provider - A pre-initialized web3.py provider instance to attach to this interface

        """

        self.log = Logger('Blockchain')
        self.poa = poa
        self.provider_uri = provider_uri
        self._provider = provider
        self._provider_process = provider_process
        self.w3 = NO_BLOCKCHAIN_CONNECTION
        self.client = NO_BLOCKCHAIN_CONNECTION         # type: EthereumClient
        self.transacting_power = READ_ONLY_INTERFACE
        self.is_light = light
        self.gas_strategy = self.get_gas_strategy(gas_strategy)

    def __repr__(self):
        r = '{name}({uri})'.format(name=self.__class__.__name__, uri=self.provider_uri)
        return r

    @classmethod
    def from_dict(cls, payload: dict, **overrides) -> 'BlockchainInterface':
        payload.update({k: v for k, v in overrides.items() if v is not None})
        blockchain = cls(**payload)
        return blockchain

    def to_dict(self) -> dict:
        payload = dict(provider_uri=self.provider_uri, poa=self.poa, light=self.is_light)
        return payload

    @property
    def is_connected(self) -> bool:
        """
        https://web3py.readthedocs.io/en/stable/__provider.html#examples-using-automated-detection
        """
        if self.client is NO_BLOCKCHAIN_CONNECTION:
            return False
        return self.client.is_connected

    @classmethod
    def get_gas_strategy(cls, gas_strategy: Union[str, Callable] = None) -> Callable:
        try:
            gas_strategy = cls.GAS_STRATEGIES[gas_strategy]
        except KeyError:
            if gas_strategy:
                if not callable(gas_strategy):
                    raise ValueError(f"{gas_strategy} must be callable to be a valid gas strategy.")
            else:
                gas_strategy = cls.GAS_STRATEGIES[cls.DEFAULT_GAS_STRATEGY]
        return gas_strategy

    def attach_middleware(self):
        chain_id = int(self.client.chain_id)
        if self.poa is None:  # If POA is not set explicitly, try to autodetect from chain id
            self.poa = chain_id in POA_CHAINS

        self.log.debug(f'Ethereum chain: {self.client.chain_name} (chain_id={chain_id}, poa={self.poa})')

        # For use with Proof-Of-Authority test-blockchains
        if self.poa is True:
            self.log.debug('Injecting POA middleware at layer 0')
            self.client.inject_middleware(geth_poa_middleware, layer=0)

        # Gas Price Strategy:
        # Bundled web3 strategies are too expensive for Infura (it takes ~1 minute to get a price),
        # so we use external gas price oracles, instead (see #2139)
        if isinstance(self.client, InfuraClient):
            gas_strategy = datafeed_fallback_gas_price_strategy
        else:
            gas_strategy = self.gas_strategy
        self.client.set_gas_strategy(gas_strategy=gas_strategy)
        gwei_gas_price = Web3.fromWei(self.client.gas_price_for_transaction(), 'gwei')
        self.log.debug(f"Currently, our gas strategy returns a gas price of {gwei_gas_price} gwei")

        self.client.add_middleware(middleware.time_based_cache_middleware)
        self.client.add_middleware(middleware.latest_block_based_cache_middleware)
        self.client.add_middleware(middleware.simple_cache_middleware)

    def connect(self):

        # Spawn child process
        if self._provider_process:
            self._provider_process.start()
            provider_uri = self._provider_process.provider_uri(scheme='file')
        else:
            provider_uri = self.provider_uri
            self.log.info(f"Using external Web3 Provider '{self.provider_uri}'")

        # Attach Provider
        self._attach_provider(provider=self._provider, provider_uri=provider_uri)
        self.log.info("Connecting to {}".format(self.provider_uri))
        if self._provider is NO_BLOCKCHAIN_CONNECTION:
            raise self.NoProvider("There are no configured blockchain providers")

        # Connect if not connected
        try:
            self.w3 = self.Web3(provider=self._provider)
            self.client = EthereumClient.from_w3(w3=self.w3)
        except requests.ConnectionError:  # RPC
            raise self.ConnectionFailed(f'Connection Failed - {str(self.provider_uri)} - is RPC enabled?')
        except FileNotFoundError:         # IPC File Protocol
            raise self.ConnectionFailed(f'Connection Failed - {str(self.provider_uri)} - is IPC enabled?')
        else:
            self.attach_middleware()

        return self.is_connected

    def sync(self, emitter=None) -> None:

        sync_state = self.client.sync()
        if emitter is not None:

            emitter.echo(f"Syncing: {self.client.chain_name.capitalize()}. Waiting for sync to begin.", verbosity=1)

            while not len(self.client.peers):
                emitter.echo("waiting for peers...", verbosity=1)
                time.sleep(5)

            peer_count = len(self.client.peers)
            emitter.echo(
                f"Found {'an' if peer_count == 1 else peer_count} Ethereum peer{('s' if peer_count > 1 else '')}.",
                verbosity=1)

            try:
                emitter.echo("Beginning sync...", verbosity=1)
                initial_state = next(sync_state)
            except StopIteration:  # will occur if no syncing needs to happen
                emitter.echo("Local blockchain data is already synced.", verbosity=1)
                return

            prior_state = initial_state
            total_blocks_to_sync = int(initial_state.get('highestBlock', 0)) - int(
                initial_state.get('currentBlock', 0))
            with click.progressbar(
                    length=total_blocks_to_sync,
                    label="sync progress",
                    file=emitter.get_stream(verbosity=1)
            ) as bar:
                for syncdata in sync_state:
                    if syncdata:
                        blocks_accomplished = int(syncdata['currentBlock']) - int(
                            prior_state.get('currentBlock', 0))
                        bar.update(blocks_accomplished)
                        prior_state = syncdata
        else:
            try:
                for syncdata in sync_state:
                    self.client.log.info(f"Syncing {syncdata['currentBlock']}/{syncdata['highestBlock']}")
            except TypeError:  # it's already synced
                return
        return

    @property
    def provider(self) -> Union[IPCProvider, WebsocketProvider, HTTPProvider]:
        return self._provider

    def _attach_provider(self,
                         provider: Web3Providers = None,
                         provider_uri: str = None) -> None:
        """
        https://web3py.readthedocs.io/en/latest/providers.html#providers
        """

        if not provider_uri and not provider:
            raise self.NoProvider("No URI or provider instances supplied.")

        if provider_uri and not provider:
            uri_breakdown = urlparse(provider_uri)

            if uri_breakdown.scheme == 'tester':
                providers = {
                    'pyevm': _get_pyevm_test_provider,
                    'geth': _get_test_geth_parity_provider,
                    'parity-ethereum': _get_test_geth_parity_provider,
                    'mock': _get_mock_test_provider
                }
                provider_scheme = uri_breakdown.netloc

            else:
                providers = {
                    'auto': _get_auto_provider,
                    'infura': _get_infura_provider,
                    'ipc': _get_IPC_provider,
                    'file': _get_IPC_provider,
                    'ws': _get_websocket_provider,
                    'wss': _get_websocket_provider,
                    'http': _get_HTTP_provider,
                    'https': _get_HTTP_provider,
                }
                provider_scheme = uri_breakdown.scheme

            # auto-detect for file based ipc
            if not provider_scheme:
                if os.path.exists(provider_uri):
                    # file is available - assume ipc/file scheme
                    provider_scheme = 'file'
                    self.log.info(f"Auto-detected provider scheme as 'file://' for provider {provider_uri}")

            try:
                self._provider = providers[provider_scheme](provider_uri)
            except KeyError:
                raise self.UnsupportedProvider(f"{provider_uri} is an invalid or unsupported blockchain provider URI")
            else:
                self.provider_uri = provider_uri or NO_BLOCKCHAIN_CONNECTION
        else:
            self._provider = provider

    def __transaction_failed(self,
                             exception: Exception,
                             transaction_dict: dict,
                             contract_function: Union[ContractFunction, ContractConstructor]
                             ) -> None:
        """
        Re-raising error handler and context manager for transaction broadcast or
        build failure events at the interface layer. This method is a last line of defense
        against unhandled exceptions caused by transaction failures and must raise an exception.
        # TODO: #1504 - Additional Handling of validation failures (gas limits, invalid fields, etc.)
        """

        try:
            # Assume this error is formatted as an IPC response
            code, message = exception.args[0].values()

        except (ValueError, IndexError, AttributeError):
            # TODO: #1504 - Try even harder to determine if this is insufficient funds causing the issue,
            #               This may be best handled at the agent or actor layer for registry and token interactions.
            # Worst case scenario - raise the exception held in context implicitly
            raise exception

        else:
            if int(code) != self.TransactionFailed.IPC_CODE:
                # Only handle client-specific exceptions
                # https://www.jsonrpc.org/specification Section 5.1
                raise exception
            self.log.critical(message)                     # simple context
            transaction_failed = self.TransactionFailed(message=message,  # rich error (best case)
                                                        contract_function=contract_function,
                                                        transaction_dict=transaction_dict)
            raise transaction_failed from exception

    def __log_transaction(self, transaction_dict: dict, contract_function: ContractFunction):
        """
        Format and log a transaction dict and return the transaction name string.
        This method *must not* mutate the original transaction dict.
        """
        # Do not mutate the original transaction dict
        tx = dict(transaction_dict).copy()

        # Format
        if tx.get('to'):
            tx['to'] = to_checksum_address(contract_function.address)
        try:
            tx['selector'] = contract_function.selector
        except AttributeError:
            pass
        tx['from'] = to_checksum_address(tx['from'])
        tx.update({f: prettify_eth_amount(v) for f, v in tx.items() if f in ('gasPrice', 'value')})
        payload_pprint = ', '.join("{}: {}".format(k, v) for k, v in tx.items())

        # Log
        transaction_name = get_transaction_name(contract_function=contract_function)
        self.log.debug(f"[TX-{transaction_name}] | {payload_pprint}")

    @validate_checksum_address
    def build_payload(self,
                      sender_address: str,
                      payload: dict = None,
                      transaction_gas_limit: int = None,
                      ) -> dict:

        base_payload = {'chainId': int(self.client.chain_id),
                        'nonce': self.client.w3.eth.getTransactionCount(sender_address, 'pending'),
                        'from': sender_address}

        # Aggregate
        if not payload:
            payload = {}
        payload.update(base_payload)
        # Explicit gas override - will skip gas estimation in next operation.
        if transaction_gas_limit:
            payload['gas'] = int(transaction_gas_limit)
        return payload

    @validate_checksum_address
    def build_contract_transaction(self,
                                   contract_function: ContractFunction,
                                   sender_address: str,
                                   payload: dict = None,
                                   transaction_gas_limit: int = None,
                                   ) -> dict:
        payload = self.build_payload(sender_address=sender_address,
                                     payload=payload,
                                     transaction_gas_limit=transaction_gas_limit)
        self.__log_transaction(transaction_dict=payload, contract_function=contract_function)
        try:
            transaction_dict = contract_function.buildTransaction(payload)  # Gas estimation occurs here
        except (TestTransactionFailed, ValidationError, ValueError) as error:
            # Note: Geth raises ValueError in the same condition that pyevm raises ValidationError here.
            # Treat this condition as "Transaction Failed" during gas estimation.
            raise self.__transaction_failed(exception=error, transaction_dict=payload, contract_function=contract_function)
        return transaction_dict

    def sign_and_broadcast_transaction(self,
                                       transaction_dict,
                                       transaction_name: str = "",
                                       confirmations: int = 0
                                       ) -> dict:

        #
        # Setup
        #

        # TODO # 1754 - Move this to singleton - I do not approve... nor does Bogdan?
        if GlobalLoggerSettings._json_ipc:
            emitter = JSONRPCStdoutEmitter()
        else:
            emitter = StdoutEmitter()

        if self.transacting_power is READ_ONLY_INTERFACE:
            raise self.InterfaceError(str(READ_ONLY_INTERFACE))

        #
        # Sign
        #

        # TODO: Show the USD Price:  https://api.coinmarketcap.com/v1/ticker/ethereum/
        price = transaction_dict['gasPrice']
        price_gwei = Web3.fromWei(price, 'gwei')
        cost_wei = price * transaction_dict['gas']
        cost = Web3.fromWei(cost_wei, 'ether')
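        # Illustrative arithmetic (hypothetical values): a 100 gwei gas price at a
        # 21,000 gas limit costs 100e9 * 21_000 = 2.1e15 wei = 0.0021 ETH.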

        if self.transacting_power.is_device:
            emitter.message(f'Confirm transaction {transaction_name} on hardware wallet... '
                            f'({cost} ETH @ {price_gwei} gwei)',
                            color='yellow')
        signed_raw_transaction = self.transacting_power.sign_transaction(transaction_dict)

        #
        # Broadcast
        #

        emitter.message(f'Broadcasting {transaction_name} Transaction ({cost} ETH @ {price_gwei} gwei)...',
                        color='yellow')
        try:
            txhash = self.client.send_raw_transaction(signed_raw_transaction)  # <--- BROADCAST
        except (TestTransactionFailed, ValueError) as error:
            raise  # TODO: Unify with Transaction failed handling

        #
        # Receipt
        #

        try:  # TODO: Handle block confirmation exceptions
            receipt = self.client.wait_for_receipt(txhash, timeout=self.TIMEOUT, confirmations=confirmations)
        except TimeExhausted:
            # TODO: #1504 - Handle transaction timeout
            raise
        else:
            self.log.debug(f"[RECEIPT-{transaction_name}] | txhash: {receipt['transactionHash'].hex()}")

        #
        # Confirmations
        #

        # Primary check
        transaction_status = receipt.get('status', UNKNOWN_TX_STATUS)
        if transaction_status == 0:
            failure = f"Transaction transmitted, but receipt returned status code 0. " \
                      f"Full receipt: \n {pprint.pformat(receipt, indent=2)}"
            raise self.InterfaceError(failure)

        if transaction_status is UNKNOWN_TX_STATUS:
            self.log.info(f"Unknown transaction status for {txhash} (receipt did not contain a status field)")

            # Secondary check
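            # Receipts without a status field predate EIP-658 (Byzantium); as a
            # fallback heuristic, consuming the entire gas allowance is treated
            # as a likely failure.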
            tx = self.client.get_transaction(txhash)
            if tx["gas"] == receipt["gasUsed"]:
                raise self.InterfaceError(f"Transaction consumed 100% of transaction gas. "
                                          f"Full receipt: \n {pprint.pformat(receipt, indent=2)}")

        return receipt

    def get_blocktime(self):
        return self.client.get_blocktime()

    @validate_checksum_address
    def send_transaction(self,
                         contract_function: Union[ContractFunction, ContractConstructor],
                         sender_address: str,
                         payload: dict = None,
                         transaction_gas_limit: int = None,
                         confirmations: int = 0
                         ) -> dict:

        transaction = self.build_contract_transaction(contract_function=contract_function,
                                                      sender_address=sender_address,
                                                      payload=payload,
                                                      transaction_gas_limit=transaction_gas_limit)

        # Get transaction name
        try:
            transaction_name = contract_function.fn_name.upper()
        except AttributeError:
            transaction_name = 'DEPLOY' if isinstance(contract_function, ContractConstructor) else 'UNKNOWN'

        receipt = self.sign_and_broadcast_transaction(transaction_dict=transaction,
                                                      transaction_name=transaction_name,
                                                      confirmations=confirmations)
        return receipt
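
    # Hedged call sketch (the `interface`, `token`, `spender` and `sender_address`
    # names are hypothetical):
    #   receipt = interface.send_transaction(
    #       contract_function=token.functions.approve(spender, amount),
    #       sender_address=sender_address,
    #       confirmations=1)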

    def get_contract_by_name(self,
                             registry: BaseContractRegistry,
                             contract_name: str,
                             contract_version: str = None,
                             enrollment_version: Union[int, str] = None,
                             proxy_name: str = None,
                             use_proxy_address: bool = True
                             ) -> VersionedContract:
        """
        Instantiate a deployed contract from registry data,
        and assimilate it with its proxy if it is upgradeable.
        """
        target_contract_records = registry.search(contract_name=contract_name, contract_version=contract_version)

        if not target_contract_records:
            raise self.UnknownContract(f"No such contract records with name {contract_name}:{contract_version}.")

        if proxy_name:

            # Lookup proxies; Search for a published proxy that targets this contract record
            proxy_records = registry.search(contract_name=proxy_name)

            results = list()
            for proxy_name, proxy_version, proxy_address, proxy_abi in proxy_records:
                proxy_contract = self.client.w3.eth.contract(abi=proxy_abi,
                                                             address=proxy_address,
                                                             version=proxy_version,
                                                             ContractFactoryClass=self._contract_factory)

                # Read this dispatcher's target address from the blockchain
                proxy_live_target_address = proxy_contract.functions.target().call()
                for target_name, target_version, target_address, target_abi in target_contract_records:

                    if target_address == proxy_live_target_address:
                        if use_proxy_address:
                            triplet = (proxy_address, target_version, target_abi)
                        else:
                            triplet = (target_address, target_version, target_abi)
                    else:
                        continue

                    results.append(triplet)

            if len(results) > 1:
                address, _version, _abi = results[0]
                message = "Multiple {} deployments are targeting {}".format(proxy_name, address)
                raise self.InterfaceError(message)

            else:
                try:
                    selected_address, selected_version, selected_abi = results[0]
                except IndexError:
                    raise self.UnknownContract(
                        f"There are no Dispatcher records targeting '{contract_name}':{contract_version}")

        else:
            # TODO: use_proxy_address doesn't work in this case. Should we raise if used?

            # NOTE: 0 must be allowed as a valid version number
            if len(target_contract_records) != 1:
                if enrollment_version is None:
                    m = f"{len(target_contract_records)} records enrolled " \
                        f"for contract {contract_name}:{contract_version} " \
                        f"and no version index was supplied."
                    raise self.InterfaceError(m)
                enrollment_version = self.__get_enrollment_version_index(name=contract_name,
                                                                         contract_version=contract_version,
                                                                         version_index=enrollment_version,
                                                                         enrollments=len(target_contract_records))

            else:
                enrollment_version = -1  # default

            _contract_name, selected_version, selected_address, selected_abi = target_contract_records[enrollment_version]

        # Create the contract from selected sources
        unified_contract = self.client.w3.eth.contract(abi=selected_abi,
                                                       address=selected_address,
                                                       version=selected_version,
                                                       ContractFactoryClass=self._contract_factory)

        return unified_contract

    @staticmethod
    def __get_enrollment_version_index(version_index: Union[int, str],
                                       enrollments: int,
                                       name: str,
                                       contract_version: str):
        version_names = {'latest': -1, 'earliest': 0}
        try:
            version = version_names[version_index]
        except KeyError:
            try:
                version = int(version_index)
            except ValueError:
                what_is_this = version_index
                raise ValueError(f"'{what_is_this}' is not a valid enrollment version number")
            else:
                if version > enrollments - 1:
                    message = f"Version index '{version}' is larger than the number of enrollments " \
                              f"for {name}:{contract_version}."
                    raise ValueError(message)
        return version
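
# --- Hedged usage sketch (not part of the original example) -------------------
# Resolving an upgradeable contract through its proxy with get_contract_by_name();
# the `blockchain` and `registry` objects and the contract/proxy names below are
# hypothetical placeholders:
#
#   contract = blockchain.get_contract_by_name(registry=registry,
#                                              contract_name='StakingEscrow',
#                                              proxy_name='Dispatcher')
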
def test_collect_rewards_integration(
        click_runner, testerchain, agency_local_registry,
        stakeholder_configuration_file_location, blockchain_alice,
        blockchain_bob, random_policy_label, beneficiary,
        preallocation_escrow_agent, mock_allocation_registry, manual_worker,
        token_economics, mock_transacting_power_activation, stake_value,
        policy_value, policy_rate):
    # Disable re-staking
    restake_args = ('stake', 'restake', '--disable', '--config-file',
                    stakeholder_configuration_file_location,
                    '--allocation-filepath',
                    MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 restake_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    half_stake_time = token_economics.minimum_locked_periods // 2  # Test setup
    logger = Logger("Test-CLI")  # logger for tracing test periods
    current_period = 0  # track the current period as the test advances time

    staker_address = preallocation_escrow_agent.principal_contract.address
    worker_address = manual_worker

    # The staker is staking.
    stakes = StakeList(registry=agency_local_registry,
                       checksum_address=staker_address)
    stakes.refresh()
    assert stakes

    staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                             registry=agency_local_registry)
    assert worker_address == staking_agent.get_worker_from_staker(
        staker_address=staker_address)

    ursula_port = select_test_port()
    ursula = Ursula(is_me=True,
                    checksum_address=staker_address,
                    worker_address=worker_address,
                    registry=agency_local_registry,
                    rest_host='127.0.0.1',
                    rest_port=ursula_port,
                    start_working_now=False,
                    network_middleware=MockRestMiddleware(),
                    db_filepath=tempfile.mkdtemp())

    MOCK_KNOWN_URSULAS_CACHE[ursula_port] = ursula
    assert ursula.worker_address == worker_address
    assert ursula.checksum_address == staker_address

    mock_transacting_power_activation(account=worker_address,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Make a commitment for half the first stake duration
    for _ in range(half_stake_time):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        testerchain.time_travel(periods=1)
        current_period += 1

    # Alice creates a policy and grants Bob access
    blockchain_alice.selection_buffer = 1

    M, N = 1, 1
    days = 3
    now = testerchain.w3.eth.getBlock(block_identifier='latest').timestamp
    expiration = maya.MayaDT(now).add(days=days - 1)
    blockchain_policy = blockchain_alice.grant(bob=blockchain_bob,
                                               label=random_policy_label,
                                               m=M,
                                               n=N,
                                               value=policy_value,
                                               expiration=expiration,
                                               handpicked_ursulas={ursula})

    # Ensure that the handpicked Ursula was selected for the policy
    arrangement = list(blockchain_policy._accepted_arrangements)[0]
    assert arrangement.ursula == ursula

    # Bob learns about the new staker and joins the policy
    blockchain_bob.start_learning_loop()
    blockchain_bob.remember_node(node=ursula)
    blockchain_bob.join_policy(random_policy_label,
                               bytes(blockchain_alice.stamp))

    # Enrico Encrypts (of course)
    enrico = Enrico(policy_encrypting_key=blockchain_policy.public_key,
                    network_middleware=MockRestMiddleware())

    verifying_key = blockchain_alice.stamp.as_umbral_pubkey()

    for index in range(half_stake_time - 5):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()

        # Encrypt
        random_data = os.urandom(random.randrange(20, 100))
        message_kit, signature = enrico.encrypt_message(plaintext=random_data)

        # Decrypt
        cleartexts = blockchain_bob.retrieve(message_kit,
                                             enrico=enrico,
                                             alice_verifying_key=verifying_key,
                                             label=random_policy_label)
        assert random_data == cleartexts[0]

        # Ursula stays online while the clock advances
        testerchain.time_travel(periods=1)
        current_period += 1

    # Finish the passage of time; minus 1 because the first period
    # was already committed to in test_ursula_run
    for _ in range(5 - 1):
        logger.debug(
            f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
        ursula.commit_to_next_period()
        current_period += 1
        testerchain.time_travel(periods=1)

    #
    # WHERE'S THE MONEY, URSULA? - Collecting Rewards
    #

    balance = testerchain.client.get_balance(beneficiary)

    # Rewards will be unlocked after the
    # final committed period has passed (+1).
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")
    testerchain.time_travel(periods=1)
    current_period += 1
    logger.debug(f">>>>>>>>>>> TEST PERIOD {current_period} <<<<<<<<<<<<<<<<")

    # Since we are mocking the blockchain connection, manually consume the transacting power of the Beneficiary.
    mock_transacting_power_activation(account=beneficiary,
                                      password=INSECURE_DEVELOPMENT_PASSWORD)

    # Collect Policy Fee
    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location, '--policy-fee',
                       '--no-staking-reward', '--withdraw-address',
                       beneficiary, '--allocation-filepath',
                       MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # Policy Fee
    collected_policy_fee = testerchain.client.get_balance(beneficiary)
    assert collected_policy_fee > balance

    #
    # Collect Staking Reward
    #
    token_agent = ContractAgency.get_agent(agent_class=NucypherTokenAgent,
                                           registry=agency_local_registry)
    balance_before_collecting = token_agent.get_balance(address=staker_address)

    collection_args = ('stake', 'collect-reward', '--config-file',
                       stakeholder_configuration_file_location,
                       '--no-policy-fee', '--staking-reward',
                       '--allocation-filepath',
                       MOCK_INDIVIDUAL_ALLOCATION_FILEPATH, '--force')

    result = click_runner.invoke(nucypher_cli,
                                 collection_args,
                                 input=INSECURE_DEVELOPMENT_PASSWORD,
                                 catch_exceptions=False)
    assert result.exit_code == 0

    # The staking rewards have been collected and now sit in the staker's preallocation escrow contract
    assert token_agent.get_balance(
        address=staker_address) >= balance_before_collecting
Example #14
0
class NodeStorage(ABC):
    _name = NotImplemented
    _TYPE_LABEL = 'storage_type'

    TLS_CERTIFICATE_ENCODING = Encoding.PEM
    TLS_CERTIFICATE_EXTENSION = '.{}'.format(TLS_CERTIFICATE_ENCODING.name.lower())

    class NodeStorageError(Exception):
        pass

    class UnknownNode(NodeStorageError):
        pass

    def __init__(self,
                 federated_only: bool = False,  # TODO# 466
                 character_class=None,
                 registry: BaseContractRegistry = None,
                 ) -> None:

        from nucypher.characters.lawful import Ursula

        self.log = Logger(self.__class__.__name__)
        self.registry = registry
        self.federated_only = federated_only
        self.character_class = character_class or Ursula

    def __getitem__(self, item):
        return self.get(checksum_address=item, federated_only=self.federated_only)

    def __setitem__(self, key, value):
        return self.store_node_metadata(node=value)

    def __iter__(self):
        return self.all(federated_only=self.federated_only)

    @property
    @abstractmethod
    def source(self) -> str:
        """Human readable source string"""
        return NotImplemented

    def encode_node_bytes(self, node_bytes):
        return binascii.hexlify(node_bytes)

    def decode_node_bytes(self, encoded_node) -> bytes:
        return binascii.unhexlify(encoded_node)

    def _read_common_name(self, certificate: Certificate):
        x509 = OpenSSL.crypto.X509.from_cryptography(certificate)
        subject_components = x509.get_subject().get_components()
        common_name_as_bytes = subject_components[0][1]
        common_name_from_cert = common_name_as_bytes.decode()
        return common_name_from_cert

    def _write_tls_certificate(self,
                               port: int,  # used to avoid duplicate certs with the same IP
                               certificate: Certificate,
                               force: bool = True) -> Path:

        # Read
        x509 = OpenSSL.crypto.X509.from_cryptography(certificate)
        subject_components = x509.get_subject().get_components()
        common_name_as_bytes = subject_components[0][1]
        common_name_on_certificate = common_name_as_bytes.decode()
        host = common_name_on_certificate

        certificate_filepath = self.generate_certificate_filepath(host=host, port=port)
        certificate_already_exists = certificate_filepath.is_file()
        if force is False and certificate_already_exists:
            raise FileExistsError('A TLS certificate already exists at {}.'.format(certificate_filepath))

        # Write
        certificate_filepath.parent.mkdir(parents=True, exist_ok=True)
        with open(certificate_filepath, 'wb') as certificate_file:
            public_pem_bytes = certificate.public_bytes(self.TLS_CERTIFICATE_ENCODING)
            certificate_file.write(public_pem_bytes)

        self.log.debug(f"Saved TLS certificate for {host} to {certificate_filepath}")
        return certificate_filepath

    @abstractmethod
    def store_node_certificate(self, certificate: Certificate, port: int) -> Path:
        raise NotImplementedError

    @abstractmethod
    def store_node_metadata(self, node, filepath: Optional[Path] = None) -> Path:
        """Save a single node's metadata and tls certificate"""
        raise NotImplementedError

    @abstractmethod
    def generate_certificate_filepath(self, host: str, port: int) -> Path:
        raise NotImplementedError

    @abstractmethod
    def payload(self) -> dict:
        raise NotImplementedError

    @classmethod
    @abstractmethod
    def from_payload(cls, data: dict, *args, **kwargs) -> 'NodeStorage':
        """Instantiate a storage object from a dictionary"""
        raise NotImplementedError

    @abstractmethod
    def initialize(self):
        """One-time initialization steps to establish a node storage backend"""
        raise NotImplementedError

    @abstractmethod
    def all(self, federated_only: bool, certificates_only: bool = False) -> set:
        """Return a set of all stored nodes"""
        raise NotImplementedError

    @abstractmethod
    def get(self, checksum_address: str, federated_only: bool):
        """Retrieve a single stored node"""
        raise NotImplementedError

    @abstractmethod
    def clear(self) -> bool:
        """Remove all stored nodes"""
        raise NotImplementedError
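
# --- Hedged sketch (not part of the original example) -------------------------
# A minimal in-memory NodeStorage subclass, shown only to illustrate how the
# abstract interface above fits together. It assumes the same names imported by
# the example (Path, Optional, Certificate) and keeps everything in plain dicts.
class InMemoryNodeStorage(NodeStorage):

    _name = 'memory'

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.__metadata = dict()      # checksum_address -> node
        self.__certificates = dict()  # (host, port) -> PEM bytes

    @property
    def source(self) -> str:
        return 'memory'

    def store_node_certificate(self, certificate: Certificate, port: int) -> Path:
        host = self._read_common_name(certificate)
        self.__certificates[(host, port)] = certificate.public_bytes(self.TLS_CERTIFICATE_ENCODING)
        return self.generate_certificate_filepath(host=host, port=port)

    def store_node_metadata(self, node, filepath: Optional[Path] = None) -> Path:
        self.__metadata[node.checksum_address] = node
        return Path(node.checksum_address)  # pseudo-path; nothing touches disk

    def generate_certificate_filepath(self, host: str, port: int) -> Path:
        return Path(f'{host}_{port}{self.TLS_CERTIFICATE_EXTENSION}')

    def payload(self) -> dict:
        return {self._TYPE_LABEL: self._name}

    @classmethod
    def from_payload(cls, data: dict, *args, **kwargs) -> 'NodeStorage':
        return cls(*args, **kwargs)

    def initialize(self):
        self.clear()

    def all(self, federated_only: bool, certificates_only: bool = False) -> set:
        return set(self.__certificates.values()) if certificates_only else set(self.__metadata.values())

    def get(self, checksum_address: str, federated_only: bool):
        try:
            return self.__metadata[checksum_address]
        except KeyError:
            raise self.UnknownNode(checksum_address)

    def clear(self) -> bool:
        self.__metadata, self.__certificates = dict(), dict()
        return True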
Example #15
0
class Porter(Learner):

    BANNER = r"""

 ______
(_____ \           _
 _____) )__   ____| |_  ____  ____
|  ____/ _ \ / ___)  _)/ _  )/ ___)
| |   | |_| | |   | |_( (/ /| |
|_|    \___/|_|    \___)____)_|

the Pipe for PRE Application network operations
"""

    APP_NAME = "Porter"

    _SHORT_LEARNING_DELAY = 2
    _LONG_LEARNING_DELAY = 30
    _ROUNDS_WITHOUT_NODES_AFTER_WHICH_TO_SLOW_DOWN = 25

    DEFAULT_EXECUTION_TIMEOUT = 15  # 15s

    DEFAULT_PORT = 9155

    _interface_class = PorterInterface

    class UrsulaInfo(NamedTuple):
        """Simple object that stores relevant Ursula information resulting from sampling."""
        checksum_address: ChecksumAddress
        uri: str
        encrypting_key: PublicKey

    def __init__(self,
                 domain: str = None,
                 registry: BaseContractRegistry = None,
                 controller: bool = True,
                 federated_only: bool = False,
                 node_class: object = Ursula,
                 eth_provider_uri: str = None,
                 execution_timeout: int = DEFAULT_EXECUTION_TIMEOUT,
                 *args,
                 **kwargs):
        self.federated_only = federated_only

        if not self.federated_only:
            if not eth_provider_uri:
                raise ValueError(
                    'ETH Provider URI is required for decentralized Porter.')

            if not BlockchainInterfaceFactory.is_interface_initialized(
                    eth_provider_uri=eth_provider_uri):
                BlockchainInterfaceFactory.initialize_interface(
                    eth_provider_uri=eth_provider_uri)

            self.registry = registry or InMemoryContractRegistry.from_latest_publication(
                network=domain)
            self.application_agent = ContractAgency.get_agent(
                PREApplicationAgent, registry=self.registry)
        else:
            self.registry = NO_BLOCKCHAIN_CONNECTION.bool_value(False)
            node_class.set_federated_mode(federated_only)

        super().__init__(save_metadata=True,
                         domain=domain,
                         node_class=node_class,
                         *args,
                         **kwargs)

        self.log = Logger(self.__class__.__name__)
        self.execution_timeout = execution_timeout

        # Controller Interface
        self.interface = self._interface_class(porter=self)
        self.controller = NO_CONTROL_PROTOCOL
        if controller:
            # TODO need to understand this better - only made it analogous to what was done for characters
            self.make_cli_controller()
        self.log.info(self.BANNER)

    def get_ursulas(
        self,
        quantity: int,
        exclude_ursulas: Optional[Sequence[ChecksumAddress]] = None,
        include_ursulas: Optional[Sequence[ChecksumAddress]] = None
    ) -> List[UrsulaInfo]:
        reservoir = self._make_reservoir(quantity, exclude_ursulas,
                                         include_ursulas)
        value_factory = PrefetchStrategy(reservoir, quantity)
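        # The reservoir yields candidate addresses; PrefetchStrategy keeps feeding
        # the WorkerPool below with fresh candidates until `quantity` reachable
        # Ursulas have been confirmed.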

        def get_ursula_info(ursula_address) -> Porter.UrsulaInfo:
            if to_checksum_address(ursula_address) not in self.known_nodes:
                raise ValueError(f"{ursula_address} is not known")

            ursula_address = to_checksum_address(ursula_address)
            ursula = self.known_nodes[ursula_address]
            try:
                # ensure node is up and reachable
                self.network_middleware.ping(ursula)
                return Porter.UrsulaInfo(
                    checksum_address=ursula_address,
                    uri=f"{ursula.rest_interface.formal_uri}",
                    encrypting_key=ursula.public_keys(DecryptingPower))
            except Exception as e:
                self.log.debug(
                    f"Ursula ({ursula_address}) is unreachable: {str(e)}")
                raise

        self.block_until_number_of_known_nodes_is(
            quantity,
            timeout=self.execution_timeout,
            learn_on_this_thread=True,
            eager=True)

        worker_pool = WorkerPool(worker=get_ursula_info,
                                 value_factory=value_factory,
                                 target_successes=quantity,
                                 timeout=self.execution_timeout,
                                 stagger_timeout=1)
        worker_pool.start()
        try:
            successes = worker_pool.block_until_target_successes()
        finally:
            worker_pool.cancel()
            # don't wait for it to stop by "joining" - too slow...

        ursulas_info = successes.values()
        return list(ursulas_info)

    def retrieve_cfrags(
        self,
        treasure_map: TreasureMap,
        retrieval_kits: Sequence[RetrievalKit],
        alice_verifying_key: PublicKey,
        bob_encrypting_key: PublicKey,
        bob_verifying_key: PublicKey,
    ) -> List[RetrievalResult]:
        client = RetrievalClient(self)
        return client.retrieve_cfrags(treasure_map, retrieval_kits,
                                      alice_verifying_key, bob_encrypting_key,
                                      bob_verifying_key)

    def _make_reservoir(
            self,
            quantity: int,
            exclude_ursulas: Optional[Sequence[ChecksumAddress]] = None,
            include_ursulas: Optional[Sequence[ChecksumAddress]] = None):
        if self.federated_only:
            sample_size = quantity - (len(include_ursulas)
                                      if include_ursulas else 0)
            if not self.block_until_number_of_known_nodes_is(
                    sample_size,
                    timeout=self.execution_timeout,
                    learn_on_this_thread=True):
                raise ValueError("Unable to learn about sufficient Ursulas")
            return make_federated_staker_reservoir(
                known_nodes=self.known_nodes,
                exclude_addresses=exclude_ursulas,
                include_addresses=include_ursulas)
        else:
            return make_decentralized_staking_provider_reservoir(
                application_agent=self.application_agent,
                exclude_addresses=exclude_ursulas,
                include_addresses=include_ursulas)

    def make_cli_controller(self, crash_on_error: bool = False):
        controller = PorterCLIController(app_name=self.APP_NAME,
                                         crash_on_error=crash_on_error,
                                         interface=self.interface)
        self.controller = controller
        return controller

    def make_rpc_controller(self, crash_on_error: bool = False):
        controller = JSONRPCController(app_name=self.APP_NAME,
                                       crash_on_error=crash_on_error,
                                       interface=self.interface)

        self.controller = controller
        return controller

    def make_web_controller(self,
                            crash_on_error: bool = False,
                            htpasswd_filepath: Path = None,
                            cors_allow_origins_list: List[str] = None):
        controller = WebController(
            app_name=self.APP_NAME,
            crash_on_error=crash_on_error,
            interface=self._interface_class(porter=self))
        self.controller = controller

        # Register Flask Decorator
        porter_flask_control = controller.make_control_transport()

        # CORS origins
        if cors_allow_origins_list:
            try:
                from flask_cors import CORS
            except ImportError:
                raise ImportError(
                    'Porter installation is required to specify CORS origins '
                    '- run "pip install nucypher[porter]" and try again.')
            _ = CORS(app=porter_flask_control, origins=cors_allow_origins_list)

        # Basic Auth
        if htpasswd_filepath:
            try:
                from flask_htpasswd import HtPasswdAuth
            except ImportError:
                raise ImportError(
                    'Porter installation is required for basic authentication '
                    '- run "pip install nucypher[porter]" and try again.')

            porter_flask_control.config['FLASK_HTPASSWD_PATH'] = str(
                htpasswd_filepath.absolute())
            # ensure basic auth required for all endpoints
            porter_flask_control.config['FLASK_AUTH_ALL'] = True
            _ = HtPasswdAuth(app=porter_flask_control)

        #
        # Porter Control HTTP Endpoints
        #
        @porter_flask_control.route('/get_ursulas', methods=['GET'])
        def get_ursulas() -> Response:
            """Porter control endpoint for sampling Ursulas on behalf of Alice."""
            response = controller(method_name='get_ursulas',
                                  control_request=request)
            return response

        @porter_flask_control.route("/revoke", methods=['POST'])
        def revoke():
            """Porter control endpoint for off-chain revocation of a policy on behalf of Alice."""
            response = controller(method_name='revoke',
                                  control_request=request)
            return response

        @porter_flask_control.route("/retrieve_cfrags", methods=['POST'])
        def retrieve_cfrags() -> Response:
            """Porter control endpoint for executing a PRE work order on behalf of Bob."""
            response = controller(method_name='retrieve_cfrags',
                                  control_request=request)
            return response

        return controller
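
# --- Hedged usage sketch (not part of the original example) -------------------
# How Porter might be instantiated and queried; the domain, provider URI and the
# HTTP parameter encoding below are assumptions based on the signatures above.
#
#   porter = Porter(domain='mainnet',
#                   eth_provider_uri='https://<your-eth-node>',
#                   federated_only=False)
#   ursulas = porter.get_ursulas(quantity=3)   # -> List[Porter.UrsulaInfo]
#
#   # Or over HTTP, once make_web_controller() is serving on DEFAULT_PORT:
#   #   GET http://localhost:9155/get_ursulas?quantity=3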
Example #16
0
class AnalyzeGas:
    """
    Callable twisted log observer with built-in record-keeping for gas estimation runs.
    """

    # Logging
    LOG_NAME = 'estimate-gas'
    LOG_FILENAME = '{}.log.json'.format(LOG_NAME)
    OUTPUT_DIR = os.path.join(abspath(dirname(__file__)), 'results')
    JSON_OUTPUT_FILENAME = '{}.json'.format(LOG_NAME)

    _PATTERN = re.compile(r'''
                          ^          # Anchor at the start of a string
                          (.+)       # One or more characters; captured
                          \s=\s      # Space-Equal-Space
                          (\d+)      # A sequence of digits; Captured
                          \s\|\s     # Space-Pipe-Space
                          (\d+)      # A sequence of digits; Captured
                          $          # Anchor at the end of the string
                          ''', re.VERBOSE)

    def __init__(self) -> None:

        self.log = Logger(self.__class__.__name__)
        self.gas_estimations = dict()

        if not os.path.isdir(self.OUTPUT_DIR):
            os.mkdir(self.OUTPUT_DIR)

    @provider(ILogObserver)
    def __call__(self, event, *args, **kwargs) -> None:

        if event.get('log_namespace') == self.LOG_NAME:
            message = event.get("log_format")

            matches = self._PATTERN.match(message)
            if not matches:
                self.log.debug("No match for {} with pattern {}".format(message, self._PATTERN))
                return

            label, estimates, gas_used = matches.groups()
            self.paint_line(label, estimates, gas_used)
            self.gas_estimations[label] = int(gas_used)

    @staticmethod
    def paint_line(label: str, estimates: str, gas_used: str) -> None:
        print('{label} {estimates:7,} | {gas:7,}'.format(
            label=label.ljust(72, '.'), estimates=int(estimates), gas=int(gas_used)))

    def to_json_file(self) -> None:
        print('Saving JSON Output...')

        epoch_time = str(int(time.time()))
        timestamped_filename = '{}-{}'.format(epoch_time, self.JSON_OUTPUT_FILENAME)
        filepath = os.path.join(self.OUTPUT_DIR, timestamped_filename)
        with open(filepath, 'w') as file:
            file.write(json.dumps(self.gas_estimations, indent=4))

    def start_collection(self) -> None:
        print("Starting Data Collection...")

        json_filepath = os.path.join(self.OUTPUT_DIR, AnalyzeGas.LOG_FILENAME)
        json_io = io.open(json_filepath, "w")
        json_observer = jsonFileLogObserver(json_io)
        globalLogPublisher.addObserver(json_observer)
        globalLogPublisher.addObserver(self)
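
# --- Hedged illustration (not part of the original example) -------------------
# The sample log line below is an assumption shaped to match _PATTERN; it shows
# the "<label> = <estimate> | <gas used>" format the observer parses.
import re

pattern = re.compile(r'^(.+)\s=\s(\d+)\s\|\s(\d+)$')  # compact form of _PATTERN
label, estimate, gas_used = pattern.match("Policy creation = 210000 | 198345").groups()
assert (label, int(estimate), int(gas_used)) == ("Policy creation", 210000, 198345)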
Example #17
0
class SolidityCompiler:

    __default_contract_version = 'v0.0.0'
    __default_contract_dir = os.path.join(dirname(abspath(__file__)), 'source')

    __compiled_contracts_dir = 'contracts'
    __zeppelin_library_dir = 'zeppelin'
    __aragon_library_dir = 'aragon'

    optimization_runs = 200

    class CompilerError(Exception):
        pass

    class VersionError(Exception):
        pass

    @classmethod
    def default_contract_dir(cls):
        return cls.__default_contract_dir

    def __init__(self,
                 source_dirs: List[SourceDirs] = None,
                 ignore_solidity_check: bool = False) -> None:

        # Allow for optional installation
        from solcx.install import get_executable

        self.log = Logger('solidity-compiler')

        version = SOLIDITY_COMPILER_VERSION if not ignore_solidity_check else None
        self.__sol_binary_path = get_executable(version=version)

        if source_dirs is None or len(source_dirs) == 0:
            self.source_dirs = [
                SourceDirs(root_source_dir=self.__default_contract_dir)
            ]
        else:
            self.source_dirs = source_dirs

    def compile(self) -> dict:
        interfaces = dict()
        for root_source_dir, other_source_dirs in self.source_dirs:
            if root_source_dir is None:
                self.log.warn("One of the root directories is None")
                continue

            raw_interfaces = self._compile(root_source_dir, other_source_dirs)
            for name, data in raw_interfaces.items():
                # Extract contract version from docs
                version_search = re.search(
                    r"""
                
                \"details\":  # @dev tag in contract docs
                \".*?         # Skip any data in the beginning of details
                \|            # Beginning of version definition |
                (v            # Capture version starting from symbol v
                \d+           # At least one digit of major version
                \.            # Digits splitter
                \d+           # At least one digit of minor version
                \.            # Digits splitter
                \d+           # At least one digit of patch
                )             # End of capturing
                \|            # End of version definition |
                .*?\"         # Skip any data in the end of details
                
                """, data['devdoc'], re.VERBOSE)
                version = (version_search.group(1) if version_search
                           else self.__default_contract_version)
                try:
                    existence_data = interfaces[name]
                except KeyError:
                    existence_data = dict()
                    interfaces.update({name: existence_data})
                if version not in existence_data:
                    existence_data.update({version: data})
        return interfaces
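
    # compile() returns a nested mapping of the form {contract_name: {version: data}},
    # where the version falls back to __default_contract_version when no |vX.Y.Z|
    # marker is found in the contract's devdoc.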

    def _compile(self, root_source_dir: str, other_source_dirs: List[str]) -> dict:
        """Executes the compiler with parameters specified in the json config"""

        # Allow for optional installation
        from solcx import compile_files
        from solcx.exceptions import SolcError

        self.log.info("Using solidity compiler binary at {}".format(
            self.__sol_binary_path))
        contracts_dir = os.path.join(root_source_dir,
                                     self.__compiled_contracts_dir)
        self.log.info(
            "Compiling solidity source files at {}".format(contracts_dir))

        source_paths = set()
        source_walker = os.walk(top=contracts_dir, topdown=True)
        if other_source_dirs is not None:
            for source_dir in other_source_dirs:
                other_source_walker = os.walk(top=source_dir, topdown=True)
                source_walker = itertools.chain(source_walker,
                                                other_source_walker)

        for root, dirs, files in source_walker:
            for filename in files:
                if filename.endswith('.sol'):
                    path = os.path.join(root, filename)
                    source_paths.add(path)
                    self.log.debug(
                        "Collecting solidity source {}".format(path))

        # Compile with remappings: https://github.com/ethereum/py-solc
        zeppelin_dir = os.path.join(root_source_dir,
                                    self.__zeppelin_library_dir)
        aragon_dir = os.path.join(root_source_dir, self.__aragon_library_dir)

        remappings = (
            "contracts={}".format(contracts_dir),
            "zeppelin={}".format(zeppelin_dir),
            "aragon={}".format(aragon_dir),
        )

        self.log.info("Compiling with import remappings {}".format(
            ", ".join(remappings)))

        optimization_runs = self.optimization_runs

        try:
            compiled_sol = compile_files(source_files=source_paths,
                                         solc_binary=self.__sol_binary_path,
                                         import_remappings=remappings,
                                         allow_paths=root_source_dir,
                                         optimize=True,
                                         optimize_runs=optimization_runs)

            self.log.info("Successfully compiled {} contracts with {} optimization runs"
                          .format(len(compiled_sol), optimization_runs))

        except FileNotFoundError:
            raise RuntimeError(
                "The solidity compiler is not at the specified path. "
                "Check that the file exists and is executable.")
        except PermissionError:
            raise RuntimeError(
                "The solidity compiler binary at {} is not executable. "
                "Check the file's permissions.".format(self.__sol_binary_path))

        except SolcError:
            raise

        # Cleanup the compiled data keys
        interfaces = {
            name.split(':')[-1]: compiled_sol[name]
            for name in compiled_sol
        }
        return interfaces
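
# --- Hedged illustration (not part of the original example) -------------------
# The devdoc string below is a hypothetical sample shaped to match the version
# regex used in compile(); real solc output may differ.
import re

VERSION_PATTERN = re.compile(r'\"details\":\".*?\|(v\d+\.\d+\.\d+)\|.*?\"')
sample_devdoc = '{"details":"NuCypher contract |v1.2.3| see documentation"}'
match = VERSION_PATTERN.search(sample_devdoc)
assert match and match.group(1) == 'v1.2.3'

# Typical (hypothetical) usage of the compiler itself:
#   compiler = SolidityCompiler()        # compiles the default source directory
#   interfaces = compiler.compile()      # -> {contract_name: {version: data}}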