Example #1
    def setup(self, socket_type, complete_or_error_queue):
        """Setup the asyncio event loop.

        Args:
            socket_type (int from zmq.*): One of zmq.DEALER or zmq.ROUTER
            complete_or_error_queue (queue.Queue): A way to propagate errors
                back to the calling thread. Needed because this function is
                run directly on a Thread.

        Returns:
            None
        """
        try:
            if self._secured:
                if (self._server_public_key is None
                        or self._server_private_key is None):
                    raise LocalConfigurationError(
                        "Attempting to start socket in secure mode, "
                        "but complete server keys were not provided")

            self._event_loop = zmq.asyncio.ZMQEventLoop()
            asyncio.set_event_loop(self._event_loop)
            self._context = zmq.asyncio.Context()
            self._socket = self._context.socket(socket_type)
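            # A DEALER socket connects out to a server; a ROUTER socket
            # binds and accepts incoming connections.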

            if socket_type == zmq.DEALER:
                self._socket.identity = "{}-{}".format(
                    self._zmq_identity,
                    hashlib.sha512(
                        uuid.uuid4().hex.encode()).hexdigest()[:23]
                ).encode('ascii')

                if self._secured:
                    # Generate an ephemeral CURVE keypair for this connection

                    pubkey, secretkey = zmq.curve_keypair()
                    self._socket.curve_publickey = pubkey
                    self._socket.curve_secretkey = secretkey
                    self._socket.curve_serverkey = self._server_public_key

                self._socket.connect(self._address)
            elif socket_type == zmq.ROUTER:
                if self._secured:
                    auth = AsyncioAuthenticator(self._context)
                    self._auth = auth
                    auth.start()
                    auth.configure_curve(domain='*',
                                         location=zmq.auth.CURVE_ALLOW_ANY)

                    self._socket.curve_secretkey = self._server_private_key
                    self._socket.curve_publickey = self._server_public_key
                    self._socket.curve_server = True

                try:
                    self._socket.bind(self._address)
                except zmq.error.ZMQError as e:
                    raise LocalConfigurationError(
                        "Can't bind to {}: {}".format(
                            self._address, str(e))) from e
                else:
                    LOGGER.info("Listening on %s", self._address)

            self._dispatcher.add_send_message(self._connection,
                                              self.send_message)
            self._dispatcher.add_send_last_message(self._connection,
                                                   self.send_last_message)

            asyncio.ensure_future(self._receive_message(),
                                  loop=self._event_loop)
            if self._monitor:
                self._monitor_fd = "inproc://monitor.s-{}".format(
                    _generate_id()[0:5])
                self._monitor_sock = self._socket.get_monitor_socket(
                    zmq.EVENT_DISCONNECTED, addr=self._monitor_fd)
                asyncio.ensure_future(self._monitor_disconnects(),
                                      loop=self._event_loop)

        except Exception as e:
            # Put the exception on the queue, where the calling thread's
            # start() is waiting for it, then re-raise on this thread.
            complete_or_error_queue.put_nowait(e)
            raise

        if self._heartbeat:
            asyncio.ensure_future(self._do_heartbeat(), loop=self._event_loop)

        # Signal that the setup tasks completed by putting a sentinel
        # on the queue.
        complete_or_error_queue.put_nowait(_STARTUP_COMPLETE_SENTINEL)

        asyncio.ensure_future(self._notify_started(), loop=self._event_loop)

        self._event_loop.run_forever()
        # A call to event_loop.stop() elsewhere causes run_forever() to
        # return; the loop can then be closed and the context destroyed.
        self._event_loop.close()
        self._socket.close(linger=0)
        if self._monitor:
            self._monitor_sock.close(linger=0)
        self._context.destroy(linger=0)
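
The complete_or_error_queue handshake above implies a caller that runs
setup() on a background thread and blocks until either the sentinel or an
exception arrives. A minimal sketch of such a caller, assuming an
already-constructed connection object; everything here except setup() and
_STARTUP_COMPLETE_SENTINEL is illustrative, not part of the example above.

import queue
import threading

import zmq


def start(connection):
    # Hypothetical driver: run setup() on its own thread and wait for it
    # to report success or failure through the queue.
    complete_or_error_queue = queue.Queue()
    thread = threading.Thread(
        target=connection.setup,
        args=(zmq.ROUTER, complete_or_error_queue))
    thread.daemon = True
    thread.start()

    # setup() puts either _STARTUP_COMPLETE_SENTINEL or the exception it
    # raised on the queue; block until one of them arrives.
    result = complete_or_error_queue.get(block=True)
    if result is not _STARTUP_COMPLETE_SENTINEL:
        raise result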
Example #2
def load_toml_validator_config(filename):
    """Returns a ValidatorConfig created by loading a TOML file from the
    filesystem.
    """
    if not os.path.exists(filename):
        LOGGER.info(
            "Skipping validator config loading from non-existent config file:"
            " %s", filename)
        return ValidatorConfig()

    LOGGER.info("Loading validator information from config: %s", filename)

    try:
        with open(filename) as fd:
            raw_config = fd.read()
    except IOError as e:
        raise LocalConfigurationError(
            "Unable to load validator configuration file: {}".format(
                str(e))) from e

    toml_config = toml.loads(raw_config)
    invalid_keys = set(toml_config.keys()).difference(
        ['bind', 'endpoint', 'peering', 'seeds', 'peers', 'network_public_key',
         'network_private_key', 'scheduler', 'permissions', 'roles',
         'opentsdb_url', 'opentsdb_db', 'opentsdb_username',
         'opentsdb_password', 'minimum_peer_connectivity',
         'maximum_peer_connectivity', 'state_pruning_block_depth',
         'fork_cache_keep_time'])
    if invalid_keys:
        raise LocalConfigurationError(
            "Invalid keys in validator config: "
            "{}".format(", ".join(sorted(list(invalid_keys)))))
    bind_network = None
    bind_component = None
    bind_consensus = None
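    # Each "bind" entry is expected to look like "network:tcp://host:port";
    # the match is by substring, and the bound endpoint is everything after
    # the first colon.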
    for bind in toml_config.get("bind", []):
        if "network" in bind:
            bind_network = bind[bind.find(":") + 1:]
        if "component" in bind:
            bind_component = bind[bind.find(":") + 1:]
        if "consensus" in bind:
            bind_consensus = bind[bind.find(":") + 1:]

    network_public_key = None
    network_private_key = None

    if toml_config.get("network_public_key") is not None:
        network_public_key = toml_config.get("network_public_key").encode()

    if toml_config.get("network_private_key") is not None:
        network_private_key = toml_config.get("network_private_key").encode()

    config = ValidatorConfig(
        bind_network=bind_network,
        bind_component=bind_component,
        bind_consensus=bind_consensus,
        endpoint=toml_config.get("endpoint", None),
        peering=toml_config.get("peering", None),
        seeds=toml_config.get("seeds", None),
        peers=toml_config.get("peers", None),
        network_public_key=network_public_key,
        network_private_key=network_private_key,
        scheduler=toml_config.get("scheduler", None),
        permissions=parse_permissions(toml_config.get("permissions", None)),
        roles=toml_config.get("roles", None),
        opentsdb_url=toml_config.get("opentsdb_url", None),
        opentsdb_db=toml_config.get("opentsdb_db", None),
        opentsdb_username=toml_config.get("opentsdb_username", None),
        opentsdb_password=toml_config.get("opentsdb_password", None),
        minimum_peer_connectivity=toml_config.get(
            "minimum_peer_connectivity", None),
        maximum_peer_connectivity=toml_config.get(
            "maximum_peer_connectivity", None),
        state_pruning_block_depth=toml_config.get(
            "state_pruning_block_depth", None),
        fork_cache_keep_time=toml_config.get(
            "fork_cache_keep_time", None),
    )

    return config
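
A short usage sketch for load_toml_validator_config(), assuming toml is
installed and that ValidatorConfig, parse_permissions, LOGGER, and
LocalConfigurationError are importable as in the example; the file
contents and asserted values are illustrative.

import os
import tempfile

_SAMPLE = """\
bind = [
  "network:tcp://127.0.0.1:8800",
  "component:tcp://127.0.0.1:4004",
]
peering = "static"
peers = ["tcp://127.0.0.1:8801"]
minimum_peer_connectivity = 3
"""

# Write the sample config to a temporary file, then load it back.
with tempfile.NamedTemporaryFile(
        mode='w', suffix='.toml', delete=False) as fd:
    fd.write(_SAMPLE)
    path = fd.name

try:
    config = load_toml_validator_config(path)
    # Each "bind" entry is split on its first colon, so only the endpoint
    # remains.
    assert config.bind_network == "tcp://127.0.0.1:8800"
    assert config.bind_component == "tcp://127.0.0.1:4004"
finally:
    os.unlink(path)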