Example No. 1
class Subscriber(object):
    def __init__(self, validator_url):
        LOGGER.info('Connecting to validator: %s', validator_url)
        self._stream = Stream(validator_url)
        self._event_handlers = []

    def add_handler(self, handler):
        self._event_handlers.append(handler)

    def listen_to_event(self):
        self._stream.wait_for_ready()
        # Step 1: Construct a Subscription
        block_sub = EventSubscription(event_type='sawtooth/block-commit')

        # Step 2: Submit the Event Subscription
        request = ClientEventsSubscribeRequest(subscriptions=[block_sub])

        response_future = self._stream.send(
            Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
            request.SerializeToString())

        response = ClientEventsSubscribeResponse()
        response.ParseFromString(response_future.result().content)

        # Listen for events in an infinite loop
        LOGGER.warning("Listening to events.")
        while True:
            msg = self._stream.receive()
            event_list = EventList()
            event_list.ParseFromString(msg.result().content)
            for handler in self._event_handlers:
                handler(event_list.events)
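A minimal usage sketch for the Subscriber class above, assuming the sawtooth_sdk Python package import paths, a validator reachable at tcp://localhost:4004, and a simple print-based handler:

import logging

from sawtooth_sdk.messaging.stream import Stream
from sawtooth_sdk.protobuf.validator_pb2 import Message
from sawtooth_sdk.protobuf.events_pb2 import EventSubscription, EventList
from sawtooth_sdk.protobuf.client_event_pb2 import (
    ClientEventsSubscribeRequest, ClientEventsSubscribeResponse)

LOGGER = logging.getLogger(__name__)


def print_events(events):
    # Hypothetical handler: receives the repeated events of an EventList.
    for event in events:
        print(event.event_type, list(event.attributes))


subscriber = Subscriber('tcp://localhost:4004')  # assumed validator endpoint
subscriber.add_handler(print_events)
subscriber.listen_to_event()  # blocks, dispatching block-commit events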
Example No. 2
def main():
    stream = None
    try:
        opts = parse_args(sys.argv[1:])
        stream = Stream(opts.stream_url)

        log_config = get_log_config(filename="rest_api_log_config.toml")
        if log_config is not None:
            log_configuration(log_config=log_config)
        else:
            log_dir = get_log_dir()
            log_configuration(log_dir=log_dir, name="sawtooth_rest_api")
        init_console_logging(verbose_level=opts.verbose)

        start_rest_api(
            opts.host,
            int(opts.port),
            stream,
            int(opts.timeout))
        # pylint: disable=broad-except
    except Exception as e:
        print("Error: {}".format(e), file=sys.stderr)
        sys.exit(1)
    finally:
        if stream is not None:
            stream.close()
Example No. 3
def do_load(args):
    with open(args.filename, mode='rb') as fd:
        batches = batch_pb2.BatchList()
        batches.ParseFromString(fd.read())

    stream = Stream(args.url)
    futures = []
    start = time.time()

    for batch_list in _split_batch_list(batches):
        future = stream.send(
            message_type=Message.CLIENT_BATCH_SUBMIT_REQUEST,
            content=batch_list.SerializeToString())
        futures.append(future)

    for future in futures:
        try:
            # ValidatorConnectionError is raised by future.result(), so it
            # must be inside the try block for the except clause to catch it.
            result = future.result()
            assert result.message_type == Message.CLIENT_BATCH_SUBMIT_RESPONSE
        except ValidatorConnectionError as vce:
            LOGGER.warning("the future resolved to %s", vce)

    stop = time.time()
    print("batches: {} batch/sec: {}".format(
        str(len(batches.batches)),
        len(batches.batches) / (stop - start)))

    stream.close()
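The _split_batch_list helper used above is not shown; a plausible sketch (the chunk size of 100 is an assumption) that yields submission-sized BatchList messages:

def _split_batch_list(batch_list, chunk_size=100):
    # Assumed helper: yields BatchList messages of at most chunk_size batches.
    chunk = []
    for batch in batch_list.batches:
        chunk.append(batch)
        if len(chunk) == chunk_size:
            yield batch_pb2.BatchList(batches=chunk)
            chunk = []
    if chunk:
        yield batch_pb2.BatchList(batches=chunk)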
Example No. 4
    def start(self, endpoint):
        self._stream = Stream(endpoint)

        (chain_head, peers) = self._register()

        self._updates = Queue()

        engine_thread = Thread(
            target=self._engine.start,
            args=(self._updates,
                  ZmqService(stream=self._stream,
                             timeout=SERVICE_TIMEOUT,
                             name=self._engine.name(),
                             version=self._engine.version()), chain_head,
                  peers))

        engine_thread.start()

        while True:
            if self._exit:
                self._engine.stop()
                engine_thread.join()
                break

            try:
                message = self._stream.receive().result(0.1)
            except concurrent.futures.TimeoutError:
                continue

            result = self._process(message)

            self._updates.put(result)
Example No. 5
def listen_to_events(delta_filters=None):
    '''Listen to AC maintenance events from the validator.'''

    # Build the event subscriptions
    block_commit_subscription = events_pb2.EventSubscription(
        event_type="sawtooth/block-commit")
    state_delta_subscription = events_pb2.EventSubscription(
        event_type="sawtooth/state-delta", filters=delta_filters)
    fine_subscription = events_pb2.EventSubscription(
        event_type="AC is in good condition")
    problem_subscription = events_pb2.EventSubscription(
        event_type="AC is malfunctioning")
    fixed_subscription = events_pb2.EventSubscription(
        event_type="Maintenance fixed the AC")
    notfixed_subscription = events_pb2.EventSubscription(
        event_type="Maintenance hasn't fixed the AC yet")
    # Only the AC subscriptions are submitted; the block-commit and
    # state-delta subscriptions above are built but left unused.
    request = client_event_pb2.ClientEventsSubscribeRequest(
        subscriptions=[fine_subscription, problem_subscription,
                       fixed_subscription, notfixed_subscription])

    # Send the subscription request
    stream = Stream(DEFAULT_VALIDATOR_URL)
    msg = stream.send(message_type=Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
                      content=request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_SUBSCRIBE_RESPONSE

    # Parse the subscription response
    response = client_event_pb2.ClientEventsSubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsSubscribeResponse.OK

    # Listen for a single batch of events
    print("Listening to events.")

    msg = stream.receive().result()
    assert msg.message_type == Message.CLIENT_EVENTS

    # Parse the response
    event_list = events_pb2.EventList()
    event_list.ParseFromString(msg.content)
    print("Received the following events: ----------")
    notification = []
    for event in event_list.events:
        notification.append((event.event_type, event.attributes))
        # server_socket(notification)

    # Unsubscribe from events
    request = client_event_pb2.ClientEventsUnsubscribeRequest()
    msg = stream.send(Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
                      request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_UNSUBSCRIBE_RESPONSE

    # Parse the unsubscribe response
    response = client_event_pb2.ClientEventsUnsubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsUnsubscribeResponse.OK
    return notification
Example No. 6
    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        # Validators version 1.1 send startup info with the registration
        # response; newer versions will send an activation message with the
        # startup info
        if startup_state is None:
            startup_state = self._wait_until_active()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()
Example No. 7
    def __init__(self, url):
        """
        Args:
            url (string): The URL of the validator
        """
        self._stream = Stream(url)
        self._url = url
        self._handlers = []
Example No. 8
class eventHandler:
    def __init__(self, validatorUrl):
        self.stream = Stream(validatorUrl)

    def generateFilters(self):
        pass

    def generateSubscritionRequest(self, eventTypes, deltaFilters):
        subscriptions = []
        if len(eventTypes) == len(deltaFilters):
            print("same length")
            for i in range(len(eventTypes)):
                subscription = events_pb2.EventSubscription(
                    event_type=eventTypes[i], filters=deltaFilters[i])
                subscriptions.append(subscription)
            print(subscriptions)
            subscriptionRequest = client_event_pb2.ClientEventsSubscribeRequest(
                subscriptions=subscriptions)
            return subscriptionRequest
        else:
            print("Error: eventTypes and deltaFilters length mismatch!",
                  flush=True)

    def listenToEvents(self, eventTypes, subsciptionRequest):

        print("here", eventTypes)
        message = self.stream.send(
            message_type=Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
            content=subsciptionRequest.SerializeToString()).result()
        assert message.message_type == \
            Message.CLIENT_EVENTS_SUBSCRIBE_RESPONSE, \
            "Undefined Message Response Type"
        response = client_event_pb2.ClientEventsSubscribeResponse()
        response.ParseFromString(message.content)
        assert response.status == \
            client_event_pb2.ClientEventsSubscribeResponse.OK, \
            "Status: Message Response Not Okay"
        while True:
            streamMsg = self.stream.receive().result()
            assert streamMsg.message_type == Message.CLIENT_EVENTS, \
                "Stream Message Type Undefined"
            # Parse the response
            eventList = events_pb2.EventList()
            eventList.ParseFromString(streamMsg.content)
            for event in eventList.events:
                if event.event_type in eventTypes:
                    print("Event Of Type " + event.event_type + " Received",
                          flush=True)
                    print("Event : " + str(event), flush=True)

    def unsubscribeEvent(self):
        request = client_event_pb2.ClientEventsUnsubscribeRequest()
        msg = self.stream.send(Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
                               request.SerializeToString()).result()
        assert msg.message_type == Message.CLIENT_EVENTS_UNSUBSCRIBE_RESPONSE

        response = client_event_pb2.ClientEventsUnsubscribeResponse()
        response.ParseFromString(msg.content)
        assert response.status == \
            client_event_pb2.ClientEventsUnsubscribeResponse.OK
        print("Events Unsubscribed!", flush=True)
Example No. 9
    def __init__(self, url):
        """
        Args:
            url (string): The URL of the validator
        """
        self._stream = Stream(url)
        self._url = url
        self._handlers = []
        self._highest_sdk_feature_requested = \
            self._FeatureVersion.FEATURE_UNUSED
        self._header_style = TpRegisterRequest.HEADER_STYLE_UNSET
Example No. 10
def do_subscribe(opts):
    opts_config = SubscriberConfig(
        connect=opts.connect,
        database_name=opts.database_name,
        database_host=opts.database_host,
        database_port=opts.database_port,
        database_user=opts.database_user,
        database_password=opts.database_password)
    subscriber_config = load_subscriber_config(opts_config)

    subscriber = None
    stream = None
    connection = None
    # pylint: disable=broad-except
    try:

        url = None
        if "tcp://" not in subscriber_config.connect:
            url = "tcp://" + subscriber_config.connect
        else:
            url = subscriber_config.connect

        stream = Stream(url)
        connection = psycopg2.connect(
            dbname=subscriber_config.database_name,
            host=subscriber_config.database_host,
            port=subscriber_config.database_port,
            user=subscriber_config.database_user,
            password=subscriber_config.database_password)
        subscriber = Subscriber(stream, connection)

        log_config = get_log_config(
            filename="supplychain_sds_log_config.toml")
        if log_config is not None:
            log_configuration(log_config=log_config)
        else:
            log_dir = get_log_dir()
            # use the stream zmq identity for filename
            log_configuration(
                log_dir=log_dir,
                name="supplychain-sds-" + str(stream.zmq_id)[2:-1])

        subscriber.start()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print('Error: {}'.format(e), file=sys.stderr)
    finally:
        if subscriber is not None:
            subscriber.shutdown()
        if stream is not None:
            stream.close()
        if connection is not None:
            connection.close()
Example No. 11
def listen_to_events(delta_filters=None):
    '''Listen to cookiejar state-delta events.'''

    # Subscribe to events
    block_commit_subscription = events_pb2.EventSubscription(
        event_type="sawtooth/block-commit")
    state_delta_subscription = events_pb2.EventSubscription(
        event_type="sawtooth/state-delta", filters=delta_filters)
    bake_subscription = events_pb2.EventSubscription(
        event_type="cookiejar/bake")
    eat_subscription = events_pb2.EventSubscription(event_type="cookiejar/eat")
    request = client_event_pb2.ClientEventsSubscribeRequest(subscriptions=[
        block_commit_subscription, state_delta_subscription, bake_subscription,
        eat_subscription
    ])

    # Send the subscription request
    stream = Stream(DEFAULT_VALIDATOR_URL)
    msg = stream.send(message_type=Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
                      content=request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_SUBSCRIBE_RESPONSE

    # Parse the subscription response
    response = client_event_pb2.ClientEventsSubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsSubscribeResponse.OK

    # Listen for events in an infinite loop
    print("Listening to events.")
    while True:
        msg = stream.receive().result()
        assert msg.message_type == Message.CLIENT_EVENTS

        # Parse the response
        event_list = events_pb2.EventList()
        event_list.ParseFromString(msg.content)
        print("Received the following events: ----------")
        for event in event_list.events:
            print(event)

    # Unsubscribe from events (not reached while the loop above runs)
    request = client_event_pb2.ClientEventsUnsubscribeRequest()
    msg = stream.send(Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
                      request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_UNSUBSCRIBE_RESPONSE

    # Parse the unsubscribe response
    response = client_event_pb2.ClientEventsUnsubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsUnsubscribeResponse.OK
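Application events such as cookiejar/bake above are emitted by a transaction handler; a minimal sketch of the relevant part of a handler's apply(), assuming the standard sawtooth_sdk Context.add_event API, with a hypothetical handler class and attribute value:

class CookieJarHandler:
    # Sketch only: family metadata and payload handling omitted.
    def apply(self, transaction, context):
        context.add_event(
            event_type='cookiejar/bake',           # must match the subscription
            attributes=[('cookies_baked', '12')])  # hypothetical attribute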
Example No. 12
def unsubscribe_from_events():
    # Unsubscribe from events
    stream = Stream(DEFAULT_VALIDATOR_URL)
    request = client_event_pb2.ClientEventsUnsubscribeRequest()
    msg = stream.send(Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
                      request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_UNSUBSCRIBE_RESPONSE

    # Parse the unsubscribe response
    response = client_event_pb2.ClientEventsUnsubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsUnsubscribeResponse.OK
Example No. 13
    def __init__(self,
                 family_handler,
                 test_helper=None,
                 keyfile=PRIV_KEY_FILE):
        self.url = REST_API_URL
        self._family_handler = family_handler
        self.test_helper = test_helper
        self._stream = Stream(ZMQ_URL)

        try:
            self._signer = self.get_signer_priv_key_from_file(keyfile)
        except ClientException as e:
            LOGGER.warning('Could not set up signer from file, detailed: %s', e)
            self._signer = self.generate_signer(keyfile)
Example No. 14
def listen_to_events():
    # Listen for events in an infinite loop
    stream = Stream(DEFAULT_VALIDATOR_URL)
    print("Listening to events.")
    while True:
        msg = stream.receive().result()
        assert msg.message_type == Message.CLIENT_EVENTS

        # Parse the response
        event_list = events_pb2.EventList()
        event_list.ParseFromString(msg.content)
        print("Received the following events: ----------")
        for event in event_list.events:
            print(event)
Example No. 15
    def __init__(self, url, benchmark, expected_event_count):
        self._event = '{}-{}'.format(benchmark, EVENT_SUFFIX)
        self._url = url
        self.counter = 0
        self._is_active = False
        self._expected_event_count = expected_event_count
        self._stream = Stream('tcp://{}:4004'.format(self._url))

        date = datetime.date.today()
        logging_file_name = '{}-{}-benchmark-events.csv'.format(
            str(date), benchmark)

        self._event_log_file = path.join(DEFAULT_MONITORING_FOLDER,
                                         logging_file_name)
Example No. 16
    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        # Validators version 1.1 send startup info with the registration
        # response; newer versions will send an activation message with the
        # startup info
        if startup_state is None:
            startup_state = self._wait_until_active()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()
Example No. 17
    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT,
                    name=self._engine.name(),
                    version=self._engine.version()),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()
Example No. 18
    def __init__(self, url):
        """
        Args:
            url (string): The URL of the validator
        """
        self._stream = Stream(url)
        self._url = url
        self._handlers = []
Example No. 19
def main():
    stream = None
    try:
        opts = parse_args(sys.argv[1:])
        stream = Stream(opts.stream_url)
        start_rest_api(
            opts.host,
            int(opts.port),
            stream,
            int(opts.timeout))
        # pylint: disable=broad-except
    except Exception as e:
        print("Error: {}".format(e), file=sys.stderr)
        sys.exit(1)
    finally:
        if stream is not None:
            stream.close()
Example No. 20
    def __init__(self, family_handler, test_helper=None, keyfile=None):
        config = load_toml_with_defaults(
            '/config/remme-client-config.toml')['remme']['client']

        self.url = config['validator_rest_api_url']
        self._family_handler = family_handler
        self.test_helper = test_helper
        self._stream = Stream(
            f'tcp://{ config["validator_ip"] }:{ config["validator_port"] }')

        if keyfile is None:
            keyfile = PRIV_KEY_FILE

        try:
            self._signer = self.get_signer_priv_key_from_file(keyfile)
        except ClientException as e:
            LOGGER.warning('Could not set up signer from file, detailed: %s', e)
            self._signer = self.generate_signer(keyfile)
Example No. 21
    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        self._updates = Queue()

        driver_thread = Thread(target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(stream=self._stream, timeout=SERVICE_TIMEOUT),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()
Example No. 22
    def __init__(self, service, component_endpoint, config_dir, data_dir,
                 key_dir):
        self._config_dir = config_dir
        self._data_dir = data_dir
        self._signer = _load_identity_signer(key_dir, 'validator')
        self._validator_id = self._signer.get_public_key().as_hex()

        stream = Stream(component_endpoint)

        self._block_cache = _BlockCacheProxy(service, stream)
        self._state_view_factory = _StateViewFactoryProxy(service)

        self._batch_publisher = _BatchPublisherProxy(stream, self._signer)
        self._publisher = None
Example No. 23
    def start(self, endpoint):
        LOGGER.debug('ZmqDriver: start endpoint=%s', endpoint)
        self._stream = Stream(endpoint)

        startup_state = self._register()

        self._updates = Queue()

        driver_thread = Thread(target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(stream=self._stream,
                           timeout=SERVICE_TIMEOUT,
                           name=self._engine.name(),
                           version=self._engine.version()), startup_state)
        except Exception as ex:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception(%s)", ex)

        self.stop()
        driver_thread.join()
Example No. 24
def subscribe_to_events(delta_filters=None):
    '''Listen to attestation state-delta events.'''


    # Subscribe to events
    trust_path_subscription = events_pb2.EventSubscription(
        event_type="attestation/trustpath", filters=delta_filters)
    trust_entry_subscription = events_pb2.EventSubscription(
        event_type="attestation/entrypoint", filters=delta_filters)
    request = client_event_pb2.ClientEventsSubscribeRequest(
        subscriptions=[trust_path_subscription, trust_entry_subscription])


    # Send the subscription request
    stream = Stream(DEFAULT_VALIDATOR_URL)
    msg = stream.send(message_type=Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
                      content=request.SerializeToString()).result()
    assert msg.message_type == Message.CLIENT_EVENTS_SUBSCRIBE_RESPONSE

    # Parse the subscription response
    response = client_event_pb2.ClientEventsSubscribeResponse()
    response.ParseFromString(msg.content)
    assert response.status == \
           client_event_pb2.ClientEventsSubscribeResponse.OK
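The delta_filters argument above is typically a list of EventFilter messages that restrict sawtooth/state-delta events by state address; a hedged sketch, with a hypothetical six-character namespace prefix:

NAMESPACE_PREFIX = 'a1b2c3'  # hypothetical transaction family namespace
delta_filters = [events_pb2.EventFilter(
    key='address',
    match_string=NAMESPACE_PREFIX + '.*',
    filter_type=events_pb2.EventFilter.REGEX_ANY)]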
Example No. 25
    def __init__(self, service, component_endpoint, config_dir, data_dir,
                 key_dir):
        self._config_dir = config_dir
        self._data_dir = data_dir
        LOGGER.debug('BgtOracle: Stream key_dir=%s', key_dir)
        self._signer = _load_identity_signer(key_dir, 'validator')
        self._validator_id = self._signer.get_public_key().as_hex()
        self._service = service
        LOGGER.debug('BgtOracle: Stream component_endpoint=%s',
                     component_endpoint)
        stream = Stream(component_endpoint)

        self._block_cache = _BlockCacheProxy(service, stream)
        self._state_view_factory = _StateViewFactoryProxy(service)

        self._batch_publisher = _BatchPublisherProxy(stream, self._signer)
        self._publisher = None
        self._can_fail_block = False  #True
        LOGGER.debug('BgtOracle:validator=%s init DONE',
                     _short_id(self._validator_id))
Example No. 26
    def __init__(self, service, component_endpoint, config_dir, data_dir,
                 key_dir):

        self._config_dir = config_dir
        self._data_dir = data_dir
        self._service = service
        LOGGER.debug('PbftOracle: Stream key_dir=%s', key_dir)
        self._signer = _load_identity_signer(key_dir, 'validator')
        self._validator_id = self._signer.get_public_key().as_hex()

        LOGGER.debug('PbftOracle: Stream component_endpoint=%s ',
                     component_endpoint)
        stream = Stream(component_endpoint)

        self._block_cache = _BlockCacheProxy(service, stream)
        self._state_view_factory = _StateViewFactoryProxy(service)

        self._batch_publisher = _BatchPublisherProxy(stream, self._signer)
        self._publisher = None
        self._consensus_state_store = ConsensusStateStore(
            data_dir=self._data_dir, validator_id=self._validator_id)
        LOGGER.debug('PbftOracle: _validator_id=%s init DONE',
                     self._validator_id)
Example No. 27
def main():
    stream = None
    try:
        opts = parse_args(sys.argv[1:])
        opts_config = RestApiConfig(bind=opts.bind,
                                    connect=opts.connect,
                                    timeout=opts.timeout)
        rest_api_config = load_rest_api_config(opts_config)
        if "tcp://" not in rest_api_config.connect:
            stream = Stream("tcp://" + rest_api_config.connect)
        else:
            stream = Stream(rest_api_config.connect)
        log_config = get_log_config(filename="rest_api_log_config.toml")
        if log_config is not None:
            log_configuration(log_config=log_config)
        else:
            log_dir = get_log_dir()
            log_configuration(log_dir=log_dir, name="sawtooth_rest_api")
        init_console_logging(verbose_level=opts.verbose)

        try:
            host, port = rest_api_config.bind[0].split(":")
            port = int(port)
        except ValueError as e:
            print("Unable to parse binding {}: Must be in the format"
                  " host:port".format(rest_api_config.bind[0]))
            sys.exit(1)

        start_rest_api(host, port, stream, int(rest_api_config.timeout))
        # pylint: disable=broad-except
    except Exception as e:
        print("Error: {}".format(e), file=sys.stderr)
        sys.exit(1)
    finally:
        if stream is not None:
            stream.close()
Example No. 28
    def setUp(self):
        self.url = "tcp://validator:4004"
        self.stream = Stream(self.url)
Example No. 29
class ZmqDriver(Driver):
    def __init__(self, engine):
        super().__init__(engine)
        self._engine = engine
        self._stream = None
        self._exit = False
        self._updates = None

    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        # Validators version 1.1 send startup info with the registration
        # response; newer versions will send an activation message with the
        # startup info
        if startup_state is None:
            startup_state = self._wait_until_active()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()

    def _driver_loop(self):
        try:
            future = self._stream.receive()
            while True:
                if self._exit:
                    self._engine.stop()
                    break

                try:
                    message = future.result(1)
                    future = self._stream.receive()
                except concurrent.futures.TimeoutError:
                    continue

                result = self._process(message)

                self._updates.put(result)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught driver exception")

    def stop(self):
        self._exit = True
        self._engine.stop()
        self._stream.close()

    def _register(self):
        self._stream.wait_for_ready()

        request = consensus_pb2.ConsensusRegisterRequest(
            name=self._engine.name(),
            version=self._engine.version(),
        ).SerializeToString()

        while True:
            future = self._stream.send(
                message_type=Message.CONSENSUS_REGISTER_REQUEST,
                content=request)
            response = consensus_pb2.ConsensusRegisterResponse()
            response.ParseFromString(future.result(REGISTER_TIMEOUT).content)

            if (
                response.status
                == consensus_pb2.ConsensusRegisterResponse.NOT_READY
            ):
                continue

            if response.status == consensus_pb2.ConsensusRegisterResponse.OK:
                if (
                    response.HasField('chain_head')
                    and response.HasField('local_peer_info')
                ):
                    return StartupState(
                        response.chain_head,
                        response.peers,
                        response.local_peer_info)

                return None

            raise exceptions.ReceiveError(
                'Registration failed with status {}'.format(response.status))

    def _wait_until_active(self):
        future = self._stream.receive()
        while True:
            try:
                message = future.result(1)
            except concurrent.futures.TimeoutError:
                continue

            if (
                message.message_type
                == Message.CONSENSUS_NOTIFY_ENGINE_ACTIVATED
            ):
                notification = \
                    consensus_pb2.ConsensusNotifyEngineActivated()
                notification.ParseFromString(message.content)

                startup_state = StartupState(
                    notification.chain_head,
                    notification.peers,
                    notification.local_peer_info)

                LOGGER.info(
                    'Received activation message with startup state: %s',
                    startup_state)

                self._stream.send_back(
                    message_type=Message.CONSENSUS_NOTIFY_ACK,
                    correlation_id=message.correlation_id,
                    content=consensus_pb2.ConsensusNotifyAck()
                                         .SerializeToString())

                return startup_state

            LOGGER.warning(
                'Received message type %s while waiting for '
                'activation message', message.message_type)
            future = self._stream.receive()

    def _process(self, message):
        type_tag = message.message_type

        if type_tag == Message.CONSENSUS_NOTIFY_PEER_CONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerConnected()
            notification.ParseFromString(message.content)

            data = notification.peer_info

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_DISCONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerDisconnected()
            notification.ParseFromString(message.content)

            data = notification.peer_id

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_MESSAGE:
            notification = consensus_pb2.ConsensusNotifyPeerMessage()
            notification.ParseFromString(message.content)

            header = consensus_pb2.ConsensusPeerMessageHeader()
            header.ParseFromString(notification.message.header)

            peer_message = PeerMessage(
                header=header,
                header_bytes=notification.message.header,
                header_signature=notification.message.header_signature,
                content=notification.message.content)

            data = peer_message, notification.sender_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_NEW:
            notification = consensus_pb2.ConsensusNotifyBlockNew()
            notification.ParseFromString(message.content)

            data = notification.block

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_VALID:
            notification = consensus_pb2.ConsensusNotifyBlockValid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_INVALID:
            notification = consensus_pb2.ConsensusNotifyBlockInvalid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_COMMIT:
            notification = consensus_pb2.ConsensusNotifyBlockCommit()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_ENGINE_DEACTIVATED:
            self.stop()
            data = None

        else:
            raise exceptions.ReceiveError(
                'Received unexpected message type: {}'.format(type_tag))

        self._stream.send_back(
            message_type=Message.CONSENSUS_NOTIFY_ACK,
            correlation_id=message.correlation_id,
            content=consensus_pb2.ConsensusNotifyAck().SerializeToString())

        return type_tag, data
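The ZmqDriver above is normally constructed and started from a consensus engine's entry point; a minimal sketch, assuming an engine object implementing the SDK's Engine interface and the conventional consensus endpoint:

# Sketch only: MyEngine is hypothetical and must implement the Engine
# interface (name(), version(), start(), stop(), ...).
driver = ZmqDriver(MyEngine())
try:
    driver.start(endpoint='tcp://localhost:5050')  # assumed consensus endpoint
except KeyboardInterrupt:
    pass
finally:
    driver.stop()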
Example No. 30
    def on_validator_discovered(self, url):
        stream = Stream(url)
        self._streams.append(stream)
Example No. 31
    def __init__(self, validator_url):
        LOGGER.info('Connecting to validator: %s', validator_url)
        self._stream = Stream(validator_url)
        self._event_handlers = []
        self._is_active = False
Example No. 32
class ZmqDriver(Driver):
    def __init__(self, engine):
        super().__init__(engine)
        self._engine = engine
        self._stream = None
        self._exit = False
        self._updates = None

    def start(self, endpoint):
        self._stream = Stream(endpoint)

        (chain_head, peers) = self._register()

        self._updates = Queue()

        engine_thread = Thread(
            target=self._engine.start,
            args=(self._updates,
                  ZmqService(stream=self._stream,
                             timeout=SERVICE_TIMEOUT,
                             name=self._engine.name(),
                             version=self._engine.version()), chain_head,
                  peers))

        engine_thread.start()

        while True:
            if self._exit:
                self._engine.stop()
                engine_thread.join()
                break

            try:
                message = self._stream.receive().result(0.1)
            except concurrent.futures.TimeoutError:
                continue

            result = self._process(message)

            self._updates.put(result)

    def stop(self):
        self._exit = True
        self._stream.close()

    def _register(self):
        self._stream.wait_for_ready()

        request = consensus_pb2.ConsensusRegisterRequest(
            name=self._engine.name(),
            version=self._engine.version(),
        ).SerializeToString()

        while True:
            future = self._stream.send(
                message_type=Message.CONSENSUS_REGISTER_REQUEST,
                content=request)
            response = consensus_pb2.ConsensusRegisterResponse()
            response.ParseFromString(future.result(REGISTER_TIMEOUT).content)

            if (response.status ==
                    consensus_pb2.ConsensusRegisterResponse.NOT_READY):
                continue

            if response.status == consensus_pb2.ConsensusRegisterResponse.OK:
                return (response.chain_head, response.peers)

            raise exceptions.ReceiveError(
                'Registration failed with status {}'.format(response.status))

    def _process(self, message):
        type_tag = message.message_type

        if type_tag == Message.CONSENSUS_NOTIFY_PEER_CONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerConnected()
            notification.ParseFromString(message.content)

            data = notification.peer_info

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_DISCONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerDisconnected()
            notification.ParseFromString(message.content)

            data = notification.peer_id

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_MESSAGE:
            notification = consensus_pb2.ConsensusNotifyPeerMessage()
            notification.ParseFromString(message.content)

            data = notification.message, notification.sender_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_NEW:
            notification = consensus_pb2.ConsensusNotifyBlockNew()
            notification.ParseFromString(message.content)

            data = notification.block

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_VALID:
            notification = consensus_pb2.ConsensusNotifyBlockValid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_INVALID:
            notification = consensus_pb2.ConsensusNotifyBlockInvalid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_COMMIT:
            notification = consensus_pb2.ConsensusNotifyBlockCommit()
            notification.ParseFromString(message.content)

            data = notification.block_id

        else:
            raise exceptions.ReceiveError(
                'Received unexpected message type: {}'.format(type_tag))

        self._stream.send_back(
            message_type=Message.CONSENSUS_NOTIFY_ACK,
            correlation_id=message.correlation_id,
            content=consensus_pb2.ConsensusNotifyAck().SerializeToString())

        return type_tag, data
Example No. 33
class TransactionProcessor(object):
    def __init__(self, url):
        self._stream = Stream(url)
        self._url = url
        self._handlers = []

    @property
    def zmq_id(self):
        return self._stream.zmq_id

    def add_handler(self, handler):
        """Add a transaction family handler
        :param handler:
        """
        self._handlers.append(handler)

    def _matches(self, handler, header):
        return header.family_name == handler.family_name \
            and header.family_version in handler.family_versions \
            and header.payload_encoding in handler.encodings

    def _find_handler(self, header):
        """Find a handler for a particular (family_name,
        family_versions, payload_encoding)
        :param header transaction_pb2.TransactionHeader:
        :return: handler
        """
        try:
            return next(handler for handler in self._handlers
                        if self._matches(handler, header))
        except StopIteration:
            LOGGER.debug("Missing handler for header: %s", header)
            return None

    def _register_requests(self):
        """Returns all of the TpRegisterRequests for handlers

        :return (list): list of TpRegisterRequests
        """
        return itertools.chain.from_iterable(  # flattens the nested list
            [[
                TpRegisterRequest(family=n,
                                  version=v,
                                  encoding=e,
                                  namespaces=h.namespaces)
                for n, v, e in itertools.product(
                    [h.family_name], h.family_versions, h.encodings)
            ] for h in self._handlers])

    def _unregister_request(self):
        """Returns a single TP_UnregisterRequest that requests
        that the validator stop sending transactions for previously
        registered handlers.

        :return (processor_pb2.TpUnregisterRequest):
        """
        return TpUnregisterRequest()

    def _process(self, msg):
        if msg.message_type != Message.TP_PROCESS_REQUEST:
            LOGGER.debug(
                "Transaction Processor recieved invalid message type. "
                "Message type should be TP_PROCESS_REQUEST,"
                " but is %s", Message.MessageType.Name(msg.message_type))
            return

        request = TpProcessRequest()
        request.ParseFromString(msg.content)
        state = State(self._stream, request.context_id)
        header = TransactionHeader()
        header.ParseFromString(request.header)
        try:
            if not self._stream.is_ready():
                raise ValidatorConnectionError()
            handler = self._find_handler(header)
            if handler is None:
                return
            handler.apply(request, state)
            self._stream.send_back(
                message_type=Message.TP_PROCESS_RESPONSE,
                correlation_id=msg.correlation_id,
                content=TpProcessResponse(
                    status=TpProcessResponse.OK).SerializeToString())
        except InvalidTransaction as it:
            LOGGER.warning("Invalid Transaction %s", it)
            try:
                self._stream.send_back(
                    message_type=Message.TP_PROCESS_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpProcessResponse(
                        status=TpProcessResponse.INVALID_TRANSACTION,
                        message=str(it),
                        extended_data=it.extended_data).SerializeToString())
            except ValidatorConnectionError as vce:
                # TP_PROCESS_REQUEST has made it through the
                # handler.apply and an INVALID_TRANSACTION would have been
                # sent back but the validator has disconnected and so it
                # doesn't care about the response.
                LOGGER.warning("during invalid transaction response: %s", vce)
        except InternalError as ie:
            LOGGER.warning("internal error: %s", ie)
            try:
                self._stream.send_back(
                    message_type=Message.TP_PROCESS_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpProcessResponse(
                        status=TpProcessResponse.INTERNAL_ERROR,
                        message=str(ie),
                        extended_data=ie.extended_data).SerializeToString())
            except ValidatorConnectionError as vce:
                # Same as the prior except block, but an internal error has
                # happened, but because of the disconnect the validator
                # probably doesn't care about the response.
                LOGGER.warning("during internal error response: %s", vce)
        except ValidatorConnectionError as vce:
            # Somewhere within handler.apply a future resolved with an
            # error status that the validator has disconnected. There is
            # nothing left to do but reconnect.
            LOGGER.warning(
                "during handler.apply a future was resolved "
                "with error status: %s", vce)

    def _process_future(self, future, timeout=None, sigint=False):
        try:
            msg = future.result(timeout)
        except CancelledError:
            # This error is raised when Task.cancel is called on
            # disconnect from the validator in stream.py, for
            # this future.
            return
        if msg is RECONNECT_EVENT:
            if sigint is False:
                LOGGER.info("reregistering with validator")
                self._stream.wait_for_ready()
                self._register()
        else:
            LOGGER.debug('received message of type: %s',
                         Message.MessageType.Name(msg.message_type))
            if msg.message_type == Message.TP_PING:
                self._stream.send_back(
                    message_type=Message.TP_PING_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpPingResponse(
                        status=TpPingResponse.OK).SerializeToString())
                return
            self._process(msg)

    def _register(self):
        futures = []
        for message in self._register_requests():
            self._stream.wait_for_ready()
            future = self._stream.send(
                message_type=Message.TP_REGISTER_REQUEST,
                content=message.SerializeToString())
            futures.append(future)

        for future in futures:
            resp = TpRegisterResponse()
            try:
                resp.ParseFromString(future.result().content)
                LOGGER.info("register attempt: %s",
                            TpRegisterResponse.Status.Name(resp.status))
            except ValidatorConnectionError as vce:
                LOGGER.info("during waiting for response on registration: %s",
                            vce)

    def _unregister(self):
        message = self._unregister_request()
        self._stream.wait_for_ready()
        future = self._stream.send(message_type=Message.TP_UNREGISTER_REQUEST,
                                   content=message.SerializeToString())
        response = TpUnregisterResponse()
        try:
            response.ParseFromString(future.result(1).content)
            LOGGER.info("unregister attempt: %s",
                        TpUnregisterResponse.Status.Name(response.status))
        except ValidatorConnectionError as vce:
            LOGGER.info("during waiting for response on unregistration: %s",
                        vce)

    def start(self):
        fut = None
        try:
            self._register()
            while True:
                # During long running processing this
                # is where the transaction processor will
                # spend most of its time
                fut = self._stream.receive()
                self._process_future(fut)
        except KeyboardInterrupt:
            try:
                # tell the validator to not send any more messages
                self._unregister()
                while True:
                    if fut is not None:
                        # process futures as long as the tp has them,
                        # if the TP_PROCESS_REQUEST doesn't come from
                        # zeromq->asyncio in 1 second raise a
                        # concurrent.futures.TimeOutError and be done.
                        self._process_future(fut, 1, sigint=True)
                        fut = self._stream.receive()
            except concurrent.futures.TimeoutError:
                # Where the tp will usually exit after
                # a KeyboardInterrupt. Caused by the 1 second
                # timeout in _process_future.
                pass
            except FutureTimeoutError:
                # If the validator is not able to respond to the
                # unregister request, exit.
                pass

    def stop(self):
        self._stream.close()
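A typical way to run a TransactionProcessor like the one above; a sketch assuming a hypothetical handler class exposing family_name, family_versions, namespaces (plus encodings for this older SDK variant) and an apply() method:

processor = TransactionProcessor(url='tcp://localhost:4004')  # assumed endpoint
processor.add_handler(MyHandler())  # MyHandler is hypothetical
try:
    processor.start()  # registers with the validator and blocks on receive()
except KeyboardInterrupt:
    pass
finally:
    processor.stop()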
Example No. 34
class ZmqDriver(Driver):
    def __init__(self, engine):
        super().__init__(engine)
        self._engine = engine
        self._stream = None
        self._exit = False
        self._updates = None

    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        # Validators version 1.1 send startup info with the registration
        # response; newer versions will send an activation message with the
        # startup info
        if startup_state is None:
            startup_state = self._wait_until_active()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()

    def _driver_loop(self):
        try:
            future = self._stream.receive()
            while True:
                if self._exit:
                    self._engine.stop()
                    break

                try:
                    message = future.result(1)
                    future = self._stream.receive()
                except concurrent.futures.TimeoutError:
                    continue
                try:
                    result = self._process(message)
                    # if message was a ping ignore
                    if result[0] == Message.PING_REQUEST:
                        continue

                    self._updates.put(result)

                except exceptions.ReceiveError as err:
                    LOGGER.warning("%s", err)
                    continue
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught driver exception")

    def stop(self):
        self._exit = True
        self._engine.stop()
        self._stream.close()

    def _register(self):
        self._stream.wait_for_ready()

        request = consensus_pb2.ConsensusRegisterRequest(
            name=self._engine.name(),
            version=self._engine.version(),
        )

        for (name, version) in self._engine.additional_protocols():
            protocol = request.additional_protocols.add()
            protocol.name = name
            protocol.version = version

        while True:
            future = self._stream.send(
                message_type=Message.CONSENSUS_REGISTER_REQUEST,
                content=request.SerializeToString())
            response = consensus_pb2.ConsensusRegisterResponse()
            response.ParseFromString(future.result(REGISTER_TIMEOUT).content)

            if (
                response.status
                == consensus_pb2.ConsensusRegisterResponse.NOT_READY
            ):
                continue

            if response.status == consensus_pb2.ConsensusRegisterResponse.OK:
                if (
                    response.HasField('chain_head')
                    and response.HasField('local_peer_info')
                ):
                    return StartupState(
                        response.chain_head,
                        response.peers,
                        response.local_peer_info)

                return None

            raise exceptions.ReceiveError(
                'Registration failed with status {}'.format(response.status))

    def _wait_until_active(self):
        future = self._stream.receive()
        while True:
            try:
                message = future.result(1)
            except concurrent.futures.TimeoutError:
                continue

            if (
                message.message_type
                == Message.CONSENSUS_NOTIFY_ENGINE_ACTIVATED
            ):
                notification = \
                    consensus_pb2.ConsensusNotifyEngineActivated()
                notification.ParseFromString(message.content)

                startup_state = StartupState(
                    notification.chain_head,
                    notification.peers,
                    notification.local_peer_info)

                LOGGER.info(
                    'Received activation message with startup state: %s',
                    startup_state)

                self._stream.send_back(
                    message_type=Message.CONSENSUS_NOTIFY_ACK,
                    correlation_id=message.correlation_id,
                    content=consensus_pb2.ConsensusNotifyAck()
                                         .SerializeToString())

                return startup_state

            LOGGER.warning(
                'Received message type %s while waiting for '
                'activation message', message.message_type)
            future = self._stream.receive()

    def _process(self, message):
        type_tag = message.message_type

        if type_tag == Message.CONSENSUS_NOTIFY_PEER_CONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerConnected()
            notification.ParseFromString(message.content)

            data = notification.peer_info

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_DISCONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerDisconnected()
            notification.ParseFromString(message.content)

            data = notification.peer_id

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_MESSAGE:
            notification = consensus_pb2.ConsensusNotifyPeerMessage()
            notification.ParseFromString(message.content)

            header = consensus_pb2.ConsensusPeerMessageHeader()
            header.ParseFromString(notification.message.header)

            peer_message = PeerMessage(
                header=header,
                header_bytes=notification.message.header,
                header_signature=notification.message.header_signature,
                content=notification.message.content)

            data = peer_message, notification.sender_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_NEW:
            notification = consensus_pb2.ConsensusNotifyBlockNew()
            notification.ParseFromString(message.content)

            data = notification.block

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_VALID:
            notification = consensus_pb2.ConsensusNotifyBlockValid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_INVALID:
            notification = consensus_pb2.ConsensusNotifyBlockInvalid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_COMMIT:
            notification = consensus_pb2.ConsensusNotifyBlockCommit()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_ENGINE_DEACTIVATED:
            self.stop()
            data = None

        elif type_tag == Message.PING_REQUEST:
            data = None

        else:
            raise exceptions.ReceiveError(
                'Received unexpected message type: {}'.format(type_tag))

        self._stream.send_back(
            message_type=Message.CONSENSUS_NOTIFY_ACK,
            correlation_id=message.correlation_id,
            content=consensus_pb2.ConsensusNotifyAck().SerializeToString())

        return type_tag, data
Example No. 35
    def __init__(self, validator_url):
        LOGGER.info("Connecting to validator: %s", validator_url)
        self._stream = Stream(validator_url)
        self._delta_handlers = []
        self._is_active = False
Example No. 36
class TransactionProcessor:
    """TransactionProcessor is a generic class for communicating with a
    validator and routing transaction processing requests to a registered
    handler. It uses ZMQ and channels to handle requests concurrently.
    """

    def __init__(self, url):
        """
        Args:
            url (string): The URL of the validator
        """
        self._stream = Stream(url)
        self._url = url
        self._handlers = []

    @property
    def zmq_id(self):
        return self._stream.zmq_id

    def add_handler(self, handler):
        """Adds a transaction family handler
        Args:
            handler (TransactionHandler): the handler to be added
        """
        self._handlers.append(handler)

    def _matches(self, handler, header):
        return header.family_name == handler.family_name \
            and header.family_version in handler.family_versions

    def _find_handler(self, header):
        """Find a handler for a particular (family_name, family_versions)
        :param header transaction_pb2.TransactionHeader:
        :return: handler
        """
        try:
            return next(
                handler for handler in self._handlers
                if self._matches(handler, header))
        except StopIteration:
            LOGGER.debug("Missing handler for header: %s", header)
            return None

    def _register_requests(self):
        """Returns all of the TpRegisterRequests for handlers

        :return (list): list of TpRegisterRequests
        """
        return itertools.chain.from_iterable(  # flattens the nested list
            [
                [TpRegisterRequest(
                    family=n,
                    version=v,
                    namespaces=h.namespaces)
                 for n, v in itertools.product(
                    [h.family_name],
                     h.family_versions,)] for h in self._handlers])

    def _unregister_request(self):
        """Returns a single TP_UnregisterRequest that requests
        that the validator stop sending transactions for previously
        registered handlers.

        :return (processor_pb2.TpUnregisterRequest):
        """
        return TpUnregisterRequest()

    def _process(self, msg):
        if msg.message_type != Message.TP_PROCESS_REQUEST:
            LOGGER.debug(
                "Transaction Processor recieved invalid message type. "
                "Message type should be TP_PROCESS_REQUEST,"
                " but is %s", Message.MessageType.Name(msg.message_type))
            return

        request = TpProcessRequest()
        request.ParseFromString(msg.content)
        state = Context(self._stream, request.context_id)
        header = request.header
        try:
            if not self._stream.is_ready():
                raise ValidatorConnectionError()
            handler = self._find_handler(header)
            if handler is None:
                return
            handler.apply(request, state)
            self._stream.send_back(
                message_type=Message.TP_PROCESS_RESPONSE,
                correlation_id=msg.correlation_id,
                content=TpProcessResponse(
                    status=TpProcessResponse.OK
                ).SerializeToString())
        except InvalidTransaction as it:
            LOGGER.warning("Invalid Transaction %s", it)
            try:
                self._stream.send_back(
                    message_type=Message.TP_PROCESS_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpProcessResponse(
                        status=TpProcessResponse.INVALID_TRANSACTION,
                        message=str(it),
                        extended_data=it.extended_data
                    ).SerializeToString())
            except ValidatorConnectionError as vce:
                # The TP_PROCESS_REQUEST made it through handler.apply and
                # an INVALID_TRANSACTION response would have been sent back,
                # but the validator has disconnected, so it no longer cares
                # about the response.
                LOGGER.warning("during invalid transaction response: %s", vce)
        except InternalError as ie:
            LOGGER.warning("internal error: %s", ie)
            try:
                self._stream.send_back(
                    message_type=Message.TP_PROCESS_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpProcessResponse(
                        status=TpProcessResponse.INTERNAL_ERROR,
                        message=str(ie),
                        extended_data=ie.extended_data
                    ).SerializeToString())
            except ValidatorConnectionError as vce:
                # Same as the prior except block: an internal error occurred,
                # but because of the disconnect the validator probably
                # doesn't care about the response.
                LOGGER.warning("during internal error response: %s", vce)
        except ValidatorConnectionError as vce:
            # Somewhere within handler.apply a future resolved with an
            # error status that the validator has disconnected. There is
            # nothing left to do but reconnect.
            LOGGER.warning("during handler.apply a future was resolved "
                           "with error status: %s", vce)
        except AuthorizationException as ae:
            LOGGER.warning("AuthorizationException: %s", ae)
            try:
                self._stream.send_back(
                    message_type=Message.TP_PROCESS_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=TpProcessResponse(
                        status=TpProcessResponse.INVALID_TRANSACTION,
                        message=str(ae),
                    ).SerializeToString())
            except ValidatorConnectionError as vce:
                # The TP_PROCESS_REQUEST made it through handler.apply and
                # an INVALID_TRANSACTION response would have been sent back,
                # but the validator has disconnected, so it no longer cares
                # about the response.
                LOGGER.warning("during invalid transaction response: %s", vce)

    def _process_future(self, future, timeout=None, sigint=False):
        try:
            msg = future.result(timeout)
        except CancelledError:
            # This error is raised when Task.cancel is called on
            # disconnect from the validator in stream.py, for
            # this future.
            return
        if msg is RECONNECT_EVENT:
            if sigint is False:
                LOGGER.info("reregistering with validator")
                self._stream.wait_for_ready()
                self._register()
        else:
            LOGGER.debug(
                'received message of type: %s',
                Message.MessageType.Name(msg.message_type))
            if msg.message_type == Message.PING_REQUEST:
                self._stream.send_back(
                    message_type=Message.PING_RESPONSE,
                    correlation_id=msg.correlation_id,
                    content=PingResponse().SerializeToString())
                return
            self._process(msg)

    def _register(self):
        futures = []
        for message in self._register_requests():
            self._stream.wait_for_ready()
            future = self._stream.send(
                message_type=Message.TP_REGISTER_REQUEST,
                content=message.SerializeToString())
            futures.append(future)

        for future in futures:
            resp = TpRegisterResponse()
            try:
                resp.ParseFromString(future.result().content)
                LOGGER.info("register attempt: %s",
                            TpRegisterResponse.Status.Name(resp.status))
            except ValidatorConnectionError as vce:
                LOGGER.info("during waiting for response on registration: %s",
                            vce)

    def _unregister(self):
        message = self._unregister_request()
        self._stream.wait_for_ready()
        future = self._stream.send(
            message_type=Message.TP_UNREGISTER_REQUEST,
            content=message.SerializeToString())
        response = TpUnregisterResponse()
        try:
            response.ParseFromString(future.result(1).content)
            LOGGER.info("unregister attempt: %s",
                        TpUnregisterResponse.Status.Name(response.status))
        except ValidatorConnectionError as vce:
            LOGGER.info("during waiting for response on unregistration: %s",
                        vce)

    def start(self):
        """Connects the transaction processor to a validator and starts
        listening for requests and routing them to an appropriate
        transaction handler.
        """
        fut = None
        try:
            self._register()
            while True:
                # During long running processing this
                # is where the transaction processor will
                # spend most of its time
                fut = self._stream.receive()
                self._process_future(fut)
        except KeyboardInterrupt:
            try:
                # tell the validator to not send any more messages
                self._unregister()
                while True:
                    if fut is not None:
                        # Process futures as long as the tp has them;
                        # if a TP_PROCESS_REQUEST doesn't arrive from
                        # zeromq->asyncio within 1 second, a
                        # concurrent.futures.TimeoutError is raised and
                        # the loop is done.
                        self._process_future(fut, 1, sigint=True)
                        fut = self._stream.receive()
            except concurrent.futures.TimeoutError:
                # This is where the tp usually exits after a
                # KeyboardInterrupt, caused by the 1-second
                # timeout in _process_future.
                pass
            except FutureTimeoutError:
                # If the validator is not able to respond to the
                # unregister request, exit.
                pass

    def stop(self):
        """Closes the connection between the TransactionProcessor and the
        validator.
        """
        self._stream.close()
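
A minimal usage sketch for the TransactionProcessor above, assuming the standard sawtooth_sdk package layout; the EchoHandler family below is hypothetical (its name, version, and namespace prefix are made up) and exists only to show how add_handler() and start() fit together:

# Hypothetical handler and wiring; 'echo', '1.0', and the address prefix
# are illustrative assumptions, not a real transaction family.
from sawtooth_sdk.processor.core import TransactionProcessor
from sawtooth_sdk.processor.handler import TransactionHandler


class EchoHandler(TransactionHandler):
    @property
    def family_name(self):
        return 'echo'

    @property
    def family_versions(self):
        return ['1.0']

    @property
    def namespaces(self):
        return ['ec403c']  # hypothetical 6-hex-character address prefix

    def apply(self, transaction, context):
        # A real handler would decode transaction.payload, validate it,
        # call context.set_state(), and raise InvalidTransaction on bad
        # input; this sketch only reports the payload size.
        print('echo payload bytes:', len(transaction.payload))


if __name__ == '__main__':
    processor = TransactionProcessor(url='tcp://localhost:4004')
    processor.add_handler(EchoHandler())
    try:
        processor.start()
    except KeyboardInterrupt:
        pass
    finally:
        processor.stop()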
Example No. 37
    def __init__(self, url):
        self._stream = Stream(url)
        self._url = url
        self._handlers = []
Example No. 38
class TestEventsAndReceipts(unittest.TestCase):
    def test_subscribe_and_unsubscribe(self):
        """Tests that a client can subscribe and unsubscribe from events."""
        response = self._subscribe()
        self.assert_subscribe_response(response)

        response = self._unsubscribe()
        self.assert_unsubscribe_response(response)

    def test_subscribe_and_unsubscribe_with_catch_up(self):
        """Tests that a client can subscribe and unsubscribe from events."""
        response = self._subscribe(
            last_known_block_ids=[NULL_BLOCK_IDENTIFIER])
        self.assert_subscribe_response(response)

        # Ensure that it receives the genesis block
        msg = self.stream.receive().result()
        self.assertEqual(
            msg.message_type,
            validator_pb2.Message.CLIENT_EVENTS)
        event_list = events_pb2.EventList()
        event_list.ParseFromString(msg.content)
        events = event_list.events
        self.assertEqual(len(events), 2)
        self.assert_block_commit_event(events[0], 0)
        self.assert_state_event(events[1], '000000')

        response = self._unsubscribe()
        self.assert_unsubscribe_response(response)

    def test_block_commit_event_received(self):
        """Tests that block commit events are properly received on block
        boundaries."""
        self._subscribe()

        for i in range(1, 5):
            self.batch_submitter.submit_next_batch()
            msg = self.stream.receive().result()
            self.assertEqual(
                msg.message_type,
                validator_pb2.Message.CLIENT_EVENTS)
            event_list = events_pb2.EventList()
            event_list.ParseFromString(msg.content)
            events = event_list.events
            self.assertEqual(len(events), 2)
            self.assert_block_commit_event(events[0], i)
            self.assert_state_event(events[1], INTKEY_ADDRESS_PREFIX)

        self._unsubscribe()

    def test_get_events(self):
        """Tests that block commit events are properly received on block
        boundaries."""
        self._subscribe()

        self.batch_submitter.submit_next_batch()
        msg = self.stream.receive().result()
        self._unsubscribe()

        event_list = events_pb2.EventList()
        event_list.ParseFromString(msg.content)
        events = event_list.events
        block_commit_event = events[0]
        block_id = list(filter(
            lambda attr: attr.key == "block_id",
            block_commit_event.attributes))[0].value
        block_num = list(filter(
            lambda attr: attr.key == "block_num",
            block_commit_event.attributes))[0].value

        response = self._get_events(
            block_id,
            [events_pb2.EventSubscription(event_type="sawtooth/block-commit")])
        events = self.assert_events_get_response(response)
        self.assert_block_commit_event(events[0], block_num)

    def test_catchup(self):
        """Tests that a subscriber correctly receives catchup events."""
        self._subscribe()

        blocks = []
        for i in range(4):
            self.batch_submitter.submit_next_batch()
            msg = self.stream.receive().result()
            event_list = events_pb2.EventList()
            event_list.ParseFromString(msg.content)
            events = event_list.events
            block_commit_event = events[0]
            block_id = list(filter(
                lambda attr: attr.key == "block_id",
                block_commit_event.attributes))[0].value
            block_num = list(filter(
                lambda attr: attr.key == "block_num",
                block_commit_event.attributes))[0].value
            blocks.append((block_num, block_id))

        self._unsubscribe()

        self.assert_subscribe_response(
            self._subscribe(last_known_block_ids=[blocks[0][1]]))
        LOGGER.warning("Waiting for catchup events")
        for i in range(3):
            msg = self.stream.receive().result()
            LOGGER.warning("Got catchup events: ")
            event_list = events_pb2.EventList()
            event_list.ParseFromString(msg.content)
            events = event_list.events
            self.assertEqual(len(events), 2)
            block_commit_event = events[0]
            block_id = list(filter(
                lambda attr: attr.key == "block_id",
                block_commit_event.attributes))[0].value
            block_num = list(filter(
                lambda attr: attr.key == "block_num",
                block_commit_event.attributes))[0].value
            self.assertEqual((block_num, block_id), blocks[i + 1])

        self._unsubscribe()

    def test_receipt_stored(self):
        """Tests that receipts are stored successfully when a block is
        committed."""
        self._subscribe()
        n = self.batch_submitter.submit_next_batch()
        response = self._get_receipt(n)
        receipts = self.assert_receipt_get_response(response)
        state_change = receipts[0].state_changes[0]
        self.assertEqual(
            state_change.type,
            transaction_receipt_pb2.StateChange.SET)
        self.assertEqual(
            state_change.value,
            cbor.dumps({str(n): 0}))
        self.assertEqual(
            state_change.address,
            make_intkey_address(str(n)))
        self._unsubscribe()

    @classmethod
    def setUpClass(cls):
        wait_for_rest_apis(['rest-api:8008'])
        cls.batch_submitter = BatchSubmitter(WAIT)

    def setUp(self):
        self.url = "tcp://validator:4004"
        self.stream = Stream(self.url)

    def tearDown(self):
        if self.stream is not None:
            self.stream.close()

    def _get_receipt(self, num):
        txn_id = \
            self.batch_submitter.batches[num].transactions[0].header_signature
        request = client_receipt_pb2.ClientReceiptGetRequest(
            transaction_ids=[txn_id])
        response = self.stream.send(
            validator_pb2.Message.CLIENT_RECEIPT_GET_REQUEST,
            request.SerializeToString()).result()
        return response

    def _get_events(self, block_id, subscriptions):
        request = client_event_pb2.ClientEventsGetRequest(
            block_ids=[block_id],
            subscriptions=subscriptions)
        response = self.stream.send(
            validator_pb2.Message.CLIENT_EVENTS_GET_REQUEST,
            request.SerializeToString()).result()
        return response

    def _subscribe(self, subscriptions=None, last_known_block_ids=None):
        if subscriptions is None:
            subscriptions = [
                events_pb2.EventSubscription(
                    event_type="sawtooth/block-commit"),
                # Subscribe to the settings state events, to test genesis
                # catch-up.
                events_pb2.EventSubscription(
                    event_type="sawtooth/state-delta",
                    filters=[events_pb2.EventFilter(
                        key='address',
                        match_string='000000.*',
                        filter_type=events_pb2.EventFilter.REGEX_ANY)]),
                # Subscribe to the intkey state events, to test additional
                # events.
                events_pb2.EventSubscription(
                    event_type="sawtooth/state-delta",
                    filters=[events_pb2.EventFilter(
                        key='address',
                        match_string='{}.*'.format(INTKEY_ADDRESS_PREFIX),
                        filter_type=events_pb2.EventFilter.REGEX_ANY)]),
            ]
        if last_known_block_ids is None:
            last_known_block_ids = []
        request = client_event_pb2.ClientEventsSubscribeRequest(
            subscriptions=subscriptions,
            last_known_block_ids=last_known_block_ids)
        response = self.stream.send(
            validator_pb2.Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
            request.SerializeToString()).result()
        return response

    def _unsubscribe(self):
        request = client_event_pb2.ClientEventsUnsubscribeRequest()
        response = self.stream.send(
            validator_pb2.Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
            request.SerializeToString()).result()
        return response

    def assert_block_commit_event(self, event, block_num):
        self.assertEqual(event.event_type, "sawtooth/block-commit")
        self.assertTrue(
            all([
                any(attribute.key == "block_id"
                    for attribute in event.attributes),
                any(attribute.key == "block_num"
                    for attribute in event.attributes),
                any(attribute.key == "previous_block_id"
                    for attribute in event.attributes),
                any(attribute.key == "state_root_hash"
                    for attribute in event.attributes),
            ]))
        for attribute in event.attributes:
            if attribute.key == "block_num":
                self.assertEqual(attribute.value, str(block_num))

    def assert_receipt_get_response(self, msg):
        self.assertEqual(
            msg.message_type,
            validator_pb2.Message.CLIENT_RECEIPT_GET_RESPONSE)

        receipt_response = client_receipt_pb2.ClientReceiptGetResponse()
        receipt_response.ParseFromString(msg.content)

        self.assertEqual(
            receipt_response.status,
            client_receipt_pb2.ClientReceiptGetResponse.OK)

        return receipt_response.receipts

    def assert_state_event(self, event, address_prefix):
        self.assertEqual(event.event_type, "sawtooth/state-delta")
        state_change_list = transaction_receipt_pb2.StateChangeList()
        state_change_list.ParseFromString(event.data)
        for change in state_change_list.state_changes:
            self.assertTrue(change.address.startswith(address_prefix))

    def assert_events_get_response(self, msg):
        self.assertEqual(
            msg.message_type,
            validator_pb2.Message.CLIENT_EVENTS_GET_RESPONSE)

        events_response = client_event_pb2.ClientEventsGetResponse()
        events_response.ParseFromString(msg.content)

        self.assertEqual(
            events_response.status,
            client_event_pb2.ClientEventsGetResponse.OK)

        return events_response.events

    def assert_subscribe_response(self, msg):
        self.assertEqual(
            msg.message_type,
            validator_pb2.Message.CLIENT_EVENTS_SUBSCRIBE_RESPONSE)

        response = client_event_pb2.ClientEventsSubscribeResponse()
        response.ParseFromString(msg.content)

        self.assertEqual(
            response.status,
            client_event_pb2.ClientEventsSubscribeResponse.OK)

    def assert_unsubscribe_response(self, msg):
        self.assertEqual(
            msg.message_type,
            validator_pb2.Message.CLIENT_EVENTS_UNSUBSCRIBE_RESPONSE)

        response = client_event_pb2.ClientEventsUnsubscribeResponse()
        response.ParseFromString(msg.content)

        self.assertEqual(
            response.status,
            client_event_pb2.ClientEventsUnsubscribeResponse.OK)
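
The tests above pull block_id and block_num out of a block-commit event with repeated list(filter(lambda ...))[0].value expressions. A small hypothetical helper, not part of the test suite, that collapses an event's attributes into a dict would make that pattern shorter:

# Hypothetical helper, not used by the tests above: map an event's repeated
# Attribute(key, value) pairs to a plain dict for easier lookups.
def event_attributes(event):
    return {attribute.key: attribute.value for attribute in event.attributes}


# Equivalent to the filter/lambda pattern in test_get_events:
#   attrs = event_attributes(block_commit_event)
#   block_id, block_num = attrs['block_id'], attrs['block_num']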
Example No. 39
class ZmqDriver(Driver):
    def __init__(self, engine):
        super().__init__(engine)
        self._engine = engine
        self._stream = None
        self._exit = False
        self._updates = None

    def start(self, endpoint):
        self._stream = Stream(endpoint)

        startup_state = self._register()

        self._updates = Queue()

        driver_thread = Thread(
            target=self._driver_loop)
        driver_thread.start()

        try:
            self._engine.start(
                self._updates,
                ZmqService(
                    stream=self._stream,
                    timeout=SERVICE_TIMEOUT,
                    name=self._engine.name(),
                    version=self._engine.version()),
                startup_state)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught engine exception")

        self.stop()
        driver_thread.join()

    def _driver_loop(self):
        try:
            future = self._stream.receive()
            while True:
                if self._exit:
                    self._engine.stop()
                    break

                try:
                    message = future.result(1)
                    future = self._stream.receive()
                except concurrent.futures.TimeoutError:
                    continue

                result = self._process(message)

                self._updates.put(result)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception("Uncaught driver exception")

    def stop(self):
        self._exit = True
        self._engine.stop()
        self._stream.close()

    def _register(self):
        self._stream.wait_for_ready()

        request = consensus_pb2.ConsensusRegisterRequest(
            name=self._engine.name(),
            version=self._engine.version(),
        ).SerializeToString()

        while True:
            future = self._stream.send(
                message_type=Message.CONSENSUS_REGISTER_REQUEST,
                content=request)
            response = consensus_pb2.ConsensusRegisterResponse()
            response.ParseFromString(future.result(REGISTER_TIMEOUT).content)

            if (
                response.status
                == consensus_pb2.ConsensusRegisterResponse.NOT_READY
            ):
                continue

            if response.status == consensus_pb2.ConsensusRegisterResponse.OK:
                return StartupState(
                    response.chain_head,
                    response.peers,
                    response.local_peer_info)

            raise exceptions.ReceiveError(
                'Registration failed with status {}'.format(response.status))

    def _process(self, message):
        type_tag = message.message_type

        if type_tag == Message.CONSENSUS_NOTIFY_PEER_CONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerConnected()
            notification.ParseFromString(message.content)

            data = notification.peer_info

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_DISCONNECTED:
            notification = consensus_pb2.ConsensusNotifyPeerDisconnected()
            notification.ParseFromString(message.content)

            data = notification.peer_id

        elif type_tag == Message.CONSENSUS_NOTIFY_PEER_MESSAGE:
            notification = consensus_pb2.ConsensusNotifyPeerMessage()
            notification.ParseFromString(message.content)

            data = notification.message, notification.sender_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_NEW:
            notification = consensus_pb2.ConsensusNotifyBlockNew()
            notification.ParseFromString(message.content)

            data = notification.block

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_VALID:
            notification = consensus_pb2.ConsensusNotifyBlockValid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_INVALID:
            notification = consensus_pb2.ConsensusNotifyBlockInvalid()
            notification.ParseFromString(message.content)

            data = notification.block_id

        elif type_tag == Message.CONSENSUS_NOTIFY_BLOCK_COMMIT:
            notification = consensus_pb2.ConsensusNotifyBlockCommit()
            notification.ParseFromString(message.content)

            data = notification.block_id

        else:
            raise exceptions.ReceiveError(
                'Received unexpected message type: {}'.format(type_tag))

        self._stream.send_back(
            message_type=Message.CONSENSUS_NOTIFY_ACK,
            correlation_id=message.correlation_id,
            content=consensus_pb2.ConsensusNotifyAck().SerializeToString())

        return type_tag, data
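
The ZmqDriver above calls only four methods on its engine: name() and version() when building the ConsensusRegisterRequest, start(updates, service, startup_state) once registration succeeds, and stop() on shutdown. A sketch of that implied interface follows; the class name and its no-op behaviour are hypothetical, only the four methods are taken from the driver code:

# Sketch of the engine interface implied by ZmqDriver; NoOpEngine and its
# body are assumptions, the method names come from the driver above.
import logging
import queue

LOGGER = logging.getLogger(__name__)


class NoOpEngine:
    def __init__(self):
        self._exit = False

    def name(self):
        return 'no-op'  # hypothetical engine name

    def version(self):
        return '0.1'

    def stop(self):
        self._exit = True

    def start(self, updates, service, startup_state):
        # updates is the Queue that ZmqDriver._driver_loop fills with the
        # (type_tag, data) tuples returned by _process(); startup_state
        # carries chain_head, peers, and local_peer_info from registration.
        while not self._exit:
            try:
                type_tag, _data = updates.get(timeout=1)
            except queue.Empty:
                continue
            LOGGER.debug('consensus update: %s', type_tag)


# Usage sketch (the endpoint value is an assumption):
#   ZmqDriver(NoOpEngine()).start(endpoint='tcp://localhost:5050')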
Example No. 40
    def __init__(self, url):
        self._stream = Stream(url)
        self._url = url
        self._handlers = []
Example No. 41
class Subscriber(object):
    """Creates an object that can subscribe to state delta events using the
    Sawtooth SDK's Stream class. Handler functions can be added prior to
    subscribing, and each will be called on each delta event received.
    """
    def __init__(self, validator_url):
        LOGGER.info('Connecting to validator: %s', validator_url)
        self._stream = Stream(validator_url)
        self._event_handlers = []
        self._is_active = False

    def add_handler(self, handler):
        """Adds a handler which will be passed state delta events when they
        occur. Note that this event is mutable.
        """
        self._event_handlers.append(handler)

    def clear_handlers(self):
        """Clears any delta handlers.
        """
        self._event_handlers = []

    def start(self, known_ids=None):
        """Subscribes to state delta events, and then waits to receive deltas.
        Sends any events received to delta handlers.
        """
        if not known_ids:
            known_ids = [NULL_BLOCK_ID]

        self._stream.wait_for_ready()
        LOGGER.debug('Subscribing to state delta events')

        block_sub = EventSubscription(event_type='sawtooth/block-commit')
        delta_sub = EventSubscription(
            event_type='sawtooth/state-delta',
            filters=[EventFilter(
                key='address',
                match_string='^{}.*'.format(NAMESPACE),
                filter_type=EventFilter.REGEX_ANY)])

        request = ClientEventsSubscribeRequest(
            last_known_block_ids=known_ids,
            subscriptions=[block_sub, delta_sub])
        response_future = self._stream.send(
            Message.CLIENT_EVENTS_SUBSCRIBE_REQUEST,
            request.SerializeToString())
        response = ClientEventsSubscribeResponse()
        response.ParseFromString(response_future.result().content)

        # Forked all the way back to genesis, restart with no known_ids
        if (response.status == ClientEventsSubscribeResponse.UNKNOWN_BLOCK
                and known_ids):
            return self.start()

        if response.status != ClientEventsSubscribeResponse.OK:
            raise RuntimeError(
                'Subscription failed with status: {}'.format(
                    ClientEventsSubscribeResponse.Status.Name(
                        response.status)))

        self._is_active = True

        LOGGER.debug('Successfully subscribed to state delta events')
        while self._is_active:
            message_future = self._stream.receive()

            event_list = EventList()
            event_list.ParseFromString(message_future.result().content)
            for handler in self._event_handlers:
                handler(event_list.events)

    def stop(self):
        """Stops the Subscriber, unsubscribing from state delta events and
        closing the stream's connection.
        """
        self._is_active = False

        LOGGER.debug('Unsubscribing from state delta events')
        request = ClientEventsUnsubscribeRequest()
        response_future = self._stream.send(
            Message.CLIENT_EVENTS_UNSUBSCRIBE_REQUEST,
            request.SerializeToString())
        response = ClientEventsUnsubscribeResponse()
        response.ParseFromString(response_future.result().content)

        if response.status != ClientEventsUnsubscribeResponse.OK:
            LOGGER.warning(
                'Failed to unsubscribe with status: %s',
                ClientEventsUnsubscribeResponse.Status.Name(response.status))

        self._stream.close()
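
A minimal usage sketch for the Subscriber above; the validator URL and the print_events handler are assumptions, and because start() blocks until stop() flips _is_active, it runs on its own thread here:

# Usage sketch; the handler and the URL are illustrative assumptions.
from threading import Thread


def print_events(events):
    for event in events:
        print('received event:', event.event_type)


subscriber = Subscriber('tcp://localhost:4004')
subscriber.add_handler(print_events)

listener = Thread(target=subscriber.start)  # start() blocks until stop()
listener.start()

# ... later, during shutdown:
subscriber.stop()
listener.join()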