def setup_worker():
    """Set up Dramatiq with RabbitMQ.

    Dramatiq manages the message passing to background workers, which
    run long tasks so application responses are not stalled.

    """
    logger.info('creating tasks queue')
    broker = RabbitmqBroker(url=settings.RABBITMQ_URL)
    dramatiq.set_broker(broker)
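
A minimal sketch (not from this codebase) of how a background task might be declared and enqueued once the broker is set; the `translate_text` actor name and its payload are hypothetical:

import dramatiq

@dramatiq.actor
def translate_text(translation_uuid):
    # Long-running work happens in the worker process, keeping
    # web responses fast.
    ...

# Enqueuing from the application returns immediately.
translate_text.send('some-uuid4-string')
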
def create_projections():
    """Create repository.

    It works as an interface between the application and persisted
    data, exposing relevant information for queries (the read-model).

    Other projections could be set here.

    """
    logger.info('creating read model projections')
    return TranslationProjections(PostgresTranslation())
def create_repository():
    """Create repository.

    It works as an interface between the application and persisted
    data for the event-sourcing design (the write-model).

    Other repositories could be set here.

    """
    logger.info('creating event sourced repositories')
    return AggregatesRepository(Translation, PostgresEventStore())
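
A hedged sketch of how the write-model repository and read-model projections from these snippets might be wired together when handling a command; the flow is an assumption, the names come from this section:

repository = create_repository()
projections = create_projections()

# Write side: create the aggregate and persist its pending events.
translation = Translation.create('hello world')
repository.save(translation)

# Read side: refresh the projection the queries will read from.
projections.update(translation.id)
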
Example #4
def setup_worker():
    """Set up Dramatiq with Redis as a queue.

    Dramatiq manages the message passing to background workers, which
    run long tasks so application responses are not stalled.

    Redis is used as a message queue for simplicity's sake. A more
    robust infrastructure could configure RabbitMQ here, for example.

    """
    logger.info('creating tasks queue')
    broker = RedisBroker(url=settings.REDIS_URL)
    dramatiq.set_broker(broker)
Example #5
    def trigger(self, event):
        """Trigger an event action to the aggregate.

        It uses the `singledispatch` decorator to decide which action
        to take based on the translation Event class input.

        Args:
            event (Event): An event.

        """

        @singledispatch
        def _trigger(_event):
            raise InvalidEventError(f'Invalid event: {_event}')

        @_trigger.register(TranslationRequested)
        def _(_event):
            # Only a brand-new aggregate (empty status) may be requested.
            if self.status != '':
                raise InvalidStatusError(
                    f'Invalid status transition: {self.status}->requested')
            self.text = _event.text
            self.status = 'requested'

        @_trigger.register(TranslationPending)
        def _(_event):
            if self.status != 'requested':
                raise InvalidStatusError(
                    f'Invalid status transition: {self.status}->pending')
            self.translation_id = _event.translation_id
            self.status = 'pending'

        @_trigger.register(TranslationFinished)
        def _(_event):
            if self.status != 'pending':
                raise InvalidStatusError(
                    f'Invalid status transition: {self.status}->finished')
            self.translated_text = _event.translated_text
            self.status = 'finished'
            self.finished = True

        @_trigger.register(TranslationAborted)
        def _(_event):
            # Aborting is allowed from any status, so there is no check.
            self.error = _event.error
            self.status = 'aborted'
            self.finished = True

        logger.info(f'applying event ({event.id}) on aggregate: {self.id}')
        logger.debug(event.as_dict())

        _trigger(event)
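
A short sketch of the lifecycle `trigger` enforces ('' -> requested -> pending -> finished, with aborted reachable from any status). Constructors other than `TranslationRequested.create` are assumed to mirror it and are hypothetical here:

translation = Translation.create('hello')  # status: '' -> 'requested'
try:
    # Skipping 'pending' is rejected: only 'pending' -> 'finished'.
    translation.trigger(TranslationFinished.create('bonjour'))
except InvalidStatusError:
    pass
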
    def save(self, aggregate):
        """Save an aggregate into the event store.

        Args:
            aggregate (Aggregate): Aggregate whose attributes will be
                passed as arguments to the persisted data model to
                append events to the stream.

        """
        logger.info(f'saving aggregate: {aggregate.id}...')
        with session_scope() as session:
            self._event_store.append_to_stream(session, aggregate.id,
                                               aggregate.version,
                                               aggregate.changes)
Example #7
    def create(cls, text):
        """Create a new translation.

        Args:
            text (str): The text to send to the translation service.

        Returns:
            Translation: The new translation.

        """
        logger.info('creating aggregate')
        event = TranslationRequested.create(text)
        instance = super().create([event])
        instance.finished = False
        return instance
def create_app():
    """Create Flask application and initialize Flask extensions.

    This application reads configuration from environment variables
    using FlaskDynaconf extension.
    It uses the Bootstrap extension for the frontend; for dynamic
    frontend updates, an event server is set up with the Flask-SSE
    blueprint.

    """
    logger.info('starting application')
    app = Flask(__name__)
    Bootstrap(app)
    FlaskDynaconf(app)

    return app
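
The docstring mentions Flask-SSE, but the blueprint registration is not shown in this snippet. With the flask-sse package it typically looks like the lines below (SSE needs a Redis URL in the app config; `settings.REDIS_URL` is assumed to exist, as in `setup_worker` above):

from flask_sse import sse

app.config['REDIS_URL'] = settings.REDIS_URL
app.register_blueprint(sse, url_prefix='/stream')
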
def setup_database():
    """Set up database and create tables.

    For this application's purpose, the database runs alongside the
    application containers and a new schema is created at every
    startup.

    In production, a hosted database should be used and creating the
    whole schema and all tables would be unnecessary.

    """
    logger.info('creating databases')
    db = create_db()
    # Comment out the line below if not using a containerized database.
    create_tables(db)
    return db
Example #10
    def update(self, aggregate_uuid):
        """Update a translation from the aggregates repository.

        Args:
            aggregate_uuid (str): A UUID4 string identifying the
                aggregate whose projection should be updated.

        """
        translation = self._repository.get(aggregate_uuid)

        status = translation.status
        text = translation.text
        if status == 'finished':
            translated_text = translation.translated_text
        else:
            translated_text = None

        logger.info(f'updating projection: {aggregate_uuid}')
        with session_scope() as session:
            self._read_model.insert_or_update(session, aggregate_uuid, status,
                                              text, translated_text)
    def get(self, aggregate_uuid):
        """Get an aggregate from the event store.

        Given a UUID, it loads an event stream and re-constructs each
        event row into an Event object given its class.

        Then it constructs an aggregate with the given UUID and the
        event stream version, applying to it the re-constructed events.

        This method is responsible for both managing the persistence
        session and queries as well as translating data to domain
        objects.

        Args:
            aggregate_uuid (str): A UUID4 string identifying the
                aggregate whose event stream to load.

        Returns:
            Aggregate: An instance of `self._aggregate_cls` with the
                stream's events applied.

        """
        logger.info(f'loading aggregate: {aggregate_uuid}...')
        with session_scope() as session:
            events_stream = self._event_store.load_stream(
                session, aggregate_uuid)
            stream_events = self._event_row_to_object(events_stream.events)

            aggregate = self._aggregate_cls(aggregate_uuid,
                                            events_stream.version)

            for event in stream_events:
                aggregate.apply(event)

        return aggregate
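
`_event_row_to_object` is not shown in these snippets. A plausible sketch, assuming each persisted row carries the event class name and its JSON payload (matching what `append_to_stream` below stores) and that event classes accept the payload as keyword arguments:

import json

EVENT_CLASSES = {
    cls.__name__: cls
    for cls in (TranslationRequested, TranslationPending,
                TranslationFinished, TranslationAborted)
}

def _event_row_to_object(self, event_rows):
    # Rebuild each row into its Event class from the stored class
    # name and JSON data column.
    return [
        EVENT_CLASSES[row.event](**json.loads(row.data))
        for row in event_rows
    ]
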
Example #12
    def append_to_stream(self, session, aggregate_uuid, expected_version,
                         events):
        """Append an event stream from an aggregate.

        Args:
            session (Session): A session context into which operate.
            aggregate_uuid (str): A UUID4 string identifying the
                aggregate whose stream receives the events.
            expected_version (int): Version for optimistic lock. Before
                appending an event to an event stream, its aggregate
                must be loaded to avoid inconsistencies. Then, the
                `expected_version` is set and verified to make sure no
                other session will be able to alter the same aggregate
                at the same time.
            events ([Event]): List of events to append to the event
                stream. Each is converted to a dict and serialized as
                JSON before the operation.

        """
        if expected_version:
            # If an `expected_version` is given, the aggregate must be
            # updated as it is already in some version.
            sql = text(f"UPDATE aggregates "
                       f"SET version = :expected_version + 1 "
                       f"WHERE version = :expected_version "
                       f"AND uuid = :aggregate_uuid")
            values = {
                'expected_version': expected_version,
                'aggregate_uuid': aggregate_uuid
            }

            logger.debug(sql, values)
            result = session.execute(sql, values)

            if result.rowcount != 1:
                raise ConcurrencyError(
                    'Failed to update aggregate in database.')

        else:
            # Or else it's a new aggregate.
            sql = text(f"INSERT INTO aggregates (uuid, version) "
                       f"VALUES (:aggregate_uuid, 1)")
            values = {'aggregate_uuid': aggregate_uuid}

            logger.debug(sql)
            result = session.execute(sql, values)

            if result.rowcount != 1:
                raise WriteError('Failed to insert aggregate into database.')

        for event in events:
            # Iterate through events, trying to add each one to the
            # "events" table, relating every one to the same aggregate.
            # If an event is already there (based on its UUID column),
            # it is dropped (`DO NOTHING`).

            sql = text(
                "INSERT INTO events (uuid, aggregate_uuid, event, data) "
                "VALUES (:uuid, :aggregate_uuid, :event, :data) "
                "ON CONFLICT (uuid) DO NOTHING"
            )
            values = {
                'uuid': event.id,
                'aggregate_uuid': aggregate_uuid,
                'event': event.__class__.__name__,
                'data': json.dumps(event.as_dict())
            }

            result = session.execute(sql, values)

            logger.debug(sql)
            if result.rowcount:
                logger.info(f'new event: {event.id}')
            else:
                logger.debug('no new event')
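
The statements in `append_to_stream` imply roughly the schema below; this is a sketch, and the actual DDL lives in `create_tables`, which is not shown here:

SCHEMA_SKETCH = """
CREATE TABLE aggregates (
    uuid    UUID PRIMARY KEY,
    version INTEGER NOT NULL
);

CREATE TABLE events (
    uuid           UUID PRIMARY KEY,  -- ON CONFLICT (uuid) relies on this
    aggregate_uuid UUID NOT NULL REFERENCES aggregates (uuid),
    event          TEXT NOT NULL,     -- event class name
    data           JSONB NOT NULL     -- event payload as JSON
);
"""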