def bootstrap(
    start_orm: bool = True,
    uow: unit_of_work.AbstractUnitOfWork = None,
    notifications: notifications.AbstractNotifications = notifications.EmailNotification(),
    publish: Callable = redis_eventpublisher.publish,
) -> messagebus.MessageBus:
    """Build a MessageBus with its dependencies injected into every handler.

    Args:
        start_orm: when True, wire up the SQLAlchemy mappers.
        uow: unit of work; defaults to a fresh SqlAlchemyUnitOfWork.
        notifications: notification sender passed through to handlers.
        publish: event-publishing callable passed through to handlers.

    Returns:
        A messagebus.MessageBus wired with dependency-injected handlers.
    """
    # Fixes from review: the parameter was misspelled `uwo` while the body used
    # `uow` (NameError), the dependencies dict referenced an undefined
    # `send_mail` instead of the `notifications` parameter, and
    # COMMAND_HANLDERS / injected_command_hanlers were typos.
    if uow is None:
        # Lazy default: avoid creating a real DB-backed unit of work at import time.
        uow = unit_of_work.SqlAlchemyUnitOfWork()
    if start_orm:
        orm.start_mappers()

    dependencies = {'uow': uow, 'notifications': notifications, 'publish': publish}
    injected_event_handlers = {
        event_type: [
            inject_dependencies(handler, dependencies)
            for handler in event_handlers
        ]
        for event_type, event_handlers in handlers.EVENT_HANDLERS.items()
    }
    injected_command_handlers = {
        command_type: inject_dependencies(handler, dependencies)
        for command_type, handler in handlers.COMMAND_HANDLERS.items()
    }
    return messagebus.MessageBus(
        uow=uow,
        event_handlers=injected_event_handlers,
        command_handlers=injected_command_handlers,
    )
def bootstrap(
    start_orm: bool = True,
    uow: unit_of_work.AbstractUnitOfWork = None,
    notifications: AbstractNotifications = None,
    publish: Callable = redis_eventpublisher.publish,
) -> messagebus.MessageBus:
    """Build a MessageBus with its dependencies injected into every handler.

    Args:
        start_orm: when True, wire up the SQLAlchemy mappers.
        uow: unit of work; defaults (lazily) to SqlAlchemyUnitOfWork.
        notifications: notification sender; defaults (lazily) to EmailNotifications.
        publish: event-publishing callable passed through to handlers.

    Returns:
        A messagebus.MessageBus wired with dependency-injected handlers.
    """
    # Lazy defaults: constructing these in the signature would run at import
    # time and share one instance across all calls.
    if uow is None:
        uow = unit_of_work.SqlAlchemyUnitOfWork()
    if notifications is None:
        notifications = EmailNotifications()
    if start_orm:
        orm.start_mappers()

    dependencies = {"uow": uow, "notifications": notifications, "publish": publish}
    injected_event_handlers = {
        event_type: [
            # was `inject_dependices` — typo; the sibling bootstrap spells it
            # `inject_dependencies`
            inject_dependencies(handler, dependencies)
            for handler in event_handlers
        ]
        for event_type, event_handlers in handlers.EVENT_HANDLERS.items()
    }
    # renamed from `injected_event_commands` — these are command handlers
    injected_command_handlers = {
        command_type: inject_dependencies(handler, dependencies)
        for command_type, handler in handlers.COMMAND_HANDLERS.items()
    }
    return messagebus.MessageBus(
        uow=uow,
        event_handlers=injected_event_handlers,
        command_handlers=injected_command_handlers,
    )
def test_rolls_back_uncommitted_work_by_default(sqlite_session_factory):
    """Leaving the UoW context without commit() must discard the insert."""
    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with uow:
        insert_batch(uow.session, "batch1", "MEDIUM-PLINTH", 100, None)

    # A brand-new session must not see the uncommitted row.
    fresh_session = sqlite_session_factory()
    assert list(fresh_session.execute('SELECT * FROM "batches"')) == []
def add_batch():
    """Flask endpoint: create a batch from the request's JSON payload."""
    payload = request.json
    eta = payload['eta']
    if eta is not None:
        # ETA arrives as an ISO date string; the service layer wants a date.
        eta = datetime.fromisoformat(eta).date()
    services.add_batch(
        payload['ref'],
        payload['sku'],
        payload['qty'],
        eta,
        unit_of_work.SqlAlchemyUnitOfWork(),
    )
    return 'OK', 201
def bus(sqlite_session_factory):
    """Fixture: a fully wired message bus backed by a sqlite unit of work."""
    message_bus = bootstrap.bootstrap(
        start_orm=True,
        uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory),
        notifications=notifications.EmailNotifications(),
        publish=lambda *args: None,  # suppress real event publishing in tests
    )
    yield message_bus
    # Teardown: undo start_mappers() so other tests can re-map cleanly.
    clear_mappers()
def sqlite_bus(sqlite_session_factory):
    """Fixture: message bus over sqlite with messaging side effects stubbed out."""
    # NOTE(review): this passes send_mail=..., which only matches an older
    # bootstrap() signature (newer ones take `notifications=`) — confirm
    # against the bootstrap module actually imported here.
    message_bus = bootstrap.bootstrap(
        start_orm=True,
        uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory),
        send_mail=lambda *args: None,
        publish=lambda *args: None,
    )
    yield message_bus
    # Teardown: undo start_mappers() so other tests can re-map cleanly.
    clear_mappers()
def allocate_endpoint():
    """Flask endpoint: allocate an order line, returning the chosen batchref."""
    payload = request.json
    try:
        batchref = services.allocate(
            payload['orderid'],
            payload['sku'],
            payload['qty'],
            unit_of_work.SqlAlchemyUnitOfWork(),
        )
    except (model.OutOfStock, services.InvalidSku) as e:
        # Domain/validation failures map to a 400 with the error text.
        return jsonify({'message': str(e)}), 400
    return jsonify({'batchref': batchref}), 201
def try_to_allocate(orderid, sku, exceptions):
    """Worker for the concurrency test: allocate one line, recording any error.

    Appends whatever exception it hits to *exceptions* so the calling test
    can assert on the serialization failure.
    """
    line = models.OrderLine(orderid, sku, 10)
    try:
        with unit_of_work.SqlAlchemyUnitOfWork() as uow:
            product = uow.products.get(sku=sku)
            product.allocate(line)
            # Keep the transaction open long enough for two workers to overlap.
            time.sleep(0.2)
            uow.commit()
    except Exception as err:  # deliberately broad: the test inspects the list
        exceptions.append(err)
def test_rolls_back_on_error(sqlite_session_factory):
    """An exception raised inside the UoW context must roll the work back."""
    class MyException(Exception):
        pass

    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with pytest.raises(MyException):
        with uow:
            insert_batch(uow.session, "batch1", "LARGE-FORK", 100, None)
            raise MyException()

    # A brand-new session must not see the rolled-back row.
    fresh_session = sqlite_session_factory()
    assert list(fresh_session.execute('SELECT * FROM "batches"')) == []
def test_uow_can_retrieve_a_batch_and_allocate_to_it(sqlite_session_factory):
    """Happy path: fetch a product via the UoW, allocate a line, commit."""
    setup_session = sqlite_session_factory()
    insert_batch(setup_session, "batch1", "HIPSTER-WORKBENCH", 100, None)
    setup_session.commit()

    uow = unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory)
    with uow:
        product = uow.products.get("HIPSTER-WORKBENCH")
        product.allocate(models.OrderLine("o1", "HIPSTER-WORKBENCH", 10))
        uow.commit()

    # The committed allocation is visible to the original session.
    assert get_allocated_batch_ref(setup_session, "o1", "HIPSTER-WORKBENCH") == "batch1"
def test_uow_can_retrieve_a_batch_and_allocate_to_it(session_factory):
    """Happy path: fetch a batch via the UoW, allocate a line, commit."""
    setup_session = session_factory()
    insert_batch(setup_session, 'batch1', 'HIPSTER-WORKBENCH', 100, None)
    setup_session.commit()

    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
    with uow:
        batch = uow.batches.get(reference='batch1')
        batch.allocate(model.OrderLine('o1', 'HIPSTER-WORKBENCH', 10))
        uow.commit()

    # The committed allocation is visible to the original session.
    assert get_allocated_batch_ref(setup_session, 'o1', 'HIPSTER-WORKBENCH') == 'batch1'
def test_concurrent_updates_to_version_are_not_allowed(postgres_session_factory):
    """Two concurrent allocations on one product: exactly one may commit.

    Exercises the optimistic-concurrency scheme (products.version_number):
    the losing transaction is expected to fail with postgres's
    "could not serialize access due to concurrent update" error.
    """
    sku, batch = random_sku(), random_batchref()
    session = postgres_session_factory()
    insert_batch(session, batch, sku, 100, eta=None, product_version=1)
    session.commit()

    order1, order2 = random_orderid(1), random_orderid(2)
    exceptions = []  # each worker appends any exception it raises (see try_to_allocate)
    try_to_allocate_order1 = lambda: try_to_allocate(order1, sku, exceptions)
    try_to_allocate_order2 = lambda: try_to_allocate(order2, sku, exceptions)
    thread1 = threading.Thread(target=try_to_allocate_order1)
    thread2 = threading.Thread(target=try_to_allocate_order2)
    # Start both before joining so the two transactions actually overlap.
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()

    # Only one allocation committed, so the version was bumped exactly once.
    [[version]] = session.execute(
        "SELECT version_number FROM products WHERE sku=:sku",
        dict(sku=sku),
    )
    assert version == 2
    # Exactly one worker failed, with a serialization error.
    [exception] = exceptions
    assert "could not serialize access due to concurrent update" in str(exception)

    # And exactly one order line ended up allocated for this sku.
    orders = list(
        session.execute(
            "SELECT orderid FROM allocations"
            " JOIN batches ON allocations.batch_id = batches.id"
            " JOIN order_lines ON allocations.orderline_id = order_lines.id"
            " WHERE order_lines.sku=:sku",
            dict(sku=sku),
        )
    )
    assert len(orders) == 1
    # A fresh UoW still works afterwards — the failure left no poisoned state.
    with unit_of_work.SqlAlchemyUnitOfWork() as uow:
        uow.session.execute("select 1")
def allocations_view_endpoint(orderid):
    """Flask endpoint: read-model view of the allocations for one order."""
    result = views.allocations(orderid, unit_of_work.SqlAlchemyUnitOfWork())
    if result:
        return jsonify(result), 200
    return 'not found', 404
def handle_change_batch_quantity(m):
    """Redis subscriber callback: turn the message into a command and dispatch it."""
    logging.debug('Handling %s', m)
    payload = json.loads(m['data'])
    command = commands.ChangeBatchQuantity(ref=payload['batchref'], qty=payload['qty'])
    messagebus.handle(command, uow=unit_of_work.SqlAlchemyUnitOfWork())