def bootstrap(
    start_orm: bool = True,
    uow: unit_of_work.AbstractUnitOfWork = None,
    notifications: AbstractNotifications = None,
    publish: Callable = redis_eventpublisher.publish,
) -> messagebus.MessageBus:
    """Wire up and return the application's message bus.

    Builds the dependency dict (unit of work, notifications, publisher),
    partially applies it into every event/command handler, and constructs
    the MessageBus with the injected handlers.

    Args:
        start_orm: when True, register the SQLAlchemy ORM mappers.
        uow: unit of work; defaults to a fresh SqlAlchemyUnitOfWork.
        notifications: notification sender; defaults to EmailNotifications.
        publish: event-publishing callable (Redis by default).

    Returns:
        A fully wired messagebus.MessageBus.
    """
    # BUG FIX: the default was `uow=unit_of_work.SqlAlchemyUnitOfWork()`,
    # which is evaluated ONCE at import time — every bootstrap() call that
    # relied on the default shared the same UoW instance (and created a DB
    # session as an import side effect). Use a None sentinel instead, the
    # same pattern already used for `notifications`.
    if uow is None:
        uow = unit_of_work.SqlAlchemyUnitOfWork()
    if notifications is None:
        notifications = EmailNotifications()
    if start_orm:
        orm.start_mappers()

    dependencies = {'uow': uow, 'notifications': notifications, 'publish': publish}
    injected_event_handlers = {
        event_type: [
            inject_dependencies(handler, dependencies)
            for handler in event_handlers
        ]
        for event_type, event_handlers in handlers.EVENT_HANDLERS.items()
    }
    injected_command_handlers = {
        command_type: inject_dependencies(handler, dependencies)
        for command_type, handler in handlers.COMMAND_HANDLERS.items()
    }
    return messagebus.MessageBus(
        uow=uow,
        event_handlers=injected_event_handlers,
        command_handlers=injected_command_handlers,
    )
def test_rools_back_uncommitted_work_by_default(session_factory):
    """Exiting the UoW without committing must leave the batches table empty.

    NOTE(review): "rools" in the name looks like a typo for "rolls"; kept
    as-is since the test id is referenced by name in CI selections.
    """
    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
    with uow:
        insert_batch(uow.session, 'batch1', 'MEDIUM-PLINTH', 100, None)

    # Inspect through a brand-new session so we see only committed state.
    fresh_session = session_factory()
    assert list(fresh_session.execute('SELECT * FROM "batches"')) == []
def add_batch():
    """Flask endpoint: create a batch from the JSON request body.

    Expects keys 'ref', 'sku', 'qty' and 'eta' (ISO date string or null).
    Returns ('OK', 201) on success.
    """
    uow = unit_of_work.SqlAlchemyUnitOfWork()
    payload = request.json
    eta = payload['eta']
    if eta is not None:
        # Client sends an ISO-8601 string; service layer wants a date.
        eta = datetime.fromisoformat(eta).date()
    services.add_batch(payload['ref'], payload['sku'], payload['qty'], eta, uow)
    return 'OK', 201
def sqlite_bus(sqlite_session_factory):
    """Pytest fixture: a bootstrapped message bus backed by SQLite.

    Notifications are mocked and event publishing is a no-op so tests
    stay isolated; ORM mappers are cleared on teardown.
    """
    message_bus = bootstrap.bootstrap(
        start_orm=True,
        uow=unit_of_work.SqlAlchemyUnitOfWork(sqlite_session_factory),
        notifications=mock.Mock(),
        publish=lambda *args: None,
    )
    yield message_bus
    clear_mappers()
def test_rolls_back_uncommitted_work_by_default(postgres_session):
    """Exiting the UoW without committing must leave no rows for this sku.

    BUG FIX: the original query was `WHERE SKU = 'sku'` — it compared the
    column against the *literal string* 'sku' instead of the randomly
    generated sku, so it always returned zero rows and the test passed
    vacuously. Bind the actual value instead.
    """
    batch_ref = random_batchref("2021")
    sku = random_sku("MEDIUM-PLINTH")
    uow = unit_of_work.SqlAlchemyUnitOfWork()
    with uow:
        insert_batch(uow.session, batch_ref, sku, 100, None)

    new_session = postgres_session
    rows = list(new_session.execute(
        "SELECT * FROM batches WHERE sku=:sku",
        dict(sku=sku),
    ))
    assert rows == []
def allocate_endpoint():
    """Flask endpoint: allocate an order line to a batch.

    Expects JSON keys 'orderid', 'sku' and 'qty'. Returns the allocated
    batch reference with 201, or a 400 with the error message when stock
    is exhausted or the sku is invalid.
    """
    uow = unit_of_work.SqlAlchemyUnitOfWork()
    payload = request.json
    try:
        batchref = services.allocate(
            payload['orderid'], payload['sku'], payload['qty'], uow,
        )
    except (model.OutOfStock, services.InvalidSku) as err:
        return jsonify({'message': str(err)}), 400
    return jsonify({'batchref': batchref}), 201
def try_to_allocate(orderid, sku, exceptions, session_factory):
    """Allocate a single order line in its own unit of work.

    Worker body for the concurrency test: any exception is caught and
    appended to the shared `exceptions` list instead of propagating, so
    the main thread can assert on what happened.
    """
    order_line = model.OrderLine(orderid, sku, 10)
    try:
        with unit_of_work.SqlAlchemyUnitOfWork(session_factory) as uow:
            product = uow.products.get(sku=sku)
            product.allocate(order_line)
            # Widen the race window so two workers overlap deliberately.
            time.sleep(0.2)
            uow.commit()
    except Exception as err:  # pylint: disable=broad-except
        print(traceback.format_exc())
        exceptions.append(err)
def test_rolls_back_on_error(session_factory):
    """An exception inside the UoW context must roll back the insert."""
    class MyException(Exception):
        pass

    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
    with pytest.raises(MyException):
        with uow:
            insert_batch(uow.session, 'batch1', 'LARGE-FORK', 100, None)
            raise MyException()

    # A brand-new session sees only committed state — there should be none.
    fresh_session = session_factory()
    assert list(fresh_session.execute('SELECT * FROM "batches"')) == []
def test_uow_can_retrieve_a_batch_and_allocate_to_it(session_factory):
    """The UoW can load a committed batch and persist an allocation to it."""
    # Arrange: seed a batch outside the unit of work.
    setup_session = session_factory()
    insert_batch(setup_session, 'batch1', 'HIPSTER-WORKBENCH', 100, None)
    setup_session.commit()

    # Act: allocate a line via the UoW and commit.
    uow = unit_of_work.SqlAlchemyUnitOfWork(session_factory)
    with uow:
        batch = uow.batches.get(reference='batch1')
        batch.allocate(model.OrderLine('o1', 'HIPSTER-WORKBENCH', 10))
        uow.commit()

    # Assert: the allocation is visible from the original session.
    assert get_allocated_batch_ref(setup_session, 'o1', 'HIPSTER-WORKBENCH') == 'batch1'
def test_uow_can_retrieve_a_batch_and_allocate_to_it(postgres_session):
    """The UoW can load a committed product and persist an allocation."""
    batch_ref = random_batchref("2020")
    orderline_id = random_orderid("2020")
    sku = random_sku("HIPSTER-WORKBENCH")

    # Arrange: seed a batch directly through the postgres session.
    session = postgres_session
    insert_batch(session, batch_ref, sku, 100, None)
    session.commit()

    # Act: allocate via the product aggregate inside a fresh UoW.
    uow = unit_of_work.SqlAlchemyUnitOfWork()
    with uow:
        product = uow.products.get(sku=sku)
        product.allocate(model.OrderLine(orderline_id, sku, 10))
        uow.commit()

    # Assert: the committed allocation points at the seeded batch.
    assert get_allocated_batch_ref(session, orderline_id, sku) == batch_ref
def test_rolls_back_on_error(postgres_session):
    """An exception inside the UoW context must roll back the insert.

    Improvement: the verification query previously built SQL by string
    concatenation ("...sku='" + sku + "'"); it now uses a bound parameter,
    consistent with the other postgres tests in this file.
    """
    batch_ref = random_batchref("2022")
    sku = random_sku("LARGE-FORK")

    class MyException(Exception):
        pass

    uow = unit_of_work.SqlAlchemyUnitOfWork()
    with pytest.raises(MyException):
        with uow:
            insert_batch(uow.session, batch_ref, sku, 100, None)
            raise MyException()

    new_session = postgres_session
    rows = list(new_session.execute(
        "SELECT * FROM batches WHERE sku=:sku",
        dict(sku=sku),
    ))
    assert rows == []
def test_concurrent_updates_to_version_are_not_allowed(
        postgres_session_factory):
    """Two concurrent allocations against one product: exactly one wins.

    With SERIALIZABLE/REPEATABLE READ isolation, the loser's transaction
    fails with a serialization error; the product version is bumped once
    and only one order line ends up allocated.
    """
    sku, batch = random_sku(), random_batchref()
    session = postgres_session_factory()
    insert_batch(session, batch, sku, 100, eta=None, product_version=1)
    session.commit()

    order1, order2 = random_orderid(1), random_orderid(2)
    exceptions = []  # type: List[Exception]
    thread1 = threading.Thread(
        target=try_to_allocate,
        args=(order1, sku, exceptions, postgres_session_factory),
    )
    thread2 = threading.Thread(
        target=try_to_allocate,
        args=(order2, sku, exceptions, postgres_session_factory),
    )
    thread1.start()
    thread2.start()
    thread1.join()
    thread2.join()

    # Version bumped exactly once: 1 -> 2.
    [[version]] = session.execute(
        "SELECT version_number FROM products WHERE sku=:sku",
        dict(sku=sku),
    )
    assert version == 2

    # Exactly one worker lost the race with a serialization failure.
    [exception] = exceptions
    assert 'could not serialize access due to concurrent update' in str(
        exception)

    # Only the winner's allocation was persisted.
    orders = list(session.execute(
        "SELECT orderid FROM allocations"
        " JOIN batches ON allocations.batch_id = batches.id"
        " JOIN order_lines ON allocations.orderline_id = order_lines.id"
        " WHERE order_lines.sku=:sku",
        dict(sku=sku),
    ))
    assert len(orders) == 1

    # Sanity check: a fresh UoW can still open a working session.
    with unit_of_work.SqlAlchemyUnitOfWork(postgres_session_factory) as uow:
        uow.session.execute('select 1')