async def test_session_extra_labware(main_router, get_labware_fixture,
                                     virtual_smoothie_env):
    """A session created with extra labware definitions can simulate and run
    a protocol that loads that labware; the same protocol fails without them.
    """
    proto = '''
metadata = {"apiLevel": "2.0"}
def run(ctx):
    tr = ctx.load_labware("fixture_12_trough", "1")
    tiprack = ctx.load_labware("opentrons_96_tiprack_300ul", "2")
    instr = ctx.load_instrument("p300_single", "right",
                                tip_racks=[tiprack])
    instr.pick_up_tip()
    instr.aspirate(300, tr["A1"])
'''
    extra_labware = [get_labware_fixture('fixture_12_trough')]

    session = main_router.session_manager.create_with_extra_labware(
        name='<blank>',
        contents=proto,
        extra_labware=extra_labware)
    assert not session.errors

    # Each extra definition should show up among the session's containers.
    loaded_names = [container.name for container in session.get_containers()]
    for definition in extra_labware:
        assert definition['parameters']['loadName'] in loaded_names

    session.run()
    assert not session.errors

    # Without the extra definitions the load_labware call cannot resolve
    # "fixture_12_trough", so creating the session raises.
    with pytest.raises(ExceptionInProtocolError):
        main_router.session_manager.create(
            name='<blank>', contents=proto)
async def test_load_and_run_v2(main_router, protocol, protocol_file, loop):
    """Loading then running an APIv2 protocol should emit a 'loaded' ->
    'running' -> 'finished' state sequence and populate the command log;
    re-running must not grow the log or alter the stored protocol text.
    """
    session = main_router.session_manager.create(
        name='<blank>', contents=protocol.text)
    # Creation itself queues exactly one notification (the 'loaded' state).
    assert main_router.notifications.queue.qsize() == 1
    assert session.state == 'loaded'
    assert session.command_log == {}

    def run():
        session.run()

    # session.run() is synchronous/blocking, so push it to an executor
    # to keep the event loop free to deliver notifications.
    await loop.run_in_executor(executor=None, func=run)
    assert session.command_log
    old_log = copy.deepcopy(session.command_log)

    res = []
    async for notification in main_router.notifications:
        payload = notification['payload']
        # Command log in sync with add-command events emitted.
        # Payloads arrive either as plain dicts or as objects with a
        # .state attribute; isinstance handles both shapes safely.
        if isinstance(payload, dict):
            state = payload.get('state')
        else:
            state = payload.state
        res.append(state)
        if state == 'finished':
            break

    # Collapse consecutive duplicates: only the state transitions matter.
    assert [key for key, _ in itertools.groupby(res)] == \
        ['loaded', 'running', 'finished']
    assert main_router.notifications.queue.qsize() == 0, \
        'Notification should be empty after receiving "finished" event'

    # A second run must replay the same commands, not append new ones.
    session.run()
    assert len(session.command_log) == len(old_log)
    assert session.protocol_text == protocol.text
async def test_session_run_concurrently(
        main_router, get_labware_fixture, virtual_smoothie_env):
    """This test proves that we are not able to start a protocol run while
    one is active.

    This cross boundaries into the RPC because it emulates how the RPC
    server handles requests. It uses a thread executor with two threads.

    This test was added to prove that there's a deadlock if a protocol
    with a pause is started twice.
    """
    # A protocol whose only action is to pause, so the run blocks until
    # resume() is called from another thread.
    proto = '''
metadata = {"apiLevel": "2.0"}
def run(ctx):
    ctx.pause()
'''
    session = main_router.session_manager.create_with_extra_labware(
        name='<blank>',
        contents=proto,
        extra_labware=[]
    )

    from concurrent.futures import ThreadPoolExecutor, as_completed
    from time import sleep

    def run_while_running():
        """The entry point to threads that try to run while a protocol
        is running"""
        with pytest.raises(ThreadedAsyncForbidden):
            session.run()

    # Two iterations prove the session can be run again after completing.
    for _ in range(2):
        # Match the RPC server's executor size of two workers.
        max_workers = 2
        with ThreadPoolExecutor(max_workers=max_workers) as pool:
            futures = [pool.submit(lambda: session.run())]
            # Hammer the session with concurrent run attempts; each must
            # be rejected rather than deadlock.
            futures.extend(
                pool.submit(run_while_running)
                for _ in range(max_workers * 5)
            )
            # Give the real run time to reach the pause point.
            sleep(0.05)
            # Unblock the paused protocol so the first run can finish.
            futures.append(pool.submit(lambda: session.resume()))
            for future in as_completed(futures):
                future.result()
def run():
    """Blocking wrapper around session.run(), suitable for an executor."""
    session.run()
def run_while_running():
    """Thread entry point: attempting to run while a protocol is already
    running must raise ThreadedAsyncForbidden rather than deadlock."""
    with pytest.raises(ThreadedAsyncForbidden):
        session.run()