def test_override_worker_config(sync_service, mock_docker): """Execute workflow with modified worker configuration.""" # Create workflow. with sync_service() as api: workflow_id = create_workflow(api, source=TEMPLATE_DIR, specfile=TEMPLATE_HELLOWORLD) user_id = create_user(api) # Start new run with default workers. with sync_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) run_id = start_run(api, group_id, arguments=dict()) with sync_service(user_id=user_id) as api: r = api.runs().get_run(run_id) assert r['state'] == state.STATE_ERROR # Start new run with modified workers. worker_config = {'workers': {'test': Docker()}} with sync_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) run_id = start_run(api, group_id, arguments=dict(), config=worker_config) with sync_service(user_id=user_id) as api: r = api.runs().get_run(run_id) assert r['state'] == state.STATE_SUCCESS
def test_list_group_files_local(local_service, hello_world): """Test getting a listing of uploaded files for a workflow group.""" # -- Setup ---------------------------------------------------------------- # # Upload two files for a workflow group. with local_service() as api: user_id = create_user(api) workflow = hello_world(api, name='W1') workflow_id = workflow.workflow_id with local_service(user_id=user_id) as api: group_id = create_group(api, workflow_id=workflow_id) for i in range(2): upload_file(api=api, group_id=group_id, file=io_file(data={ 'group': 1, 'file': i })) # -- Get file listing ----------------------------------------------------- with local_service(user_id=user_id) as api: files = api.uploads().list_uploaded_files(group_id=group_id) serialize.validate_file_listing(files, 2) # -- Error when listing files as unknonw user ----------------------------- with local_service(user_id='UNKNOWN') as api: with pytest.raises(err.UnauthorizedAccessError): api.uploads().list_uploaded_files(group_id=group_id)
def test_run_helloworld_sync(sync_service, specfile, state): """Execute the helloworld example.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for the workflow template. with sync_service() as api: workflow_id = create_workflow(api, source=TEMPLATE_DIR, specfile=specfile) user_id = create_user(api) with sync_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) names = io_file(data=['Alice', 'Bob'], format='plain/text') file_id = upload_file(api, group_id, names) args = [ serialize_arg('names', serialize_fh(file_id, 'data/names.txt')), serialize_arg('sleeptime', 3) ] run_id = start_run(api, group_id, arguments=args) # -- Validate the run handle against the expected state ------------------- with sync_service(user_id=user_id) as api: r = api.runs().get_run(run_id) serialize.validate_run_handle(r, state=state) if state == st.STATE_SUCCESS: # The run should have the greetings.txt file as a result. files = dict() for obj in r['files']: files[obj['name']] = obj['id'] assert len(files) == 1 fh = api.runs().get_result_file( run_id=run_id, file_id=files['results/greetings.txt']) value = fh.open().read().decode('utf-8').strip() assert 'Hello Alice!' in value assert 'Hello Bob!' in value
def test_cancel_run_helloworld(async_service): """Test cancelling a helloworld run.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for the workflow template. with async_service() as api: workflow_id = create_workflow(api, source=BENCHMARK_DIR) user_id = create_user(api) with async_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) names = io_file(data=['Alice', 'Bob', 'Zoe'], format='plain/text') file_id = upload_file(api, group_id, names) args = [ serialize_arg('names', serialize_fh(file_id)), serialize_arg('sleeptime', 10), serialize_arg('greeting', 'Hi') ] run_id = start_run(api, group_id, arguments=args) # Poll run after sleeping for one second. time.sleep(1) with async_service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) assert run['state'] in st.ACTIVE_STATES # -- Cancel the active run ------------------------------------------------ with async_service(user_id=user_id) as api: run = api.runs().cancel_run( run_id=run_id, reason='done' ) assert run['state'] == st.STATE_CANCELED assert run['messages'][0] == 'done' with async_service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) assert run['state'] == st.STATE_CANCELED assert run['messages'][0] == 'done'
def test_delete_runs_local(local_service, hello_world): """Test deleting runs.""" # -- Setup ---------------------------------------------------------------- # # Start two runs for a group of the 'Hello World' workflow. The first run # is active and the second run canceled. with local_service() as api: user_1 = create_user(api) user_2 = create_user(api) workflow_id = hello_world(api).workflow_id with local_service(user_id=user_1) as api: group_id = create_group(api, workflow_id=workflow_id) run_1, _ = start_hello_world(api, group_id) run_2, _ = start_hello_world(api, group_id) api.runs().cancel_run(run_id=run_2) # -- Ensure that we cannot delete an active run --------------------------- with local_service(user_id=user_1) as api: with pytest.raises(err.InvalidRunStateError): api.runs().delete_run(run_id=run_1) r = api.runs().list_runs(group_id=group_id, state=STATE_PENDING) assert len(r['runs']) == 1 # -- Ensure that user 2 cannot delete run 1 ------------------------------- with local_service(user_id=user_2) as api: with pytest.raises(err.UnauthorizedAccessError): api.runs().delete_run(run_id=run_2) # -- Delete run 1 --------------------------------------------------------- with local_service(user_id=user_1) as api: api.runs().delete_run(run_id=run_2) r = api.runs().list_runs(group_id=group_id) # The active run is not affected. assert len(r['runs']) == 1 # -- Error when deleting an unknown run ----------------------------------- with local_service(user_id=user_1) as api: with pytest.raises(err.UnknownRunError): api.runs().delete_run(run_id=run_2)
def test_create_run_local(local_service, hello_world): """Test life cycle for successful run using the local service.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for a group of the 'Hello World' workflow and set it into # success state. tmpdir = tempfile.mkdtemp() with local_service() as api: user_1 = create_user(api) user_2 = create_user(api) workflow_id = hello_world(api).workflow_id with local_service(user_id=user_1) as api: group_id = create_group(api, workflow_id=workflow_id, users=[user_1]) run_id, file_id = start_hello_world(api, group_id) result = {'group': group_id, 'run': run_id} write_results(rundir=tmpdir, files=[(result, None, 'results/data.json'), ([group_id, run_id], 'txt/plain', 'values.txt')]) api.runs().update_run(run_id=run_id, state=api.runs().backend.success( run_id, files=['results/data.json', 'values.txt']), rundir=tmpdir) assert not os.path.exists(tmpdir) # -- Validate run handle -------------------------------------------------- with local_service(user_id=user_1) as api: r = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(r, st.STATE_SUCCESS) assert is_fh(r['arguments'][0]['value']) # -- Error when non-member attempts to access run ------------------------- with local_service(user_id=user_2) as api: with pytest.raises(err.UnauthorizedAccessError): api.runs().get_run(run_id=run_id)
def test_cancel_remote_workflow(tmpdir): """Cancel the execution of a remote workflow.""" # -- Setup ---------------------------------------------------------------- # env = Config().basedir(tmpdir) engine = RemoteTestController(client=RemoteTestClient(runcount=100), poll_interval=1, is_async=True) service = LocalAPIFactory(env=env, engine=engine) engine.service = service # -- Start a new run for the workflow template. with service() as api: workflow_id = create_workflow(api, source=TEMPLATE_DIR) user_id = create_user(api) with service(user_id=user_id) as api: group_id = create_group(api, workflow_id) run_id = start_run(api, group_id) # -- Poll workflow state every second. with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) while run['state'] == st.STATE_PENDING: time.sleep(1) with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_RUNNING) with service(user_id=user_id) as api: api.runs().cancel_run(run_id=run_id, reason='test') # Sleep to ensure that the workflow monitor polls the state and makes an # attempt to update the run state. This should raise an error for the # monitor. The error is not propagated here or to the run. time.sleep(3) with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_CANCELED) assert run['messages'][0] == 'test'
def test_delete_group_file_local(local_service, hello_world): """Test deleting an uploaded file for a workflow group.""" # -- Setup ---------------------------------------------------------------- # # Upload one file for a workflow group. with local_service() as api: user_id = create_user(api) workflow = hello_world(api, name='W1') workflow_id = workflow.workflow_id with local_service(user_id=user_id) as api: group_id = create_group(api, workflow_id=workflow_id) file_id = upload_file(api=api, group_id=group_id, file=io_file(data={ 'group': 1, 'file': 1 })) # -- Error when unknown user attempts to delete the file ------------------ with local_service(user_id='UNKNNOWN') as api: with pytest.raises(err.UnauthorizedAccessError): api.uploads().delete_file(group_id, file_id) # -- Delete the uploaded file --------------------------------------------- with local_service(user_id=user_id) as api: api.uploads().delete_file(group_id, file_id) # After deletion the file cannot be accessed anymore. with local_service(user_id=user_id) as api: with pytest.raises(err.UnknownFileError): api.uploads().get_uploaded_file(group_id, file_id)
def test_run_remote_workflow_error(is_async, tmpdir): """Execute the remote workflow example synchronized and in asynchronous mode when execution results in an error state. """ # -- Setup ---------------------------------------------------------------- env = Config().volume(FStore(basedir=str(tmpdir))).auth() engine = RemoteWorkflowController(client=RemoteTestClient( runcount=3, error='some error'), poll_interval=0.1, is_async=is_async) service = LocalAPIFactory(env=env, engine=engine) # Need to set the association between the engine and the service explicitly # after the API is created. engine.service = service with service() as api: workflow_id = create_workflow(api, source=BENCHMARK_DIR) user_id = create_user(api) with service(user_id=user_id) as api: group_id = create_group(api, workflow_id) # -- Unit test ------------------------------------------------------------ # Start a new run with service(user_id=user_id) as api: run_id = start_run(api, group_id) # Poll workflow state every second. with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) watch_dog = 30 while run['state'] in st.ACTIVE_STATES and watch_dog: time.sleep(1) watch_dog -= 1 with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_ERROR) assert run['messages'] == ['some error']
def test_run_helloworld_sync_env(sync_service): """Successfully execute the helloworld example that contains a notebook step in the Python environment that rund flowServ.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for the workflow template. with sync_service() as api: workflow_id = create_workflow(api, source=BENCHMARK_DIR, specfile=SPEC_FILE) user_id = create_user(api) with sync_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) args = [serialize_arg('greeting', 'Hey there')] run_id = start_run(api, group_id, arguments=args) # -- Validate the run handle against the expected state ------------------- with sync_service(user_id=user_id) as api: r = api.runs().get_run(run_id) serialize.validate_run_handle(r, state=st.STATE_SUCCESS) # The run should have the greetings.txt file as a result. files = dict() for obj in r['files']: files[obj['name']] = obj['id'] assert len(files) == 2 fh = api.runs().get_result_file(run_id=run_id, file_id=files['results/greetings.txt']) value = fh.open().read().decode('utf-8').strip() assert 'Hey there Alice!' in value assert 'Hey there Bob!' in value
def test_cancel_runs_local(local_service, hello_world): """Test canceling runs using a local service.""" # -- Setup ---------------------------------------------------------------- # # Start two runs for the same group of the 'Hello World' workflow. with local_service() as api: user_1 = create_user(api) user_2 = create_user(api) workflow_id = hello_world(api).workflow_id with local_service(user_id=user_1) as api: group_id = create_group(api, workflow_id=workflow_id) run_1, _ = start_hello_world(api, group_id) run_2, _ = start_hello_world(api, group_id) # -- Ensure that there are two active runs r = api.runs().list_runs(group_id=group_id) assert len(r['runs']) == 2 # -- Ensure that user 2 cannot cancel run 2 ------------------------------- with local_service(user_id=user_2) as api: with pytest.raises(err.UnauthorizedAccessError): api.runs().cancel_run(run_id=run_1) with local_service(user_id=user_1) as api: # There are still two active runs. r = api.runs().list_runs(group_id=group_id, state=STATE_PENDING) assert len(r['runs']) == 2 # -- Cancel run 2 --------------------------------------------------------- with local_service(user_id=user_1) as api: api.runs().cancel_run(run_id=run_2) r = api.runs().list_runs(group_id=group_id, state=STATE_PENDING) runs = r['runs'] assert len(runs) == 1 assert run_1 in [r['id'] for r in runs] # -- Error when canceling an inactive run --------------------------------- with local_service(user_id=user_1) as api: with pytest.raises(err.InvalidRunStateError): api.runs().cancel_run(run_id=run_2)
def test_run_remote_workflow_with_error(tmpdir): """Execute the remote workflow example that will end in an error state in asynchronous mode. """ # -- Setup ---------------------------------------------------------------- # # Start a new run for the workflow template. env = Config().basedir(tmpdir) engine = RemoteTestController(client=RemoteTestClient(runcount=3, error='some error'), poll_interval=1, is_async=True) service = LocalAPIFactory(env=env, engine=engine) engine.service = service with service() as api: workflow_id = create_workflow(api, source=TEMPLATE_DIR) user_id = create_user(api) with service(user_id=user_id) as api: group_id = create_group(api, workflow_id) run_id = start_run(api, group_id) # Poll workflow state every second. with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) while run['state'] in st.ACTIVE_STATES: time.sleep(1) with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_ERROR) assert run['messages'][0] == 'some error'
def test_upload_group_file_local(local_service, hello_world): """Test uploading files for a workflow group.""" # -- Setup ---------------------------------------------------------------- # # Create one group with minimal metadata for the 'Hello World' workflow. with local_service() as api: user_id = create_user(api) workflow = hello_world(api, name='W1') workflow_id = workflow.workflow_id with local_service(user_id=user_id) as api: group_id = create_group(api, workflow_id=workflow_id) # -- Upload first file for the group -------------------------------------- with local_service(user_id=user_id) as api: r = api.uploads().upload_file(group_id=group_id, file=io_file(data={ 'group': 1, 'file': 1 }), name='group1.json') file_id = r['id'] serialize.validate_file_handle(r) assert r['name'] == 'group1.json' # -- Get serialized handle for the file and the group --------------------- for uid in [user_id, None]: with local_service(user_id=uid) as api: fcont = api.uploads().get_uploaded_file(group_id, file_id).read() assert fcont == b'{"group": 1, "file": 1}' gh = api.groups().get_group(group_id=group_id) serialize.validate_group_handle(gh)
def test_list_runs_local(local_service, hello_world):
    """Test listing runs.

    Starts one run each for two separate groups, checks the per-group run
    listings, then adds an error-state run to group 1 and verifies that only
    group 1's listing changes. Finally checks that a non-member cannot list
    another group's runs.
    """
    # -- Setup ----------------------------------------------------------------
    #
    # Start one run each for two separate groups of the 'Hello World' workflow.
    with local_service() as api:
        user_1 = create_user(api)
        user_2 = create_user(api)
        workflow_id = hello_world(api).workflow_id
    with local_service(user_id=user_1) as api:
        group_1 = create_group(api, workflow_id=workflow_id)
        run_1, _ = start_hello_world(api, group_1)
    with local_service(user_id=user_2) as api:
        group_2 = create_group(api, workflow_id=workflow_id)
        run_2, _ = start_hello_world(api, group_2)
    # Define expected run listing for each group.
    runs = [(group_1, run_1, user_1), (group_2, run_2, user_2)]
    for group_id, run_id, user_id in runs:
        # -- Get run listing for group ----------------------------------------
        with local_service(user_id=user_id) as api:
            r = api.runs().list_runs(group_id)
            serialize.validate_run_listing(r)
            assert len(r['runs']) == 1
            # Fix: this comparison was previously a bare expression without
            # the assert keyword, so the id was never actually checked.
            assert r['runs'][0]['id'] == run_id
    # Start additional runs for group 1. Then set the run into error state.
    with local_service(user_id=user_1) as api:
        run_3, _ = start_hello_world(api, group_1)
        error_state = api.runs().backend.error(run_3, ['some errors'])
        api.runs().update_run(run_3, error_state)
    # -- Group 1 now has two runs, one in pending state and one in error state
    with local_service(user_id=user_1) as api:
        r = api.runs().list_runs(group_1)
        assert len(r['runs']) == 2
        runs = [(r['id'], r['state']) for r in r['runs']]
        assert (run_1, st.STATE_PENDING) in runs
        assert (run_3, st.STATE_ERROR) in runs
    # -- Group 2 remains unchanged --------------------------------------------
    with local_service(user_id=user_2) as api:
        r = api.runs().list_runs(group_2)
        assert len(r['runs']) == 1
    # -- Error when listing runs for group as non-member ----------------------
    with local_service(user_id=user_2) as api:
        with pytest.raises(err.UnauthorizedAccessError):
            api.runs().list_runs(group_1)
def test_create_run_local(local_service, hello_world): """Test life cycle for successful run using the local service.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for a group of the 'Hello World' workflow and set it into # success state. fs = FileSystemStorage(basedir=tempfile.mkdtemp()) with local_service() as api: user_1 = create_user(api) user_2 = create_user(api) workflow_id = hello_world(api).workflow_id with local_service(user_id=user_1) as api: group_id = create_group(api, workflow_id=workflow_id, users=[user_1]) run_id, file_id = start_hello_world(api, group_id) result = {'group': group_id, 'run': run_id} write_results(runstore=fs, files=[(result, None, 'results/data.json'), ({ 'avg_count': 3.5, 'max_len': 30 }, None, 'results/analytics.json'), ([group_id, run_id], 'txt/plain', 'values.txt')]) api.runs().update_run(run_id=run_id, state=api.runs().backend.success( run_id, files=['results/data.json', 'values.txt']), runstore=fs) # -- Validate run handle -------------------------------------------------- with local_service(user_id=user_1) as api: r = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(r, st.STATE_SUCCESS) assert is_fh(r['arguments'][0]['value']) # -- Error when non-member attempts to access run ------------------------- with local_service(user_id=user_2) as api: with pytest.raises(err.UnauthorizedAccessError): api.runs().get_run(run_id=run_id) # -- Error for run with invalid arguments --------------------------------- with local_service(user_id=user_1) as api: with pytest.raises(err.DuplicateArgumentError): api.runs().start_run(group_id=group_id, arguments=[{ 'name': 'sleeptime', 'value': 1 }, { 'name': 'sleeptime', 'value': 2 }])['id']
def run_erroneous_workflow(service, specfile):
    """Execute the modified helloworld example.

    Creates the workflow from the given spec file, runs it to completion
    (the main run succeeds), then waits for the post-processing run and
    asserts that it ends in an error state.
    """
    with service() as api:
        # Create workflow template, user, and the workflow group.
        workflow_id = create_workflow(api, source=TEMPLATE_DIR, specfile=specfile)
        user_id = create_user(api)
    with service(user_id=user_id) as api:
        group_id = create_group(api, workflow_id)
        # Upload the names file.
        file_id = upload_file(api, group_id, io_file(data=NAMES, format='txt/plain'))
        # Run the workflow.
        run_id = start_run(api, group_id, arguments=[
            serialize_arg('names', serialize_fh(file_id)),
            serialize_arg('greeting', 'Hi')
        ])
    # Poll workflow state every second; the main run itself succeeds.
    run = poll_run(service, run_id, user_id)
    assert run['state'] == st.STATE_SUCCESS
    with service() as api:
        wh = api.workflows().get_workflow(workflow_id=workflow_id)
    # Wait (bounded to ~60 seconds) for the post-processing entry to appear
    # in the workflow handle.
    attempts = 0
    while 'postproc' not in wh:
        time.sleep(1)
        attempts += 1
        with service() as api:
            wh = api.workflows().get_workflow(workflow_id=workflow_id)
        if attempts > 60:
            break
    assert 'postproc' in wh
    serialize.validate_workflow_handle(wh)
    # Wait (bounded to ~60 seconds) for the post-processing run to finish.
    attempts = 0
    while wh['postproc']['state'] in st.ACTIVE_STATES:
        time.sleep(1)
        attempts += 1
        with service() as api:
            wh = api.workflows().get_workflow(workflow_id=workflow_id)
        if attempts > 60:
            break
    assert wh['postproc']['state'] not in st.ACTIVE_STATES
    serialize.validate_workflow_handle(wh)
    # The erroneous template makes the post-processing step fail.
    assert wh['postproc']['state'] == st.STATE_ERROR
def test_run_helloworld_async(async_service, target): """Execute the helloworld example.""" # -- Setup ---------------------------------------------------------------- # # Start a new run for the workflow template. with async_service() as api: workflow_id = create_workflow(api, source=BENCHMARK_DIR) user_id = create_user(api) with async_service(user_id=user_id) as api: group_id = create_group(api, workflow_id) names = io_file(data=['Alice', 'Bob', 'Zoe'], format='plain/text') file_id = upload_file(api, group_id, names) args = [ serialize_arg('names', serialize_fh(file_id, target)), serialize_arg('sleeptime', 1), serialize_arg('greeting', 'Hi') ] run_id = start_run(api, group_id, arguments=args) # Poll workflow state every second. with async_service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) watch_dog = 30 while run['state'] in st.ACTIVE_STATES and watch_dog: time.sleep(1) watch_dog -= 1 with async_service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) assert run['state'] == st.STATE_SUCCESS files = dict() for f in run['files']: files[f['name']] = f['id'] fh = api.runs().get_result_file( run_id=run_id, file_id=files['results/greetings.txt'] ) greetings = fh.open().read().decode('utf-8').strip() assert 'Hi Alice' in greetings assert 'Hi Bob' in greetings assert 'Hi Zoe' in greetings fh = api.runs().get_result_file( run_id=run_id, file_id=files['results/analytics.json'] ) assert json.load(fh.open()) is not None
def test_cancel_remote_workflow(tmpdir): """Cancel the execution of a remote workflow.""" # -- Setup ---------------------------------------------------------------- env = Config().volume(FStore(basedir=str(tmpdir))).auth() engine = RemoteWorkflowController(client=RemoteTestClient(runcount=100), poll_interval=0.25, is_async=True) service = LocalAPIFactory(env=env, engine=engine) # Need to set the association between the engine and the service explicitly # after the API is created. engine.service = service with service() as api: workflow_id = create_workflow(api, source=BENCHMARK_DIR) user_id = create_user(api) with service(user_id=user_id) as api: group_id = create_group(api, workflow_id) # -- Unit test ------------------------------------------------------------ # Start a new run with service(user_id=user_id) as api: run_id = start_run(api, group_id) # -- Poll workflow state every second. with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) watch_dog = 30 while run['state'] == st.STATE_PENDING and watch_dog: time.sleep(0.1) watch_dog -= 1 with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_RUNNING) with service(user_id=user_id) as api: api.runs().cancel_run(run_id=run_id, reason='test') # Sleep to ensure that the workflow monitor polls the state and makes an # attempt to update the run state. This should raise an error for the # monitor. The error is not propagated here or to the run. time.sleep(1) with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_CANCELED) assert run['messages'][0] == 'test'
def test_run_remote_workflow(tmpdir, is_async): """Execute the remote workflow example synchronized and in asynchronous mode. """ # -- Setup ---------------------------------------------------------------- # env = Config().basedir(tmpdir) engine = RemoteTestController(client=RemoteTestClient(runcount=3, data=['success']), poll_interval=1, is_async=is_async) service = LocalAPIFactory(env=env, engine=engine) engine.service = service # Start a new run for the workflow template. with service() as api: workflow_id = create_workflow(api, source=TEMPLATE_DIR) user_id = create_user(api) with service(user_id=user_id) as api: group_id = create_group(api, workflow_id) run_id = start_run(api, group_id) # Poll workflow state every second. with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) watch_dog = 30 while run['state'] in st.ACTIVE_STATES and watch_dog: time.sleep(1) watch_dog -= 1 with service(user_id=user_id) as api: run = api.runs().get_run(run_id=run_id) serialize.validate_run_handle(run, state=st.STATE_SUCCESS) files = dict() for obj in run['files']: files[obj['name']] = obj['id'] f_id = files['results/data.txt'] with service(user_id=user_id) as api: fh = api.runs().get_result_file(run_id=run_id, file_id=f_id) data = fh.open().read().decode('utf-8') assert 'success' in data
def test_postproc_workflow(fsconfig, tmpdir):
    """Execute the modified helloworld example.

    Runs the workflow once per group for four groups with successively longer
    input lists, and after each run waits for the post-processing workflow to
    finish and validates its result file against the number of runs so far.
    """
    # -- Setup ----------------------------------------------------------------
    #
    # It is important here that we do not use the SQLite in-memory database
    # since this fails (for unknown reason; presumably due to different threads)
    # when the post-processing run is updated.
    # --
    env = Config().basedir(tmpdir).run_async().auth()
    env.update(fsconfig)
    service = LocalAPIFactory(env=env)
    # Start a new run for the workflow template.
    with service() as api:
        # Need to set the file store in the backend to the new instance as
        # well. Otherwise, the post processing workflow may attempt to use
        # the backend which was initialized prior with a different file store.
        workflow_id = create_workflow(api, source=TEMPLATE_DIR, specfile=SPEC_FILE)
        user_id = create_user(api)
    # Create four groups and run the workflow with a slightly different input
    # file. The post-processing checks below are inside the loop: after the
    # i-th run the comparison result must contain (i + 1) entries.
    for i in range(4):
        with service(user_id=user_id) as api:
            group_id = create_group(api, workflow_id)
            names = io_file(data=NAMES[:(i + 1)], format='plain/text')
            file_id = upload_file(api, group_id, names)
            # Set the template argument values
            arguments = [
                serialize_arg('names', serialize_fh(file_id)),
                serialize_arg('greeting', 'Hi')
            ]
            run_id = start_run(api, group_id, arguments=arguments)
        # Poll workflow state every second.
        run = poll_run(service, run_id, user_id)
        assert run['state'] == st.STATE_SUCCESS
        with service() as api:
            wh = api.workflows().get_workflow(workflow_id=workflow_id)
        # Wait (bounded to ~60s) for the post-processing entry to appear.
        attmpts = 0
        while 'postproc' not in wh:
            time.sleep(1)
            with service() as api:
                wh = api.workflows().get_workflow(workflow_id=workflow_id)
            attmpts += 1
            if attmpts > 60:
                break
        assert 'postproc' in wh
        serialize.validate_workflow_handle(wh)
        # Wait (bounded to ~60s) for the post-processing run to finish.
        attmpts = 0
        while wh['postproc']['state'] in st.ACTIVE_STATES:
            time.sleep(1)
            with service() as api:
                wh = api.workflows().get_workflow(workflow_id=workflow_id)
            attmpts += 1
            if attmpts > 60:
                break
        serialize.validate_workflow_handle(wh)
        with service() as api:
            ranking = api.workflows().get_ranking(workflow_id=workflow_id)
        serialize.validate_ranking(ranking)
        # Locate the comparison result among the post-processing files.
        for fobj in wh['postproc']['files']:
            if fobj['name'] == 'results/compare.json':
                file_id = fobj['id']
        with service(user_id=user_id) as api:
            fh = api.workflows().get_result_file(
                workflow_id=workflow_id,
                file_id=file_id
            )
        compare = util.read_object(fh.open())
        assert len(compare) == (i + 1)
    # Access the post-processing result files.
    with service() as api:
        fh = api.workflows().get_result_archive(workflow_id=workflow_id)
        assert fh.name.startswith('run')
        assert fh.mime_type == 'application/gzip'