def test_run_successful_steps_splitenv():
    """Test successful execution of a workflow step when dividing environment
    variables between worker and step.
    """
    # Run commands via the current interpreter to avoid the
    # '/bin/sh: 1: python: not found' error.
    py = sys.executable
    commands = [
        '{py} printenv.py {var}'.format(py=py, var=var)
        for var in ('TEST_ENV_1', 'TEST_ENV_2')
    ]
    # The expected output 'Hello World' shows that the step-level value for
    # TEST_ENV_2 takes precedence over the worker-level value.
    worker = SubprocessWorker(env={'TEST_ENV_1': 'Hello', 'TEST_ENV_2': 'You'})
    step = ContainerStep(
        image='test',
        env={'TEST_ENV_2': 'World'},
        commands=commands
    )
    result = worker.exec(step=step, arguments=dict(), rundir=RUN_DIR)
    assert result.returncode == 0
    assert result.exception is None
    assert ' '.join(line.strip() for line in result.stdout) == 'Hello World'
def test_run_steps_with_subprocess_error(mock_subprocess):
    """Test execution of a workflow step that fails to run."""
    step = ContainerStep(image='test', commands=['nothing to do'])
    result = SubprocessWorker().run(step=step, env=dict(), rundir=RUN_DIR)
    # The failure is reported via the return code, a captured exception, and
    # an error message on stderr; nothing is written to stdout.
    assert result.returncode == 1
    assert result.exception is not None
    assert result.stdout == []
    assert 'cannot run' in ''.join(result.stderr)
def test_run_successful_steps():
    """Test successful execution of a workflow step with two commands."""
    # Use the full interpreter path to avoid the
    # '/bin/sh: 1: python: not found' error.
    interpreter = sys.executable
    commands = [
        '{py} printenv.py TEST_ENV_1'.format(py=interpreter),
        '{py} printenv.py TEST_ENV_2'.format(py=interpreter)
    ]
    env = {'TEST_ENV_1': 'Hello', 'TEST_ENV_2': 'World'}
    step = ContainerStep(image='test', commands=commands)
    result = SubprocessWorker().run(step=step, env=env, rundir=RUN_DIR)
    assert result.returncode == 0
    assert result.exception is None
    assert ' '.join([s.strip() for s in result.stdout]) == 'Hello World'
    # Removed: a dead trailing `step = ContainerStep(...)` assignment that
    # followed the final assert and had no effect.
def test_run_steps_with_error():
    """Test execution of a workflow step where one of the commands raises an
    error.
    """
    # Run commands via the current interpreter to avoid the
    # '/bin/sh: 1: python: not found' error.
    py = sys.executable
    variables = ['TEST_ENV_1', 'TEST_ENV_ERROR', 'TEST_ENV_2']
    commands = ['{py} printenv.py {v}'.format(py=py, v=v) for v in variables]
    env = {
        'TEST_ENV_1': 'Hello',
        'TEST_ENV_ERROR': 'error',
        'TEST_ENV_2': 'World'
    }
    step = ContainerStep(image='test', commands=commands)
    result = SubprocessWorker().run(step=step, env=env, rundir=RUN_DIR)
    # Execution stops at the failing second command: only 'Hello' reaches
    # stdout and the error message appears on stderr.
    assert result.returncode == 1
    assert result.exception is None
    assert ' '.join(line.strip() for line in result.stdout) == 'Hello'
    assert 'there was an error' in ''.join(result.stderr)
def create_worker(doc: Dict) -> Worker:
    """Factory pattern for workers.

    Create an instance of a worker implementation from a given worker
    serialization.

    Parameters
    ----------
    doc: dict
        Dictionary serialization for a worker.

    Returns
    -------
    flowserv.controller.worker.base.Worker

    Raises
    ------
    ValueError
        If the given serialization references an unknown worker type.
    """
    identifier = doc['name']
    worker_type = doc['type']
    env = util.to_dict(doc.get('env', []))
    # Renamed from `vars` to avoid shadowing the Python builtin.
    variables = util.to_dict(doc.get('variables', []))
    volume = doc.get('volume')
    if worker_type == SUBPROCESS_WORKER:
        return SubprocessWorker(
            variables=variables,
            env=env,
            identifier=identifier,
            volume=volume
        )
    elif worker_type == DOCKER_WORKER:
        return DockerWorker(
            variables=variables,
            env=env,
            identifier=identifier,
            volume=volume
        )
    elif worker_type == CODE_WORKER:
        return CodeWorker(identifier=identifier, volume=volume)
    elif worker_type == NOTEBOOK_WORKER:
        return NotebookEngine(identifier=identifier, volume=volume)
    elif worker_type == NOTEBOOK_DOCKER_WORKER:
        return NotebookDockerWorker(
            identifier=identifier,
            env=env,
            volume=volume
        )
    raise ValueError(f"unknown worker type '{worker_type}'")
def test_run_successful_steps():
    """Test successful execution of a workflow step with two commands."""
    # NOTE(review): SYSTEMROOT is presumably required by subprocesses on
    # MS Windows — confirm. Ensure it is set, and restore the original
    # state when the test finishes.
    systemroot = os.environ.get('SYSTEMROOT')
    if not systemroot:
        os.environ['SYSTEMROOT'] = 'SYSTEMROOT'
    try:
        # Use the full interpreter path to avoid the
        # '/bin/sh: 1: python: not found' error.
        interpreter = sys.executable
        commands = [
            '{py} printenv.py TEST_ENV_1'.format(py=interpreter),
            '{py} printenv.py TEST_ENV_2'.format(py=interpreter)
        ]
        env = {'TEST_ENV_1': 'Hello', 'TEST_ENV_2': 'World'}
        step = ContainerStep(identifier='test', image='test', commands=commands)
        result = SubprocessWorker().run(step=step, env=env, rundir=RUN_DIR)
        assert result.returncode == 0
        assert result.exception is None
        assert ' '.join([s.strip() for s in result.stdout]) == 'Hello World'
        # Removed: a dead trailing `step = ContainerStep(...)` assignment
        # that had no effect.
    finally:
        # Restore the environment even if an assertion above failed; the
        # original code skipped restoration on any test failure.
        if systemroot:
            os.environ['SYSTEMROOT'] = systemroot
        else:
            del os.environ['SYSTEMROOT']
from flowserv.controller.worker.code import CodeWorker, CODE_WORKER from flowserv.controller.worker.config import java_jvm, python_interpreter from flowserv.controller.worker.docker import DockerWorker, DOCKER_WORKER from flowserv.controller.worker.docker import NotebookDockerWorker, NOTEBOOK_DOCKER_WORKER from flowserv.controller.worker.notebook import NotebookEngine, NOTEBOOK_WORKER from flowserv.controller.worker.subprocess import SubprocessWorker, SUBPROCESS_WORKER from flowserv.model.workflow.step import WorkflowStep import flowserv.error as err import flowserv.util as util """Create an instance of the sub-process worker that is used as the default worker for container steps that do not have a responsible worker defined for them. """ default_container_worker = SubprocessWorker(variables={ 'python': python_interpreter(), 'java': java_jvm() }) """Serialization label for worker identifier.""" WORKER_ID = 'name' class WorkerPool(object): """Manager for a pool of worker instances. Workers are responsible for the initiation and control of the execution of steps in a serial workflow. Workers are instantiated from a dictionary serializations that follows the `workerSpec` schema defined in the `schema.json` file. """ def __init__(self, workers: Optional[List[Dict]] = list(), managers: Optional[Dict] = None):
import importlib.resources as pkg_resources import json import os from flowserv.controller.worker.base import ContainerEngine from flowserv.controller.worker.config import java_jvm, python_interpreter from flowserv.controller.worker.docker import DockerWorker from flowserv.controller.worker.subprocess import SubprocessWorker import flowserv.util as util """Create an instance of the default worker. By default a subprocess worker is used to execute workflow steps. """ default_engine = SubprocessWorker(variables={ 'python': python_interpreter(), 'java': java_jvm() }) """Create schema validator for API requests.""" # Make sure that the path to the schema file is a valid URI. Otherwise, errors # occur (at least on MS Windows environments). Changed based on: # https://github.com/Julian/jsonschema/issues/398#issuecomment-385130094 schemafile = 'file:///{}'.format( os.path.abspath(os.path.join(__file__, 'schema.json'))) schema = json.load(pkg_resources.open_text(__package__, 'schema.json')) resolver = RefResolver(schemafile, schema) validator = Draft7Validator(schema=schema['definitions']['workerSpec'], resolver=resolver) class WorkerFactory(object): """Factory for workers that implement the :class:`flowserv.controller.worker.base.ContainerStep` class.