def run_sync(name, spec, backend):
    """Run the workflow described by *spec* synchronously.

    :param name: workflow name (not used in the body -- TODO confirm intent).
    :param spec: keyword arguments for ``yadage.steering_api.run_workflow``;
        the ``backendopts`` key is popped and consumed here.
    :param backend: ``"local"`` to run in-process, ``"docker"`` to run the
        spec through ``yadage-run`` inside a container.
    :raises click.exceptions.ClickException: when the local run fails.
    """
    # NOTE(review): `assert` is stripped under ``python -O``; kept to preserve
    # the exception type callers may rely on, but consider raising ValueError.
    assert backend in ["local", "docker"]
    if backend == "local":
        backend_config = config.backends[backend]["fromstring"]
        from yadage.utils import setupbackend_fromstring
        from yadage.steering_api import run_workflow

        spec["backend"] = setupbackend_fromstring(
            backend_config, spec.pop("backendopts", {}))
        try:
            run_workflow(**spec)
        except Exception:
            # Bug fix: narrowed from a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt and misreported them as failures.
            log.exception("caught exception")
            exc = click.exceptions.ClickException(
                click.style("Workflow failed", fg="red"))
            exc.exit_code = 1
            raise exc
    elif backend == "docker":
        command, dockerconfig = setup_docker()
        # Feed the JSON-serialized spec to ``yadage-run`` via a heredoc so no
        # temporary file is needed inside the container.
        script = """\
        mkdir -p ~/.docker
        echo '{dockerconfig}' > ~/.docker/config.json
        cat << 'EOF' | yadage-run -f -
        {spec}
        EOF
        """.format(spec=json.dumps(spec), dockerconfig=json.dumps(dockerconfig))
        command += ["sh", "-c", textwrap.dedent(script)]
        subprocess.check_call(command)
def run_workflow():
    """Drive the local hello-world test spec with an auto-sized multiproc
    backend and a checksum cache, then flush the packtivity cache to disk."""
    wf_backend = setupbackend_fromstring('multiproc:auto')
    with steering_ctx('local:' + workdir,
                      'workflow.yml',
                      {'par': 'value'},
                      'tests/testspecs/local-helloworld',
                      wf_backend,
                      cache='checksums') as steering:
        steering.adage_argument(default_trackers=False)
        wf_backend.backends['packtivity'].cache.todisk()
def from_file(ctx, param, value):
    """Click callback: load run options from one or more YAML files and run
    the workflow.

    Later files override earlier ones. Exits the Click context on success;
    raises a ClickException carrying the failure return code otherwise.
    """
    if not value or ctx.resilient_parsing:
        return
    data = {}
    for v in value:
        # SECURITY(review): yaml.load without an explicit Loader can construct
        # arbitrary Python objects; use yaml.safe_load if these files can come
        # from untrusted sources.
        data.update(**yaml.load(v))
    verbosity = data.pop('verbosity', 'INFO')
    logging.basicConfig(level=getattr(logging, verbosity), format=LOGFORMAT)
    enable_plugins(data.pop('plugins', []))
    data['backend'] = utils.setupbackend_fromstring(
        data.pop('backend', DEFAULT_BACKEND),
        data.pop('backendopts', {}))
    rc = RC_FAILED
    try:
        steering_api.run_workflow(**data)
        rc = RC_SUCCEEDED
    except Exception:
        # Bug fix: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt propagate instead of being logged as a failure.
        log.exception('workflow failed')
    if rc != RC_SUCCEEDED:
        exc = click.exceptions.ClickException(
            click.style("Workflow failed", fg='red'))
        exc.exit_code = rc
        raise exc
    ctx.exit()
def run_workflow():
    """Run the local hello-world test workflow on an auto-sized multiproc
    backend with checksum caching, then persist the packtivity cache."""
    multiproc = setupbackend_fromstring('multiproc:auto')
    ctx_args = (workdir,
                'workflow.yml',
                {'par': 'value'},
                'tests/testspecs/local-helloworld',
                multiproc)
    with steering_ctx(*ctx_args, cache='checksums') as steering:
        steering.adage_argument(default_trackers=False)
        multiproc.backends['packtivity'].cache.todisk()
def run_yadage_workflow(workflow_uuid, workflow_workspace,
                        workflow_json=None, workflow_parameters=None):
    """Run a ``yadage`` workflow and publish its status to REANA.

    Published status codes: 1 = running, 2 = finished, 3 = failed.
    """
    log.info('getting socket..')

    workflow_workspace = '{0}/{1}'.format(SHARED_VOLUME_PATH,
                                          workflow_workspace)
    # use some shared object between tasks.
    os.environ["workflow_uuid"] = workflow_uuid
    os.environ["workflow_workspace"] = workflow_workspace

    cap_backend = setupbackend_fromstring('fromenv')
    toplevel = os.getcwd()
    workflow = None

    if workflow_json:
        # When `yadage` is launched using an already validated workflow file.
        workflow_kwargs = dict(workflow_json=workflow_json)
    elif workflow:
        # When `yadage` resolves the workflow file from a remote repository:
        # i.e. github:reanahub/reana-demo-root6-roofit/workflow.yaml
        # NOTE(review): `workflow` is always None here, so this branch is
        # unreachable -- confirm whether it was meant to be a parameter.
        workflow_kwargs = dict(workflow=workflow, toplevel=toplevel)
    dataopts = {'initdir': workflow_workspace}

    # Bug fix: bind `publisher` before the try-block so the handler's
    # `if publisher:` check cannot raise NameError when the publisher
    # constructor itself is what failed.
    publisher = None
    try:
        publisher = REANAWorkflowStatusPublisher()
        with steering_ctx(
                dataarg=workflow_workspace,
                dataopts=dataopts,
                initdata=workflow_parameters if workflow_parameters else {},
                visualize=False,
                updateinterval=5,
                loginterval=5,
                backend=cap_backend,
                **workflow_kwargs) as ys:
            log.info('running workflow on context: {0}'.format(locals()))
            publisher.publish_workflow_status(workflow_uuid, 1)
            ys.adage_argument(
                additional_trackers=[REANATracker(identifier=workflow_uuid)])
            publisher.publish_workflow_status(workflow_uuid, 2)
            log.info('Workflow {workflow_uuid} finished. Files available '
                     'at {workflow_workspace}.'.format(
                         workflow_uuid=workflow_uuid,
                         workflow_workspace=workflow_workspace))
    except Exception as e:
        log.info('workflow failed: {0}'.format(e))
        if publisher:
            publisher.publish_workflow_status(workflow_uuid, 3)
        else:
            log.error(
                'Workflow {workflow_uuid} failed but status '
                'could not be published.'.format(workflow_uuid=workflow_uuid))
def run_workflow(self, name, spec):
    """Run the workflow described by *spec* on the configured local backend.

    ``backendopts`` is popped from *spec* and used to construct the backend.

    :param name: workflow name (not used in the body -- TODO confirm intent).
    :param spec: keyword arguments for the module-level ``run_workflow``.
    :raises FailedRunException: if the underlying yadage run fails.
    """
    backend_config = config.backends['local']["fromstring"]
    spec["backend"] = setupbackend_fromstring(
        backend_config, spec.pop("backendopts", {}))
    try:
        # Resolves to the module-level run_workflow, not this method.
        run_workflow(**spec)
    except Exception as err:  # TODO: narrow to the specific yadage errors
        # Bug fix: chain the original cause so the traceback is preserved.
        raise FailedRunException from err
def init_app(app, statetype, stateopts, backendstring, backendopts=None):
    """Attach a yadage ``PersistentController`` to a Flask app and register
    the yadage blueprint.

    Imports are deferred into the function body to keep module import light.
    """
    from yadage.wflowstate import load_model_fromstring
    from yadage.utils import setupbackend_fromstring
    from yadage.controllers import PersistentController

    wflow_model = load_model_fromstring(statetype, stateopts)
    wflow_backend = setupbackend_fromstring(backendstring, backendopts)

    controller = PersistentController(wflow_model, wflow_backend)
    controller.sync_backend()
    app.config['yadage_controller'] = controller

    from yadageblueprint import blueprint
    app.register_blueprint(blueprint)
def run_workflow():
    """Execute the hello-world test spec on an auto-sized multiproc backend
    with a checksum cache and write the packtivity cache to disk."""
    test_backend = setupbackend_fromstring("multiproc:auto")
    steering = steering_ctx(
        "local:" + workdir,
        "workflow.yml",
        {"par": "value"},
        "tests/testspecs/local-helloworld",
        test_backend,
        cache="checksums",
    )
    with steering as ys:
        ys.adage_argument(default_trackers=False)
        test_backend.backends["packtivity"].cache.todisk()
def main(dataarg, workflow, initfiles, controller, ctrlopt, toplevel,
         verbosity, loginterval, updateinterval, schemadir, backend,
         dataopt, wflowopt, backendopt, strategy, modelsetup, modelopt,
         metadir, parameter, validate, visualize, cache, plugins,
         accept_metadir):
    """CLI entry point: parse ``key=value`` option strings, build the
    backend, run the workflow, and translate failure into a ClickException
    carrying the failure return code."""
    logging.basicConfig(level=getattr(logging, verbosity), format=LOGFORMAT)

    from packtivity.plugins import enable_plugins
    if plugins:
        enable_plugins(plugins.split(','))

    initdata = utils.getinit_data(initfiles, parameter)
    dataopts = utils.options_from_eqdelimstring(dataopt)
    backendopts = utils.options_from_eqdelimstring(backendopt)
    modelopts = utils.options_from_eqdelimstring(modelopt)
    ctrlopts = utils.options_from_eqdelimstring(ctrlopt)
    wflowopts = utils.options_from_eqdelimstring(wflowopt)
    backend = utils.setupbackend_fromstring(backend, backendopts)

    rc = RC_FAILED
    try:
        steering_api.run_workflow(
            workflow=workflow,
            toplevel=toplevel,
            validate=validate,
            schemadir=schemadir,
            initdata=initdata,
            wflowopts=wflowopts,
            controller=controller,
            ctrlopts=ctrlopts,
            backend=backend,
            cache=cache,
            dataarg=dataarg,
            dataopts=dataopts,
            metadir=metadir,
            accept_metadir=accept_metadir,
            modelsetup=modelsetup,
            modelopts=modelopts,
            updateinterval=updateinterval,
            loginterval=loginterval,
            visualize=visualize,
            strategy=strategy,
        )
        rc = RC_SUCCEEDED
    except Exception:
        # Bug fix: narrowed from a bare ``except:`` which also intercepted
        # SystemExit/KeyboardInterrupt and misreported them as failures.
        log.exception('workflow failed')
    if rc != RC_SUCCEEDED:
        exc = click.exceptions.ClickException(
            click.style("Workflow failed", fg='red'))
        exc.exit_code = rc
        raise exc
def run_yadage_workflow_engine_adapter(
    publisher,
    rjc_api_client,
    workflow_uuid=None,
    workflow_workspace=None,
    workflow_json=None,
    workflow_parameters=None,
    operational_options=None,
    **kwargs,
):
    """Run a ``yadage`` workflow.

    Publishes status 1 (running) before executing the workflow and 2
    (finished) after ``adage_argument`` wires in the REANA tracker.
    Requires ``operational_options`` to contain ``initdir`` and
    ``initfiles`` keys.
    """
    # Bug fix: mutable default argument ({}) is shared across calls; use
    # None as the sentinel instead (backward-compatible).
    if operational_options is None:
        operational_options = {}
    log.info("getting socket..")

    workflow_workspace = "{0}/{1}".format(SHARED_VOLUME_PATH, workflow_workspace)
    # use some shared object between tasks.
    os.environ["workflow_uuid"] = workflow_uuid
    os.environ["workflow_workspace"] = workflow_workspace
    os.umask(REANA_WORKFLOW_UMASK)

    cap_backend = setupbackend_fromstring("fromenv")
    publisher = REANAWorkflowStatusPublisher(instance=publisher)
    workflow_kwargs = dict(workflow_json=workflow_json)
    dataopts = {"initdir": operational_options["initdir"]}

    initdata = {}
    for initfile in operational_options["initfiles"]:
        # Bug fix: close each init file deterministically instead of
        # leaking the handle from a bare open() call.
        with open(initfile) as stream:
            initdata.update(**yaml.safe_load(stream))
    initdata.update(workflow_parameters)

    with steering_ctx(
        dataarg=workflow_workspace,
        dataopts=dataopts,
        initdata=initdata,
        visualize=True,
        updateinterval=5,
        loginterval=5,
        backend=cap_backend,
        accept_metadir="accept_metadir" in operational_options,
        **workflow_kwargs,
    ) as ys:
        log.info("running workflow on context: {0}".format(locals()))
        publisher.publish_workflow_status(workflow_uuid, 1)
        ys.adage_argument(additional_trackers=[REANATracker(identifier=workflow_uuid)])
        publisher.publish_workflow_status(workflow_uuid, 2)
def run_yadage_workflow_engine_adapter(
    publisher,
    rjc_api_client,
    workflow_uuid=None,
    workflow_workspace=None,
    workflow_json=None,
    workflow_parameters=None,
    operational_options=None,
    **kwargs,
):
    """Run a ``yadage`` workflow.

    Publishes the "running" status before steering and the final status
    after the steering context (and yadage visualization) has completed.
    Requires ``operational_options`` to contain ``initdir`` and
    ``initfiles`` keys.
    """
    # Bug fix: mutable default argument ({}) is shared across calls; use
    # None as the sentinel instead (backward-compatible).
    if operational_options is None:
        operational_options = {}
    os.environ["workflow_uuid"] = workflow_uuid
    os.environ["workflow_workspace"] = workflow_workspace
    os.umask(REANA_WORKFLOW_UMASK)

    tracker = REANATracker(identifier=workflow_uuid, publisher=publisher)
    tracker.publish_workflow_running_status()

    cap_backend = setupbackend_fromstring("fromenv")
    workflow_kwargs = dict(workflow_json=workflow_json)
    dataopts = {"initdir": operational_options["initdir"]}

    initdata = {}
    for initfile in operational_options["initfiles"]:
        with open(initfile) as stream:
            initdata.update(**yaml.safe_load(stream))
    initdata.update(workflow_parameters)

    with steering_ctx(
        dataarg=workflow_workspace,
        dataopts=dataopts,
        initdata=initdata,
        visualize=True,
        updateinterval=WORKFLOW_TRACKING_UPDATE_INTERVAL_SECONDS,
        loginterval=LOG_INTERVAL_SECONDS,
        backend=cap_backend,
        accept_metadir="accept_metadir" in operational_options,
        **workflow_kwargs,
    ) as ys:
        log.debug(f"running workflow on context: {locals()}")
        ys.adage_argument(additional_trackers=[tracker])
    # hack to publish finished workflow status AFTER Yadage visualization is done.
    tracker.publish_workflow_final_status()
def multiproc_backend():
    """Build a packtivity backend that runs tasks in 4 worker processes."""
    return setupbackend_fromstring('multiproc:4')
def run_yadage_workflow(workflow_uuid, workflow_workspace,
                        workflow_json=None, workflow_file=None,
                        workflow_parameters=None):
    """Run a ``yadage`` workflow and publish its status to REANA.

    The workflow can be supplied as an already validated JSON document
    (``workflow_json``) or as a spec file relative to the workspace
    (``workflow_file``). Published status codes: 1 = running, 2 = finished,
    3 = failed.
    """
    log.info('getting socket..')

    workflow_workspace = '{0}/{1}'.format(SHARED_VOLUME_PATH,
                                          workflow_workspace)
    # use some shared object between tasks.
    os.environ["workflow_uuid"] = workflow_uuid
    os.environ["workflow_workspace"] = workflow_workspace
    os.umask(REANA_WORKFLOW_UMASK)

    cap_backend = setupbackend_fromstring('fromenv')
    toplevel = os.getcwd()
    workflow = None

    if workflow_json:
        # When `yadage` is launched using an already validated workflow file.
        workflow_kwargs = dict(workflow_json=workflow_json)
    elif workflow:
        # When `yadage` resolves the workflow file from a remote repository:
        # i.e. github:reanahub/reana-demo-root6-roofit/workflow.yaml
        # NOTE(review): `workflow` is always None here, so this branch is
        # unreachable -- confirm whether it was meant to be a parameter.
        workflow_kwargs = dict(workflow=workflow, toplevel=toplevel)
    elif workflow_file:
        workflow_file_abs_path = os.path.join(workflow_workspace, workflow_file)
        if os.path.exists(workflow_file_abs_path):
            schema_name = 'yadage/workflow-schema'
            schemadir = None
            specopts = {
                'toplevel': workflow_workspace,
                'schema_name': schema_name,
                'schemadir': schemadir,
                'load_as_ref': False,
            }
            validopts = {
                'schema_name': schema_name,
                'schemadir': schemadir,
            }
            workflow_json = yadageschemas.load(spec=workflow_file,
                                               specopts=specopts,
                                               validopts=validopts,
                                               validate=True)
            workflow_kwargs = dict(workflow_json=workflow_json)
    dataopts = {'initdir': workflow_workspace}

    # Bug fix: bind `publisher` before the try-block so the handler's
    # `if publisher:` check cannot raise NameError when
    # check_connection_to_job_controller() or the constructor fails.
    publisher = None
    try:
        check_connection_to_job_controller()
        publisher = REANAWorkflowStatusPublisher()
        with steering_ctx(
                dataarg=workflow_workspace,
                dataopts=dataopts,
                initdata=workflow_parameters if workflow_parameters else {},
                visualize=True,
                updateinterval=5,
                loginterval=5,
                backend=cap_backend,
                **workflow_kwargs) as ys:
            log.info('running workflow on context: {0}'.format(locals()))
            publisher.publish_workflow_status(workflow_uuid, 1)
            ys.adage_argument(
                additional_trackers=[REANATracker(identifier=workflow_uuid)])
            publisher.publish_workflow_status(workflow_uuid, 2)
            log.info('Workflow {workflow_uuid} finished. Files available '
                     'at {workflow_workspace}.'.format(
                         workflow_uuid=workflow_uuid,
                         workflow_workspace=workflow_workspace))
    except Exception as e:
        log.info('workflow failed: {0}'.format(e), exc_info=True)
        if publisher:
            publisher.publish_workflow_status(
                workflow_uuid, 3, logs='workflow failed: {0}'.format(e))
        else:
            log.error(
                'Workflow {workflow_uuid} failed but status '
                'could not be published.'.format(workflow_uuid=workflow_uuid))
def checksum_cached_multiproc(tmpdir):
    """Return a 4-worker multiproc backend whose results are cached by
    checksum in ``cache.json`` under *tmpdir*."""
    cache_path = str(tmpdir.join("cache.json"))
    cached = setupbackend_fromstring("multiproc:4")
    cached.enable_cache("checksums:%s" % cache_path)
    return cached
def test_nonexisting_cache():
    """``enable_cache`` must raise RuntimeError for an unknown cache scheme."""
    # Bug fix: construct the backend OUTSIDE pytest.raises -- only the
    # statement expected to raise belongs inside, otherwise an unrelated
    # RuntimeError from backend setup would make the test pass spuriously.
    backend = setupbackend_fromstring('multiproc:4')
    with pytest.raises(RuntimeError):
        backend.enable_cache('nonexistent:config')
def test_mytest():
    """setupbackend_fromstring('multiproc:N') yields exactly a
    PacktivityBackend instance."""
    backend = setupbackend_fromstring("multiproc:4")
    # `type(x) is T` is the idiomatic exact-type check; `==` on classes is
    # flagged by linters (E721) and relies on equality where identity is meant.
    assert type(backend) is PacktivityBackend
def foregroundasync_backend():
    """Build a packtivity backend that runs tasks in the foreground
    asynchronously."""
    return setupbackend_fromstring('foregroundasync')
def multiproc_backend():
    """Provide a 4-process multiproc packtivity backend."""
    four_proc = setupbackend_fromstring('multiproc:4')
    return four_proc
def checksum_cached_multiproc(tmpdir):
    """Provide a 4-process multiproc backend with checksum-based result
    caching stored under *tmpdir*."""
    cachefile = tmpdir.join('cache.json')
    cached_backend = setupbackend_fromstring('multiproc:4')
    cached_backend.enable_cache('checksums:{0}'.format(cachefile))
    return cached_backend
def checksum_cached_multiproc(tmpdir):
    """A multiproc (4 worker) backend caching results by checksum in a
    JSON file inside *tmpdir*."""
    backend_with_cache = setupbackend_fromstring('multiproc:4')
    cache_location = str(tmpdir.join('cache.json'))
    backend_with_cache.enable_cache('checksums:' + cache_location)
    return backend_with_cache
def test_mytest():
    """The multiproc backend factory returns exactly a PacktivityBackend."""
    backend = setupbackend_fromstring('multiproc:4')
    # `is` expresses the intended exact-type identity check; `type(x) == T`
    # is a linter smell (E721).
    assert type(backend) is PacktivityBackend
def test_nonexisting_cache():
    """An unknown cache-config scheme must raise RuntimeError."""
    # Bug fix: keep only the statement under test inside pytest.raises;
    # with backend construction inside the block, a RuntimeError raised by
    # setupbackend_fromstring would make the test pass for the wrong reason.
    backend = setupbackend_fromstring("multiproc:4")
    with pytest.raises(RuntimeError):
        backend.enable_cache("nonexistent:config")
def main(
    dataarg,
    workflow,
    initfiles,
    controller,
    ctrlopt,
    toplevel,
    verbosity,
    loginterval,
    updateinterval,
    schemadir,
    backend,
    dataopt,
    wflowopt,
    backendopt,
    strategy,
    strategyopt,
    modelsetup,
    modelopt,
    metadir,
    parameter,
    validate,
    visualize,
    cache,
    plugins,
    accept_metadir,
):
    """CLI entry point: parse ``key=value`` option strings, build the
    backend, run the workflow, and convert failure into a ClickException
    carrying the failure return code."""
    # Convenience default: pick up ./input.yml when no init file was given.
    if os.path.exists("input.yml") and not initfiles:
        initfiles = ("input.yml",)

    logging.basicConfig(level=getattr(logging, verbosity), format=LOGFORMAT)

    from packtivity.plugins import enable_plugins

    if plugins:
        enable_plugins(plugins.split(","))

    initdata = utils.getinit_data(initfiles, parameter)
    dataarg = utils.coerce_data_arg(dataarg)
    dataopts = utils.options_from_eqdelimstring(dataopt)
    backendopts = utils.options_from_eqdelimstring(backendopt)
    modelopts = utils.options_from_eqdelimstring(modelopt)
    ctrlopts = utils.options_from_eqdelimstring(ctrlopt)
    wflowopts = utils.options_from_eqdelimstring(wflowopt)
    strategyopts = utils.options_from_eqdelimstring(strategyopt)
    backend = utils.setupbackend_fromstring(backend, backendopts)

    rc = RC_FAILED
    try:
        steering_api.run_workflow(
            workflow=workflow,
            toplevel=toplevel,
            validate=validate,
            schemadir=schemadir,
            initdata=initdata,
            wflowopts=wflowopts,
            controller=controller,
            ctrlopts=ctrlopts,
            backend=backend,
            cache=cache,
            dataarg=dataarg,
            dataopts=dataopts,
            metadir=metadir,
            accept_metadir=accept_metadir,
            modelsetup=modelsetup,
            modelopts=modelopts,
            updateinterval=updateinterval,
            loginterval=loginterval,
            visualize=visualize,
            strategy=strategy,
            strategyopts=strategyopts,
        )
        rc = RC_SUCCEEDED
    except Exception:
        # Bug fix: narrowed from a bare ``except:`` which also intercepted
        # SystemExit/KeyboardInterrupt and misreported them as failures.
        if rc != RC_SUCCEEDED:
            # Only show the full traceback at DEBUG-ish verbosity.
            if logging.root.level < logging.INFO:
                log.exception("workflow failed")
            exc = click.exceptions.ClickException(
                click.style("Workflow failed: {}".format(sys.exc_info()[1]),
                            fg="red"))
            exc.exit_code = rc
            raise exc
def run_yadage_workflow(
    workflow_uuid,
    workflow_workspace,
    workflow_file,
    workflow_parameters=None,
    operational_options={},
):
    """Run a ``yadage`` workflow loaded from *workflow_file* and publish its
    status to REANA.

    Requires ``operational_options`` to contain ``toplevel`` and ``initdir``.
    Published status codes: 1 = running, 2 = finished, 3 = failed.
    NOTE(review): the mutable default for ``operational_options`` is kept for
    interface compatibility; the dict is only read here, never mutated.
    """
    log.info("getting socket..")

    workflow_workspace = "{0}/{1}".format(SHARED_VOLUME_PATH, workflow_workspace)
    # use some shared object between tasks.
    os.environ["workflow_uuid"] = workflow_uuid
    os.environ["workflow_workspace"] = workflow_workspace
    os.umask(REANA_WORKFLOW_UMASK)

    cap_backend = setupbackend_fromstring("fromenv")
    workflow_file_abs_path = os.path.join(workflow_workspace, workflow_file)
    publisher = REANAWorkflowStatusPublisher()
    try:
        # Guard clause: fail fast when the spec file is missing.
        if not os.path.exists(workflow_file_abs_path):
            raise Exception(
                f"Workflow file {workflow_file} does not exist")

        schema_name = "yadage/workflow-schema"
        schemadir = None
        specopts = {
            "toplevel": operational_options["toplevel"],
            "schema_name": schema_name,
            "schemadir": schemadir,
            "load_as_ref": False,
        }
        validopts = {
            "schema_name": schema_name,
            "schemadir": schemadir,
        }
        workflow_json = yadageschemas.load(
            spec=workflow_file,
            specopts=specopts,
            validopts=validopts,
            validate=True,
        )
        workflow_kwargs = dict(workflow_json=workflow_json)
        dataopts = {"initdir": operational_options["initdir"]}

        check_connection_to_job_controller()
        with steering_ctx(
            dataarg=workflow_workspace,
            dataopts=dataopts,
            initdata=workflow_parameters if workflow_parameters else {},
            visualize=True,
            updateinterval=5,
            loginterval=5,
            backend=cap_backend,
            **workflow_kwargs,
        ) as ys:
            log.info("running workflow on context: {0}".format(locals()))
            publisher.publish_workflow_status(workflow_uuid, 1)
            ys.adage_argument(
                additional_trackers=[REANATracker(identifier=workflow_uuid)])
            publisher.publish_workflow_status(workflow_uuid, 2)
            # Fix: reconstructed this log message -- the original string
            # literal was broken across a line boundary.
            log.info("Workflow {workflow_uuid} finished. Files available "
                     "at {workflow_workspace}.".format(
                         workflow_uuid=workflow_uuid,
                         workflow_workspace=workflow_workspace))
    except Exception as e:
        log.error("Workflow failed: {0}".format(e), exc_info=True)
        if publisher:
            publisher.publish_workflow_status(
                workflow_uuid, 3, logs="workflow failed: {0}".format(e))
        else:
            log.error(
                "Workflow {workflow_uuid} failed but status "
                "could not be published.".format(workflow_uuid=workflow_uuid))
def foregroundasync_backend():
    """Provide a foreground-async packtivity backend."""
    fg_backend = setupbackend_fromstring('foregroundasync')
    return fg_backend