def start_analysis(workflow_id=None):
    """Start an analysis workflow."""
    token = current_app.config.get('REANA_ACCESS_TOKEN')
    parameters = {
        "parameters": {
            "did": 404958,
            "xsec_in_pb": 0.00122,
            "dxaod_file": "https://recastwww.web.cern.ch/recastwww/data"
                          + "/reana-recast-demo/"
                          + "mc15_13TeV.123456.cap_recast_demo_signal_one.root",
        }
    }
    response = start_workflow(workflow_id, token, parameters)
    return jsonify(response)
def start_analysis(workflow_id=None):
    """Start an analysis workflow."""
    uuid = ReanaJob.get_record_from_workflow_id(workflow_id)
    token = get_token(uuid)
    parameters = {
        "parameters": {
            "did": 404958,
            "xsec_in_pb": 0.00122,
            "dxaod_file": "https://recastwww.web.cern.ch/recastwww/data"
                          + "/reana-recast-demo/"
                          + "mc15_13TeV.123456.cap_recast_demo_signal_one.root",
        }
    }
    response = start_workflow(workflow_id, token, parameters)
    return jsonify(response)
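# A minimal wiring sketch for the view above, assuming it lives in a Flask
# application. The blueprint name and URL rule are hypothetical illustrations,
# not part of the original code; `start_analysis` is the function defined above.
from flask import Blueprint, Flask

analyses_blueprint = Blueprint("analyses", __name__)
analyses_blueprint.add_url_rule(
    "/analyses/<workflow_id>/start",
    view_func=start_analysis,
    methods=["POST"],
)

app = Flask(__name__)
app.register_blueprint(analyses_blueprint)
# A POST to /analyses/<workflow_id>/start would then submit the demo
# parameters to REANA via start_workflow() and return its JSON response.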
def workflow_restart(
    ctx, workflow, access_token, parameters, options, file
):  # noqa: D301
    """Restart a previously run workflow.

    The ``restart`` command allows you to restart a previously run workflow on
    the same workspace.

    Note that workflow restarting can be used in combination with the
    operational options ``FROM`` and ``TARGET``. You can also pass a modified
    workflow specification with the ``-f`` or ``--file`` flag.

    You can furthermore pass modified input parameters using the ``-p`` or
    ``--parameters`` flag and set additional operational options using ``-o``
    or ``--options``. The input parameters and operational options can be
    repeated.

    Examples: \n
    \t $ reana-client restart -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client restart -w myanalysis.42 -p myparam=myvalue\n
    \t $ reana-client restart -w myanalysis.42 -o TARGET=gendata\n
    \t $ reana-client restart -w myanalysis.42 -o FROM=fitdata
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        get_workflow_parameters,
        get_workflow_status,
        start_workflow,
    )

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    parsed_parameters = {
        "input_parameters": parameters,
        "operational_options": options,
        "restart": True,
    }
    if file:
        parsed_parameters["reana_specification"] = load_reana_spec(
            click.format_filename(file)
        )
    if workflow:
        if parameters or options:
            try:
                if "reana_specification" in parsed_parameters:
                    workflow_type = parsed_parameters["reana_specification"][
                        "workflow"
                    ]["type"]
                    original_parameters = (
                        parsed_parameters["reana_specification"]
                        .get("inputs", {})
                        .get("parameters", {})
                    )
                else:
                    response = get_workflow_parameters(workflow, access_token)
                    workflow_type = response["type"]
                    original_parameters = response["parameters"]
                parsed_parameters["operational_options"] = validate_operational_options(
                    workflow_type, parsed_parameters["operational_options"]
                )
                parsed_parameters["input_parameters"] = validate_input_parameters(
                    parsed_parameters["input_parameters"], original_parameters
                )
            except REANAValidationError as e:
                display_message(e.message, msg_type="error")
                sys.exit(1)
            except Exception as e:
                display_message(
                    "Could not apply given input parameters: "
                    "{0} \n{1}".format(parameters, str(e)),
                    msg_type="error",
                )
        try:
            logging.info("Connecting to {0}".format(get_api_url()))
            response = start_workflow(workflow, access_token, parsed_parameters)
            workflow = response["workflow_name"] + "." + str(response["run_number"])
            current_status = get_workflow_status(workflow, access_token).get("status")
            display_message(
                get_workflow_status_change_msg(workflow, current_status),
                msg_type="success",
            )
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            display_message(
                "Cannot start workflow {}: \n{}".format(workflow, str(e)),
                msg_type="error",
            )
            if "invoked_by_subcommand" in ctx.parent.__dict__:
                sys.exit(1)
def workflow_start(
    ctx, workflow, access_token, parameters, options, follow
):  # noqa: D301
    """Start a previously created workflow.

    The ``start`` command allows you to start a previously created workflow.
    The workflow execution can be further influenced by passing input
    parameters using the ``-p`` or ``--parameters`` flag and by setting
    additional operational options using ``-o`` or ``--options``. The input
    parameters and operational options can be repeated. For example, to
    disable caching for the Serial workflow engine, you can set
    ``-o CACHE=off``.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        get_workflow_parameters,
        get_workflow_status,
        start_workflow,
    )

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    parsed_parameters = {
        "input_parameters": parameters,
        "operational_options": options,
    }
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                workflow_type = response["type"]
                original_parameters = response["parameters"]
                validate_operational_options(
                    workflow_type, parsed_parameters["operational_options"]
                )
                parsed_parameters["input_parameters"] = validate_input_parameters(
                    parsed_parameters["input_parameters"], original_parameters
                )
            except REANAValidationError as e:
                display_message(e.message, msg_type="error")
                sys.exit(1)
            except Exception as e:
                display_message(
                    "Could not apply given input parameters: "
                    "{0} \n{1}".format(parameters, str(e)),
                    msg_type="error",
                )
        try:
            logging.info("Connecting to {0}".format(get_api_url()))
            response = start_workflow(workflow, access_token, parsed_parameters)
            current_status = get_workflow_status(workflow, access_token).get("status")
            display_message(
                get_workflow_status_change_msg(workflow, current_status),
                msg_type="success",
            )
            if follow:
                while "running" in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(workflow, access_token).get(
                        "status"
                    )
                    display_message(
                        get_workflow_status_change_msg(workflow, current_status),
                        msg_type="success",
                    )
                    if "finished" in current_status:
                        if follow:
                            display_message(
                                "Listing workflow output files...",
                                msg_type="info",
                            )
                            ctx.invoke(
                                get_files,
                                workflow=workflow,
                                access_token=access_token,
                                output_format="url",
                            )
                        sys.exit(0)
                    elif "failed" in current_status or "stopped" in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            display_message(
                "Cannot start workflow {}: \n{}".format(workflow, str(e)),
                msg_type="error",
            )
            if "invoked_by_subcommand" in ctx.parent.__dict__:
                sys.exit(1)
def cwl_runner(ctx, quiet, outdir, basedir, processfile, jobfile, access_token):
    """Run CWL files in a standard format <workflow.cwl> <job.json>."""
    import json

    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        create_workflow,
        get_workflow_logs,
        start_workflow,
        upload_file,
    )

    logging.basicConfig(
        format="[%(levelname)s] %(message)s",
        stream=sys.stderr,
        level=logging.INFO if quiet else logging.DEBUG,
    )
    try:
        basedir = basedir or os.path.abspath(os.path.dirname(processfile))
        reana_spec = {"workflow": {"type": "cwl"}}
        job = {}
        if jobfile:
            with open(jobfile) as f:
                job = yaml.load(f, Loader=yaml.FullLoader)
        if processfile:
            reana_spec["inputs"] = {"parameters": job}
            reana_spec["workflow"]["specification"] = load_workflow_spec(
                reana_spec["workflow"]["type"], processfile
            )
        reana_spec["workflow"]["specification"] = replace_location_in_cwl_spec(
            reana_spec["workflow"]["specification"]
        )
        logging.info("Connecting to {0}".format(get_api_url()))
        reana_specification = json.loads(json.dumps(reana_spec, sort_keys=True))
        response = create_workflow(reana_specification, "cwl-test", access_token)
        logging.error(response)
        workflow_name = response["workflow_name"]
        workflow_id = response["workflow_id"]
        logging.info(
            "Workflow {0}/{1} has been created.".format(workflow_name, workflow_id)
        )
        file_dependencies_list = []
        for cwlobj in [processfile, jobfile]:
            if not cwlobj:
                continue
            file_dependencies_obj = get_file_dependencies_obj(cwlobj, basedir)
            file_dependencies_list.append(file_dependencies_obj)
        files_to_upload = findfiles(file_dependencies_list)
        upload_files(files_to_upload, basedir, workflow_id, access_token)
        response = start_workflow(
            workflow_id, access_token, reana_spec["inputs"]["parameters"]
        )
        logging.error(response)
        first_logs = ""
        while True:
            sleep(1)
            logging.error("Polling workflow logs")
            response = get_workflow_logs(workflow_id, access_token)
            logs = response["logs"]
            if logs != first_logs:
                logging.error(logs[len(first_logs):])
                first_logs = logs
            if (
                "Final process status" in logs
                or "Traceback (most recent call last)" in logs
            ):
                # click.echo(response['status'])
                break
        try:
            out = (
                re.search(r"FinalOutput[\s\S]*?FinalOutput", logs)
                .group()
                .replace("FinalOutput", "")
            )
            json_output = out.encode("utf8").decode("unicode_escape")
        except AttributeError:
            logging.error("Workflow execution failed")
            sys.exit(1)
        except Exception:
            logging.error(traceback.format_exc())
            sys.exit(1)
        sys.stdout.write(json_output)
        sys.stdout.write("\n")
        sys.stdout.flush()
    except HTTPServerError as e:
        logging.error(traceback.print_exc())
        logging.error(e)
    except Exception:
        logging.error(traceback.print_exc())
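# Self-contained sketch of the log-scraping step in the cwl_runner above: the
# final CWL output is expected between two "FinalOutput" markers in the text
# logs and is unescaped before being written to stdout. The sample log string
# below is made up for illustration.
import re

sample_logs = 'Final process status is success\nFinalOutput{"out": {"class": "File"}}FinalOutput'
final_block = (
    re.search(r"FinalOutput[\s\S]*?FinalOutput", sample_logs)
    .group()
    .replace("FinalOutput", "")
)
json_output = final_block.encode("utf8").decode("unicode_escape")
print(json_output)  # {"out": {"class": "File"}}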
def workflow_start(ctx, workflow, access_token, parameters, options, follow):  # noqa: D301
    """Start a previously created workflow.

    The `start` command allows you to start a previously created workflow. The
    workflow execution can be further influenced by passing input parameters
    using the `-p` or `--parameters` flag and by setting additional operational
    options using `-o` or `--options`. The input parameters and operational
    options can be repeated. For example, to disable caching for the Serial
    workflow engine, you can set `-o CACHE=off`.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))

    if not access_token:
        click.echo(
            click.style(ERROR_MESSAGES['missing_access_token'], fg='red'),
            err=True)
        sys.exit(1)

    parsed_parameters = {
        'input_parameters': dict(p.split('=') for p in parameters)
    }
    parsed_parameters['operational_options'] = ' '.join(options).split()
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                if response['type'] == 'cwl':
                    validate_cwl_operational_options(
                        parsed_parameters['operational_options'])
                if response['type'] == 'serial':
                    parsed_parameters['operational_options'] = \
                        validate_serial_operational_options(
                            parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        response['parameters'])
            except Exception as e:
                click.echo(click.style(
                    'Could not apply given input parameters: '
                    '{0} \n{1}'.format(parameters, str(e))), err=True)
        try:
            logging.info('Connecting to {0}'.format(
                current_rs_api_client.swagger_spec.api_url))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
            if follow:
                while 'running' in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(
                        workflow, access_token).get('status')
                    click.secho(get_workflow_status_change_msg(
                        workflow, current_status), fg='green')
                    if 'finished' in current_status:
                        if follow:
                            click.secho(
                                '[INFO] Listing workflow output files...',
                                bold=True)
                            ctx.invoke(get_files,
                                       workflow=workflow,
                                       access_token=access_token,
                                       output_format='url')
                        sys.exit(0)
                    elif 'failed' in current_status or \
                            'stopped' in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style(
                'Workflow could not be started: \n{}'.format(str(e)),
                fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def workflow_restart(ctx, workflow, access_token, parameters, options, file):  # noqa: D301
    """Restart a previously run workflow.

    The `restart` command allows you to restart a previously run workflow on
    the same workspace.

    Note that workflow restarting can be used in combination with the
    operational options ``FROM`` and ``TARGET``. You can also pass a modified
    workflow specification with the ``-f`` or ``--file`` flag.

    You can furthermore pass modified input parameters using the `-p` or
    `--parameters` flag and set additional operational options using `-o` or
    `--options`. The input parameters and operational options can be repeated.

    Examples: \n
    \t $ reana-client restart -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client restart -w myanalysis.42 -p myparam=myvalue\n
    \t $ reana-client restart -w myanalysis.42 -o TARGET=gendata\n
    \t $ reana-client restart -w myanalysis.42 -o FROM=fitdata
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (get_workflow_parameters,
                                         get_workflow_status, start_workflow)

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))

    parsed_parameters = {
        'input_parameters': parameters,
        'operational_options': options,
        'restart': True
    }
    if file:
        parsed_parameters['reana_specification'] = \
            load_reana_spec(click.format_filename(file))
    if workflow:
        if parameters or options:
            try:
                if 'reana_specification' in parsed_parameters:
                    workflow_type = \
                        parsed_parameters['reana_specification']['workflow'][
                            'type']
                    original_parameters = \
                        parsed_parameters['reana_specification'].get(
                            'inputs', {}).get('parameters', {})
                else:
                    response = get_workflow_parameters(workflow, access_token)
                    workflow_type = response['type']
                    original_parameters = response['parameters']

                parsed_parameters['operational_options'] = \
                    validate_operational_options(
                        workflow_type,
                        parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        original_parameters)
            except REANAValidationError as e:
                click.secho(e.message, err=True, fg='red')
                sys.exit(1)
            except Exception as e:
                click.secho('Could not apply given input parameters: '
                            '{0} \n{1}'.format(parameters, str(e)), err=True)
        try:
            logging.info('Connecting to {0}'.format(get_api_url()))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            workflow = response['workflow_name'] + '.' + \
                str(response['run_number'])
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style('Cannot start workflow {}: \n{}'.format(
                workflow, str(e)), fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def workflow_start(ctx, workflow, access_token, parameters, options, follow):  # noqa: D301
    """Start a previously created workflow.

    The `start` command allows you to start a previously created workflow. The
    workflow execution can be further influenced by passing input parameters
    using the `-p` or `--parameters` flag and by setting additional operational
    options using `-o` or `--options`. The input parameters and operational
    options can be repeated. For example, to disable caching for the Serial
    workflow engine, you can set `-o CACHE=off`.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (get_workflow_parameters,
                                         get_workflow_status, start_workflow)

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))

    parsed_parameters = {
        'input_parameters': parameters,
        'operational_options': options
    }
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                workflow_type = response['type']
                original_parameters = response['parameters']
                validate_operational_options(
                    workflow_type, parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        original_parameters)
            except REANAValidationError as e:
                click.secho(e.message, err=True, fg='red')
                sys.exit(1)
            except Exception as e:
                click.secho('Could not apply given input parameters: '
                            '{0} \n{1}'.format(parameters, str(e)), err=True)
        try:
            logging.info('Connecting to {0}'.format(get_api_url()))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
            if follow:
                while 'running' in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(
                        workflow, access_token).get('status')
                    click.secho(get_workflow_status_change_msg(
                        workflow, current_status), fg='green')
                    if 'finished' in current_status:
                        if follow:
                            click.secho(
                                '[INFO] Listing workflow output files...',
                                bold=True)
                            ctx.invoke(get_files,
                                       workflow=workflow,
                                       access_token=access_token,
                                       output_format='url')
                        sys.exit(0)
                    elif 'failed' in current_status or \
                            'stopped' in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style('Cannot start workflow {}: \n{}'.format(
                workflow, str(e)), fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def cwl_runner(ctx, quiet, outdir, basedir, processfile, jobfile, access_token):
    """Run CWL files in a standard format <workflow.cwl> <job.json>."""
    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        create_workflow,
        get_workflow_logs,
        start_workflow,
        upload_file,
    )

    logging.basicConfig(
        format="[%(levelname)s] %(message)s",
        stream=sys.stderr,
        level=logging.INFO if quiet else logging.DEBUG,
    )
    try:
        basedir = basedir or os.path.abspath(os.path.dirname(processfile))
        if processfile:
            with open(jobfile) as f:
                reana_spec = {
                    "workflow": {"type": "cwl"},
                    "inputs": {
                        "parameters": {"input": yaml.load(f, Loader=yaml.FullLoader)}
                    },
                }
            reana_spec["workflow"]["spec"] = load_workflow_spec(
                reana_spec["workflow"]["type"], processfile,
            )
        else:
            with open(jobfile) as f:
                job = yaml.load(f, Loader=yaml.FullLoader)
            # "inputs" must exist here so the job parameters can be attached
            # below once "cwl:tool" has been stripped from the job document.
            reana_spec = {
                "workflow": {"type": "cwl"},
                "inputs": {"parameters": {"input": ""}},
            }
            reana_spec["workflow"]["spec"] = load_workflow_spec(
                reana_spec["workflow"]["type"], job["cwl:tool"]
            )
            del job["cwl:tool"]
            reana_spec["inputs"]["parameters"] = {"input": job}
        reana_spec["workflow"]["spec"] = replace_location_in_cwl_spec(
            reana_spec["workflow"]["spec"]
        )
        logging.info("Connecting to {0}".format(get_api_url()))
        response = create_workflow(reana_spec, "cwl-test", access_token)
        logging.error(response)
        workflow_name = response["workflow_name"]
        workflow_id = response["workflow_id"]
        logging.info(
            "Workflow {0}/{1} has been created.".format(workflow_name, workflow_id)
        )
        file_dependencies_list = []
        for cwlobj in [processfile, jobfile]:
            file_dependencies_list.append(get_file_dependencies_obj(cwlobj, basedir))
        files_to_upload = findfiles(file_dependencies_list)
        for cwl_file_object in files_to_upload:
            file_path = cwl_file_object.get("location")
            abs_file_path = os.path.join(basedir, file_path)
            with open(abs_file_path, "r") as f:
                upload_file(workflow_id, f, file_path, access_token)
                logging.error("File {} uploaded.".format(file_path))
        response = start_workflow(
            workflow_id, access_token, reana_spec["inputs"]["parameters"]
        )
        logging.error(response)
        first_logs = ""
        while True:
            sleep(1)
            logging.error("Polling workflow logs")
            response = get_workflow_logs(workflow_id, access_token)
            logs = response["logs"]
            if logs != first_logs:
                logging.error(logs[len(first_logs):])
                first_logs = logs
            if (
                "Final process status" in logs
                or "Traceback (most recent call last)" in logs
            ):
                # click.echo(response['status'])
                break
        try:
            out = re.search(r"success{[\S\s]*", logs).group().replace("success", "")
            import ast
            import json

            json_output = json.dumps(ast.literal_eval(str(out)))
        except AttributeError:
            logging.error("Workflow execution failed")
            sys.exit(1)
        except Exception:
            logging.error(traceback.format_exc())
            sys.exit(1)
        sys.stdout.write(json_output)
        sys.stdout.write("\n")
        sys.stdout.flush()
    except HTTPServerError as e:
        logging.error(traceback.print_exc())
        logging.error(e)
    except Exception:
        logging.error(traceback.print_exc())
def cwl_runner(ctx, quiet, outdir, basedir, processfile, jobfile, access_token):
    """Run CWL files in a standard format <workflow.cwl> <job.json>."""
    logging.basicConfig(format='[%(levelname)s] %(message)s',
                        stream=sys.stderr,
                        level=logging.INFO if quiet else logging.DEBUG)
    try:
        basedir = basedir or os.path.abspath(os.path.dirname(processfile))
        if processfile:
            with open(jobfile) as f:
                reana_spec = {
                    "workflow": {"type": "cwl"},
                    "inputs": {
                        "parameters": {
                            "input": yaml.load(f, Loader=yaml.FullLoader)
                        }
                    }
                }
            reana_spec['workflow']['spec'] = load_workflow_spec(
                reana_spec['workflow']['type'],
                processfile,
            )
        else:
            with open(jobfile) as f:
                job = yaml.load(f, Loader=yaml.FullLoader)
            # "inputs" must exist here so the job parameters can be attached
            # below once "cwl:tool" has been stripped from the job document.
            reana_spec = {
                "workflow": {"type": "cwl"},
                "inputs": {"parameters": {"input": ""}}
            }
            reana_spec['workflow']['spec'] = load_workflow_spec(
                reana_spec['workflow']['type'],
                job['cwl:tool'])
            del job['cwl:tool']
            reana_spec['inputs']['parameters'] = {'input': job}
        reana_spec['workflow']['spec'] = replace_location_in_cwl_spec(
            reana_spec['workflow']['spec'])
        logging.info('Connecting to {0}'.format(
            current_rs_api_client.swagger_spec.api_url))
        response = create_workflow(reana_spec, 'cwl-test', access_token)
        logging.error(response)
        workflow_name = response['workflow_name']
        workflow_id = response['workflow_id']
        logging.info('Workflow {0}/{1} has been created.'.format(
            workflow_name, workflow_id))
        file_dependencies_list = []
        for cwlobj in [processfile, jobfile]:
            file_dependencies_list.append(
                get_file_dependencies_obj(cwlobj, basedir))
        files_to_upload = findfiles(file_dependencies_list)
        for cwl_file_object in files_to_upload:
            file_path = cwl_file_object.get('location')
            abs_file_path = os.path.join(basedir, file_path)
            with open(abs_file_path, 'r') as f:
                upload_file(workflow_id, f, file_path, access_token)
                logging.error('File {} uploaded.'.format(file_path))
        response = start_workflow(workflow_id, access_token,
                                  reana_spec['inputs']['parameters'])
        logging.error(response)
        first_logs = ""
        while True:
            sleep(1)
            logging.error('Polling workflow logs')
            response = get_workflow_logs(workflow_id, access_token)
            logs = response['logs']
            if logs != first_logs:
                logging.error(logs[len(first_logs):])
                first_logs = logs
            if "Final process status" in logs or \
                    "Traceback (most recent call last)" in logs:
                # click.echo(response['status'])
                break
        try:
            out = re.search(r"success{[\S\s]*",
                            logs).group().replace("success", "")
            import ast
            import json
            json_output = json.dumps(ast.literal_eval(str(out)))
        except AttributeError:
            logging.error("Workflow execution failed")
            sys.exit(1)
        except Exception:
            logging.error(traceback.format_exc())
            sys.exit(1)
        sys.stdout.write(json_output)
        sys.stdout.write("\n")
        sys.stdout.flush()
    except HTTPServerError as e:
        logging.error(traceback.print_exc())
        logging.error(e)
    except Exception:
        logging.error(traceback.print_exc())
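# Self-contained sketch of the older log-scraping variant used by the two
# cwl_runner functions above: the final output is recovered from the logs by
# matching the "success{...}" block and re-serialising it as JSON. The sample
# log string is made up for illustration.
import ast
import json
import re

sample_logs = "INFO Final process status is success{'out': {'class': 'File'}}"
out = re.search(r"success{[\S\s]*", sample_logs).group().replace("success", "")
json_output = json.dumps(ast.literal_eval(out))
print(json_output)  # {"out": {"class": "File"}}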