def get_reana_workflow_status(workflow_id, workflow=None):
    """Get the status of a workflow.

    Resolves the record UUID behind *workflow_id*, queries REANA for the
    workflow status, persists the returned status on the local workflow
    record and returns the status payload as a JSON response.
    """
    record_uuid = resolve_uuid(workflow_id)
    access_token = get_reana_token(record_uuid)
    status_payload = get_workflow_status(workflow_id, access_token)
    update_workflow(workflow_id, 'status', status_payload['status'])
    return jsonify(status_payload)
def get_analysis_status(workflow_id=None):
    """Retrieve status of an analysis workflow.

    Queries REANA for the status of *workflow_id* using the instance-wide
    access token and returns the response as JSON, with the ``logs`` field
    decoded from its JSON-string form.
    """
    token = current_app.config.get('REANA_ACCESS_TOKEN')
    response = get_workflow_status(workflow_id, token)
    # "logs" arrives as a JSON-encoded string; decode it in place.  Guard
    # against a missing field so json.loads(None) cannot raise TypeError.
    raw_logs = response.get("logs")
    if raw_logs is not None:
        response["logs"] = json.loads(raw_logs)
    return jsonify(response)
def workflow_start(ctx, workflow, access_token, parameters, options):  # noqa: D301
    """Start previously created workflow.

    Validates the command-line input parameters and operational options
    against the workflow specification where possible, then asks the REANA
    server to start the workflow and prints the resulting status change.
    """
    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))
    if not access_token:
        click.echo(click.style(ERROR_MESSAGES['missing_access_token'],
                               fg='red'), err=True)
        sys.exit(1)
    # Split on the first '=' only, so parameter values may themselves
    # contain '=' (e.g. -p expr=a=b); an unbounded split would make
    # dict() fail on three-element sequences.
    parsed_parameters = {
        'input_parameters': dict(p.split('=', 1) for p in parameters)
    }
    parsed_parameters['operational_options'] = ' '.join(options).split()
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                if response['type'] == 'cwl':
                    validate_cwl_operational_options(
                        parsed_parameters['operational_options'])
                if response['type'] == 'serial':
                    parsed_parameters['operational_options'] = \
                        validate_serial_operational_options(
                            parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        response['parameters'])
            except Exception as e:
                # Best-effort validation: report the problem but still
                # attempt to start the workflow below.
                click.echo(click.style(
                    'Could not apply given input parameters: '
                    '{0} \n{1}'.format(parameters, str(e))), err=True)
        try:
            logging.info('Connecting to {0}'.format(
                current_rs_api_client.swagger_spec.api_url))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style(
                'Workflow could not be started: \n{}'.format(str(e)),
                fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def get_analysis_status(workflow_id=None):
    """Retrieve status of an analysis workflow.

    Resolves the record UUID for *workflow_id*, obtains the matching REANA
    token and returns the workflow status as JSON, with the ``logs`` field
    decoded from its JSON-string form.
    """
    uuid = ReanaJob.get_record_from_workflow_id(workflow_id)
    token = get_token(uuid)
    response = get_workflow_status(workflow_id, token)
    # "logs" arrives as a JSON-encoded string; decode it in place.  Guard
    # against a missing field so json.loads(None) cannot raise TypeError.
    raw_logs = response.get("logs")
    if raw_logs is not None:
        response["logs"] = json.loads(raw_logs)
    return jsonify(response)
def move_files(ctx, source, target, workflow, access_token):  # noqa: D301
    """Move files within workspace.

    The `mv` command allows to move the files within workspace.

    Examples:\n
    \t $ reana-client mv data/input.txt input/input.txt
    """
    from reana_client.api.client import get_workflow_status, list_files, mv_files

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    if workflow:
        try:
            current_status = get_workflow_status(workflow, access_token).get("status")
            # Refuse to touch the workspace of a running workflow.
            if current_status == "running":
                click.echo(
                    click.style(
                        "File(s) could not be moved for running "
                        "workflow", fg="red"),
                    err=True,
                )
                sys.exit(1)
            files = list_files(workflow, access_token)
            # NOTE: substring membership test, mirroring the original
            # behavior (a partial path component also matches).
            current_files = [f["name"] for f in files]
            if not any(source in item for item in current_files):
                click.echo(
                    click.style(
                        "Source file(s) {} does not exist in "
                        "workspace {}".format(source, current_files),
                        fg="red",
                    ),
                    err=True,
                )
                sys.exit(1)
            # Return value unused; the server either moves or raises.
            mv_files(source, target, workflow, access_token)
            click.echo(
                click.style(
                    "{} was successfully moved to {}.".format(source, target),
                    fg="green",
                ))
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style("Something went wrong. {}".format(e),
                                   fg="red"), err=True)
def move_files(ctx, source, target, workflow, access_token):  # noqa: D301
    r"""Move files within workspace.

    Examples:\n
    \t $ reana-client mv data/input.txt input/input.txt
    """
    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))
    if not access_token:
        click.echo(click.style(ERROR_MESSAGES['missing_access_token'],
                               fg='red'), err=True)
        sys.exit(1)
    if workflow:
        try:
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            # Refuse to touch the workspace of a running workflow.
            if current_status == 'running':
                click.echo(click.style(
                    'File(s) could not be moved for running '
                    'workflow', fg='red'), err=True)
                sys.exit(1)
            files = list_files(workflow, access_token)
            # NOTE: substring membership test, mirroring the original
            # behavior (a partial path component also matches).
            current_files = [f['name'] for f in files]
            if not any(source in item for item in current_files):
                click.echo(click.style('Source file(s) {} does not exist in '
                                       'workspace {}'.format(
                                           source, current_files),
                                       fg='red'), err=True)
                sys.exit(1)
            # Return value unused; the server either moves or raises.
            mv_files(source, target, workflow, access_token)
            click.echo(
                click.style('{} was successfully moved to {}.'.format(
                    source, target), fg='green'))
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style('Something went wrong. {}'.format(e),
                                   fg='red'), err=True)
def workflow_status(ctx, workflow, _filter, output_format, access_token,
                    verbose):
    """Get status of previously created workflow."""
    def render_progress(finished_jobs, total_jobs):
        # Render "finished/total", or a placeholder when totals are unknown.
        if total_jobs:
            return '{0}/{1}'.format(finished_jobs, total_jobs)
        else:
            return '-/-'

    def add_data_from_response(row, data, headers):
        # Flatten one workflow-status payload into a printable table row.
        name, run_number = get_workflow_name_and_run_number(row['name'])
        total_jobs = row['progress'].get('total')
        if total_jobs:
            total_jobs = total_jobs.get('total')
        else:
            total_jobs = 0
        finished_jobs = row['progress'].get('finished')
        if finished_jobs:
            finished_jobs = finished_jobs.get('total')
        else:
            finished_jobs = 0
        # Parenthesized explicitly: the original ``x or 0 > 0`` parsed as
        # ``x or (0 > 0)`` and reduced to a truthiness test on ``x``; the
        # intended comparison is made explicit here (same outcome for
        # non-negative ints and None).
        if (row['progress']['total'].get('total') or 0) > 0:
            if 'progress' not in headers:
                headers += ['progress']
        data.append(
            list(
                map(str, [
                    name, run_number, row['created'], row['status'],
                    render_progress(finished_jobs, total_jobs)
                ])))
        return data

    def add_verbose_data_from_response(response, verbose_headers, headers,
                                       data):
        # Append verbose columns (id, user, command) to the last table row.
        for k in verbose_headers:
            if k == 'command':
                current_command = response['progress']['current_command']
                if current_command:
                    # Strip the 'bash -c "cd <dir>; ' wrapper to show only
                    # the user command.
                    if current_command.startswith('bash -c "cd '):
                        current_command = current_command[
                            current_command.index(';') + 2:-2]
                    data[-1] += [current_command]
                else:
                    if 'current_step_name' in response['progress'] and \
                            response['progress'].get('current_step_name'):
                        current_step_name = response['progress'].\
                            get('current_step_name')
                        data[-1] += [current_step_name]
                    else:
                        # Nothing to show: drop the column entirely.
                        headers.remove('command')
            else:
                data[-1] += [response.get(k)]
        return data

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))
    if not access_token:
        click.echo(click.style(ERROR_MESSAGES['missing_access_token'],
                               fg='red'), err=True)
        sys.exit(1)

    if workflow:
        try:
            response = get_workflow_status(workflow, access_token)
            headers = ['name', 'run_number', 'created', 'status', 'progress']
            verbose_headers = ['id', 'user', 'command']
            data = []
            if not isinstance(response, list):
                response = [response]
            for workflow in response:
                add_data_from_response(workflow, data, headers)
                if verbose:
                    headers += verbose_headers
                    add_verbose_data_from_response(workflow, verbose_headers,
                                                   headers, data)
            if output_format:
                tablib_data = tablib.Dataset()
                tablib_data.headers = headers
                for row in data:
                    tablib_data.append(row)
                if _filter:
                    tablib_data = tablib_data.subset(rows=None,
                                                     cols=list(_filter))
                click.echo(tablib_data.export(output_format))
            else:
                click_table_printer(headers, _filter, data)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style(
                'Workflow status could not be retrieved: \n{}'.format(str(e)),
                fg='red'), err=True)
def workflow_status(  # noqa: C901
    ctx, workflow, _format, output_format, access_token, verbose
):  # noqa: D301
    """Get status of a workflow.

    The ``status`` command allows to retrieve status of a workflow. The status
    can be created, queued, running, failed, etc. You can increase verbosity or
    filter retrieved information by passing appropriate command-line options.

    Examples: \n
    \t $ reana-client status -w myanalysis.42 \n
    \t $ reana-client status -w myanalysis.42 -v --json
    """
    import tablib
    from reana_client.api.client import get_workflow_status

    def render_progress(finished_jobs, total_jobs):
        # Render "finished/total", or a placeholder when totals are unknown.
        if total_jobs:
            return "{0}/{1}".format(finished_jobs, total_jobs)
        else:
            return "-/-"

    def add_data_from_response(row, data, headers):
        # Flatten one workflow-status payload into a printable table row,
        # inserting optional "progress"/"started"/"ended" columns as needed.
        name, run_number = get_workflow_name_and_run_number(row["name"])
        total_jobs = row["progress"].get("total")
        if total_jobs:
            total_jobs = total_jobs.get("total")
        else:
            total_jobs = 0
        finished_jobs = row["progress"].get("finished")
        if finished_jobs:
            finished_jobs = finished_jobs.get("total")
        else:
            finished_jobs = 0
        parsed_response = list(
            map(str, [name, run_number, row["created"], row["status"]])
        )
        # Parenthesized explicitly: the original ``x or 0 > 0`` parsed as
        # ``x or (0 > 0)`` and reduced to a truthiness test on ``x``; the
        # intended comparison is made explicit here (same outcome for
        # non-negative ints and None).
        if (row["progress"]["total"].get("total") or 0) > 0:
            if "progress" not in headers:
                headers += ["progress"]
            parsed_response.append(render_progress(finished_jobs, total_jobs))

        if row["status"] in ["running", "finished", "failed", "stopped"]:
            started_at = row["progress"].get("run_started_at")
            finished_at = row["progress"].get("run_finished_at")
            if started_at:
                after_created_pos = headers.index("created") + 1
                headers.insert(after_created_pos, "started")
                parsed_response.insert(after_created_pos, started_at)
                if finished_at:
                    after_started_pos = headers.index("started") + 1
                    headers.insert(after_started_pos, "ended")
                    parsed_response.insert(after_started_pos, finished_at)
        data.append(parsed_response)
        return data

    def add_verbose_data_from_response(response, verbose_headers, headers, data):
        # Append verbose columns (id, user, command) to the last table row.
        for k in verbose_headers:
            if k == "command":
                current_command = response["progress"]["current_command"]
                if current_command:
                    # Strip the 'bash -c "cd <dir>; ' wrapper to show only
                    # the user command.
                    if current_command.startswith('bash -c "cd '):
                        current_command = current_command[
                            current_command.index(";") + 2 : -2
                        ]
                    data[-1] += [current_command]
                else:
                    if "current_step_name" in response["progress"] and response[
                        "progress"
                    ].get("current_step_name"):
                        current_step_name = response["progress"].get(
                            "current_step_name"
                        )
                        data[-1] += [current_step_name]
                    else:
                        # Nothing to show: drop the column entirely.
                        headers.remove("command")
            else:
                data[-1] += [response.get(k)]
        return data

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    if workflow:
        try:
            if _format:
                parsed_filters = parse_format_parameters(_format)
                _format = [item["column_name"] for item in parsed_filters]
            response = get_workflow_status(workflow, access_token)
            headers = ["name", "run_number", "created", "status"]
            verbose_headers = ["id", "user", "command"]
            data = []
            if not isinstance(response, list):
                response = [response]
            for workflow in response:
                add_data_from_response(workflow, data, headers)
                if verbose:
                    headers += verbose_headers
                    add_verbose_data_from_response(
                        workflow, verbose_headers, headers, data
                    )
            if output_format:
                tablib_data = tablib.Dataset()
                tablib_data.headers = headers
                for row in data:
                    tablib_data.append(row)
                if _format:
                    tablib_data = tablib_data.subset(rows=None, cols=list(_format))
                display_message(tablib_data.export(output_format))
            else:
                click_table_printer(headers, _format, data)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            display_message(
                "Cannot retrieve the status of a workflow {}: \n"
                "{}".format(workflow, str(e)),
                msg_type="error",
            )
def workflow_restart(
    ctx, workflow, access_token, parameters, options, file
):  # noqa: D301
    """Restart previously run workflow.

    The ``restart`` command allows to restart a previous workflow on the same
    workspace.

    Note that workflow restarting can be used in a combination with operational
    options ``FROM`` and ``TARGET``. You can also pass a modified workflow
    specification with ``-f`` or ``--file`` flag.

    You can furthermore use modified input parameters using ``-p`` or
    ``--parameters`` flag and by setting additional operational options using
    ``-o`` or ``--options``.  The input parameters and operational options can
    be repetitive.

    Examples: \n
    \t $ reana-client restart -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client restart -w myanalysis.42 -p myparam=myvalue\n
    \t $ reana-client restart -w myanalysis.42 -o TARGET=gendata\n
    \t $ reana-client restart -w myanalysis.42 -o FROM=fitdata
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        get_workflow_parameters,
        get_workflow_status,
        start_workflow,
    )

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    # "restart": True tells the server to re-run on the existing workspace.
    parsed_parameters = {
        "input_parameters": parameters,
        "operational_options": options,
        "restart": True,
    }
    if file:
        # A modified specification may be supplied with -f/--file; it is
        # sent along with the start request.
        parsed_parameters["reana_specification"] = load_reana_spec(
            click.format_filename(file)
        )
    if workflow:
        if parameters or options:
            try:
                # Validate against the supplied specification when given,
                # otherwise against the parameters recorded on the server.
                if "reana_specification" in parsed_parameters:
                    workflow_type = parsed_parameters["reana_specification"][
                        "workflow"
                    ]["type"]
                    original_parameters = (
                        parsed_parameters["reana_specification"]
                        .get("inputs", {})
                        .get("parameters", {})
                    )
                else:
                    response = get_workflow_parameters(workflow, access_token)
                    workflow_type = response["type"]
                    original_parameters = response["parameters"]
                parsed_parameters["operational_options"] = validate_operational_options(
                    workflow_type, parsed_parameters["operational_options"]
                )
                parsed_parameters["input_parameters"] = validate_input_parameters(
                    parsed_parameters["input_parameters"], original_parameters
                )
            except REANAValidationError as e:
                # Validation errors are fatal.
                display_message(e.message, msg_type="error")
                sys.exit(1)
            except Exception as e:
                # NOTE(review): execution continues after this message, so
                # the workflow is still started with unvalidated parameters.
                display_message(
                    "Could not apply given input parameters: "
                    "{0} \n{1}".format(parameters, str(e)),
                    msg_type="error",
                )
        try:
            logging.info("Connecting to {0}".format(get_api_url()))
            response = start_workflow(workflow, access_token, parsed_parameters)
            # Restarting creates a new run number; report the new run.
            workflow = response["workflow_name"] + "." + str(response["run_number"])
            current_status = get_workflow_status(workflow, access_token).get("status")
            display_message(
                get_workflow_status_change_msg(workflow, current_status),
                msg_type="success",
            )
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            display_message(
                "Cannot start workflow {}: \n{}".format(workflow, str(e)),
                msg_type="error",
            )
            if "invoked_by_subcommand" in ctx.parent.__dict__:
                sys.exit(1)
def workflow_start(
    ctx, workflow, access_token, parameters, options, follow
):  # noqa: D301
    """Start previously created workflow.

    The ``start`` command allows to start previously created workflow. The
    workflow execution can be further influenced by passing input parameters
    using ``-p`` or ``--parameters`` flag and by setting additional operational
    options using ``-o`` or ``--options``.  The input parameters and operational
    options can be repetitive. For example, to disable caching for the Serial
    workflow engine, you can set ``-o CACHE=off``.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (
        get_workflow_parameters,
        get_workflow_status,
        start_workflow,
    )

    logging.debug("command: {}".format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug("{param}: {value}".format(param=p, value=ctx.params[p]))

    parsed_parameters = {"input_parameters": parameters, "operational_options": options}
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                workflow_type = response["type"]
                original_parameters = response["parameters"]
                # Keep the validated/normalized options (the original
                # discarded the return value, unlike the restart command).
                parsed_parameters["operational_options"] = validate_operational_options(
                    workflow_type, parsed_parameters["operational_options"]
                )
                parsed_parameters["input_parameters"] = validate_input_parameters(
                    parsed_parameters["input_parameters"], original_parameters
                )
            except REANAValidationError as e:
                display_message(e.message, msg_type="error")
                sys.exit(1)
            except Exception as e:
                # Best-effort validation: report the problem but still
                # attempt to start the workflow below.
                display_message(
                    "Could not apply given input parameters: "
                    "{0} \n{1}".format(parameters, str(e)),
                    msg_type="error",
                )
        try:
            logging.info("Connecting to {0}".format(get_api_url()))
            response = start_workflow(workflow, access_token, parsed_parameters)
            current_status = get_workflow_status(workflow, access_token).get("status")
            display_message(
                get_workflow_status_change_msg(workflow, current_status),
                msg_type="success",
            )
            if follow:
                # Poll until the workflow leaves the "running" state.
                while "running" in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(workflow, access_token).get(
                        "status"
                    )
                    display_message(
                        get_workflow_status_change_msg(workflow, current_status),
                        msg_type="success",
                    )
                    if "finished" in current_status:
                        if follow:
                            display_message(
                                "Listing workflow output files...",
                                msg_type="info",
                            )
                            ctx.invoke(
                                get_files,
                                workflow=workflow,
                                access_token=access_token,
                                output_format="url",
                            )
                        sys.exit(0)
                    elif "failed" in current_status or "stopped" in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            display_message(
                "Cannot start workflow {}: \n{}".format(workflow, str(e)),
                msg_type="error",
            )
            if "invoked_by_subcommand" in ctx.parent.__dict__:
                sys.exit(1)
def workflow_start(ctx, workflow, access_token, parameters, options,
                   follow):  # noqa: D301
    """Start previously created workflow.

    The `start` command allows to start previously created workflow. The
    workflow execution can be further influenced by passing input parameters
    using `-p` or `--parameters` flag and by setting additional operational
    options using `-o` or `--options`.  The input parameters and operational
    options can be repetitive. For example, to disable caching for the Serial
    workflow engine, you can set `-o CACHE=off`.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))
    if not access_token:
        click.echo(click.style(ERROR_MESSAGES['missing_access_token'],
                               fg='red'), err=True)
        sys.exit(1)
    # Split on the first '=' only, so parameter values may themselves
    # contain '=' (e.g. -p expr=a=b); an unbounded split would make
    # dict() fail on three-element sequences.
    parsed_parameters = {
        'input_parameters': dict(p.split('=', 1) for p in parameters)
    }
    parsed_parameters['operational_options'] = ' '.join(options).split()
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                if response['type'] == 'cwl':
                    validate_cwl_operational_options(
                        parsed_parameters['operational_options'])
                if response['type'] == 'serial':
                    parsed_parameters['operational_options'] = \
                        validate_serial_operational_options(
                            parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        response['parameters'])
            except Exception as e:
                # Best-effort validation: report the problem but still
                # attempt to start the workflow below.
                click.echo(click.style(
                    'Could not apply given input parameters: '
                    '{0} \n{1}'.format(parameters, str(e))), err=True)
        try:
            logging.info('Connecting to {0}'.format(
                current_rs_api_client.swagger_spec.api_url))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
            if follow:
                # Poll until the workflow leaves the 'running' state.
                while 'running' in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(
                        workflow, access_token).get('status')
                    click.secho(get_workflow_status_change_msg(
                        workflow, current_status), fg='green')
                    if 'finished' in current_status:
                        if follow:
                            click.secho(
                                '[INFO] Listing workflow output '
                                'files...', bold=True)
                            ctx.invoke(get_files,
                                       workflow=workflow,
                                       access_token=access_token,
                                       output_format='url')
                        sys.exit(0)
                    elif 'failed' in current_status or \
                            'stopped' in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style(
                'Workflow could not be started: \n{}'.format(str(e)),
                fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def workflow_status(ctx, workflow, _filter, output_format, access_token,
                    verbose):  # noqa: D301
    """Get status of a workflow.

    The `status` command allow to retrieve status of a workflow. The status
    can be created, queued, running, failed, etc. You can increase verbosity or
    filter retrieved information by passing appropriate command-line options.

    Examples: \n
    \t $ reana-client status -w myanalysis.42 \n
    \t $ reana-client status -w myanalysis.42 -v --json
    """
    import tablib
    from reana_client.api.client import get_workflow_status

    def render_progress(finished_jobs, total_jobs):
        # Render "finished/total", or a placeholder when totals are unknown.
        if total_jobs:
            return '{0}/{1}'.format(finished_jobs, total_jobs)
        else:
            return '-/-'

    def add_data_from_response(row, data, headers):
        # Flatten one workflow-status payload into a printable table row,
        # inserting optional 'progress'/'started'/'ended' columns as needed.
        name, run_number = get_workflow_name_and_run_number(row['name'])
        total_jobs = row['progress'].get('total')
        if total_jobs:
            total_jobs = total_jobs.get('total')
        else:
            total_jobs = 0
        finished_jobs = row['progress'].get('finished')
        if finished_jobs:
            finished_jobs = finished_jobs.get('total')
        else:
            finished_jobs = 0
        parsed_response = list(
            map(str, [name, run_number, row['created'], row['status']]))
        # Parenthesized explicitly: the original ``x or 0 > 0`` parsed as
        # ``x or (0 > 0)`` and reduced to a truthiness test on ``x``; the
        # intended comparison is made explicit here (same outcome for
        # non-negative ints and None).
        if (row['progress']['total'].get('total') or 0) > 0:
            if 'progress' not in headers:
                headers += ['progress']
            parsed_response.append(
                render_progress(finished_jobs, total_jobs))

        if row['status'] in ['running', 'finished', 'failed', 'stopped']:
            started_at = row['progress'].get('run_started_at')
            finished_at = row['progress'].get('run_finished_at')
            if started_at:
                after_created_pos = headers.index('created') + 1
                headers.insert(after_created_pos, 'started')
                parsed_response.insert(after_created_pos, started_at)
                if finished_at:
                    after_started_pos = headers.index('started') + 1
                    headers.insert(after_started_pos, 'ended')
                    parsed_response.insert(after_started_pos, finished_at)
        data.append(parsed_response)
        return data

    def add_verbose_data_from_response(response, verbose_headers, headers,
                                       data):
        # Append verbose columns (id, user, command) to the last table row.
        for k in verbose_headers:
            if k == 'command':
                current_command = response['progress']['current_command']
                if current_command:
                    # Strip the 'bash -c "cd <dir>; ' wrapper to show only
                    # the user command.
                    if current_command.startswith('bash -c "cd '):
                        current_command = current_command[
                            current_command.index(';') + 2:-2]
                    data[-1] += [current_command]
                else:
                    if 'current_step_name' in response['progress'] and \
                            response['progress'].get('current_step_name'):
                        current_step_name = response['progress'].\
                            get('current_step_name')
                        data[-1] += [current_step_name]
                    else:
                        # Nothing to show: drop the column entirely.
                        headers.remove('command')
            else:
                data[-1] += [response.get(k)]
        return data

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))

    if workflow:
        try:
            if _filter:
                parsed_filters = parse_parameters(_filter)
                _filter = [item['column_name'] for item in parsed_filters]
            response = get_workflow_status(workflow, access_token)
            headers = ['name', 'run_number', 'created', 'status']
            verbose_headers = ['id', 'user', 'command']
            data = []
            if not isinstance(response, list):
                response = [response]
            for workflow in response:
                add_data_from_response(workflow, data, headers)
                if verbose:
                    headers += verbose_headers
                    add_verbose_data_from_response(workflow, verbose_headers,
                                                   headers, data)
            if output_format:
                tablib_data = tablib.Dataset()
                tablib_data.headers = headers
                for row in data:
                    tablib_data.append(row)
                if _filter:
                    tablib_data = tablib_data.subset(rows=None,
                                                     cols=list(_filter))
                click.echo(tablib_data.export(output_format))
            else:
                click_table_printer(headers, _filter, data)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style(
                'Cannot retrieve the status of a workflow {}: \n{}'.format(
                    workflow, str(e)), fg='red'), err=True)
def workflow_restart(ctx, workflow, access_token, parameters, options,
                     file):  # noqa: D301
    """Restart previously run workflow.

    The `restart` command allows to restart a previous workflow on the same
    workspace.

    Note that workflow restarting can be used in a combination with operational
    options ``FROM`` and ``TARGET``. You can also pass a modified workflow
    specification with ``-f`` or ``--file`` flag.

    You can furthermore use modified input parameters using `-p` or
    `--parameters` flag and by setting additional operational options using
    `-o` or `--options`.  The input parameters and operational options can be
    repetitive.

    Examples: \n
    \t $ reana-client restart -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client restart -w myanalysis.42 -p myparam=myvalue\n
    \t $ reana-client restart -w myanalysis.42 -o TARGET=gendata\n
    \t $ reana-client restart -w myanalysis.42 -o FROM=fitdata
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (get_workflow_parameters,
                                         get_workflow_status,
                                         start_workflow)

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))

    # 'restart': True tells the server to re-run on the existing workspace.
    parsed_parameters = {
        'input_parameters': parameters,
        'operational_options': options,
        'restart': True
    }
    if file:
        # A modified specification may be supplied with -f/--file; it is
        # sent along with the start request.
        parsed_parameters['reana_specification'] = \
            load_reana_spec(click.format_filename(file))
    if workflow:
        if parameters or options:
            try:
                # Validate against the supplied specification when given,
                # otherwise against the parameters recorded on the server.
                if 'reana_specification' in parsed_parameters:
                    workflow_type = \
                        parsed_parameters['reana_specification']['workflow'][
                            'type']
                    original_parameters = \
                        parsed_parameters['reana_specification'].get(
                            'inputs', {}).get('parameters', {})
                else:
                    response = get_workflow_parameters(workflow, access_token)
                    workflow_type = response['type']
                    original_parameters = response['parameters']
                parsed_parameters['operational_options'] = \
                    validate_operational_options(
                        workflow_type,
                        parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        original_parameters)
            except REANAValidationError as e:
                # Validation errors are fatal.
                click.secho(e.message, err=True, fg='red')
                sys.exit(1)
            except Exception as e:
                # NOTE(review): execution continues after this message, so
                # the workflow is still started with unvalidated parameters.
                click.secho('Could not apply given input parameters: '
                            '{0} \n{1}'.format(parameters, str(e)), err=True)
        try:
            logging.info('Connecting to {0}'.format(get_api_url()))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            # Restarting creates a new run number; report the new run.
            workflow = response['workflow_name'] + '.' + \
                str(response['run_number'])
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style('Cannot start workflow {}: \n{}'.format(
                workflow, str(e)), fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def workflow_start(ctx, workflow, access_token, parameters, options,
                   follow):  # noqa: D301
    """Start previously created workflow.

    The `start` command allows to start previously created workflow. The
    workflow execution can be further influenced by passing input parameters
    using `-p` or `--parameters` flag and by setting additional operational
    options using `-o` or `--options`.  The input parameters and operational
    options can be repetitive. For example, to disable caching for the Serial
    workflow engine, you can set `-o CACHE=off`.

    Examples: \n
    \t $ reana-client start -w myanalysis.42 -p sleeptime=10 -p myparam=4 \n
    \t $ reana-client start -w myanalysis.42 -p myparam1=myvalue1 -o CACHE=off
    """
    from reana_client.utils import get_api_url
    from reana_client.api.client import (get_workflow_parameters,
                                         get_workflow_status, start_workflow)

    logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
    for p in ctx.params:
        logging.debug('{param}: {value}'.format(param=p, value=ctx.params[p]))
    parsed_parameters = {
        'input_parameters': parameters,
        'operational_options': options
    }
    if workflow:
        if parameters or options:
            try:
                response = get_workflow_parameters(workflow, access_token)
                workflow_type = response['type']
                original_parameters = response['parameters']
                # Keep the validated/normalized options (the original
                # discarded the return value, unlike the restart command).
                parsed_parameters['operational_options'] = \
                    validate_operational_options(
                        workflow_type,
                        parsed_parameters['operational_options'])
                parsed_parameters['input_parameters'] = \
                    validate_input_parameters(
                        parsed_parameters['input_parameters'],
                        original_parameters)
            except REANAValidationError as e:
                display_message_error = e.message
                click.secho(display_message_error, err=True, fg='red')
                sys.exit(1)
            except Exception as e:
                # Best-effort validation: report the problem but still
                # attempt to start the workflow below.
                click.secho('Could not apply given input parameters: '
                            '{0} \n{1}'.format(parameters, str(e)), err=True)
        try:
            logging.info('Connecting to {0}'.format(get_api_url()))
            response = start_workflow(workflow, access_token,
                                      parsed_parameters)
            current_status = get_workflow_status(workflow,
                                                 access_token).get('status')
            click.secho(get_workflow_status_change_msg(workflow,
                                                       current_status),
                        fg='green')
            if follow:
                # Poll until the workflow leaves the 'running' state.
                while 'running' in current_status:
                    time.sleep(TIMECHECK)
                    current_status = get_workflow_status(
                        workflow, access_token).get('status')
                    click.secho(get_workflow_status_change_msg(
                        workflow, current_status), fg='green')
                    if 'finished' in current_status:
                        if follow:
                            click.secho(
                                '[INFO] Listing workflow output '
                                'files...', bold=True)
                            ctx.invoke(get_files,
                                       workflow=workflow,
                                       access_token=access_token,
                                       output_format='url')
                        sys.exit(0)
                    elif 'failed' in current_status or \
                            'stopped' in current_status:
                        sys.exit(1)
        except Exception as e:
            logging.debug(traceback.format_exc())
            logging.debug(str(e))
            click.echo(click.style('Cannot start workflow {}: \n{}'.format(
                workflow, str(e)), fg='red'), err=True)
            if 'invoked_by_subcommand' in ctx.parent.__dict__:
                sys.exit(1)
def get_analysis_status(workflow_id=None):
    """Retrieves status of an analysis workflow.

    Looks up the instance-wide REANA access token, asks the REANA server
    for the status of *workflow_id* and returns the payload as JSON.
    """
    access_token = current_app.config.get('REANA_ACCESS_TOKEN')
    status_payload = get_workflow_status(workflow_id, access_token)
    return jsonify(status_payload)