def get_command_line():
    """Flask handler: compute the command line for a posted tool/input set.

    Expects a JSON body with 'tool_cfg' (the tool document) and
    'input_map' (input id -> value). Returns a JSON string with the
    argument list plus the stdin/stdout paths the tool would use.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    # .items() instead of the Python-2-only .iteritems() so this also
    # runs on Python 3; identical behavior in a comprehension.
    # NOTE(review): the lookup uses the raw key while the result key is
    # stripped — confirm tool._inputs is keyed by the unstripped id.
    inputs = {strip_prefix(k): construct_files(v, tool._inputs[k].validator)
              for k, v in data['input_map'].items()}
    # Resource values are placeholders; only the command line matters here.
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    return json.dumps({
        'arguments': cli_job.make_arg_list(),
        'stdin': cli_job.stdin,
        'stdout': cli_job.stdout,
    }, indent=2)
def get_outputs():
    """Flask handler: resolve a finished job's outputs and status.

    Expects a JSON body with 'tool_cfg', 'input_map', 'exit_code' and
    'job_dir'. Status is SUCCESS when the exit code is listed in the
    tool's 'successCodes' (default [0]), FAILURE otherwise.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    # .items() instead of the Python-2-only .iteritems() so this also
    # runs on Python 3; identical behavior in a comprehension.
    inputs = {strip_prefix(k): construct_files(v, tool._inputs[k].validator)
              for k, v in data['input_map'].items()}
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    status = 'SUCCESS' if data['exit_code'] in data['tool_cfg'].get('successCodes', [0]) \
        else 'FAILURE'
    return json.dumps({
        'status': status,
        'outputs': cli_job.get_outputs(data['job_dir'], job),
    }, indent=2)
def from_dict(cls, context, d):
    """Build a workflow step from its dict representation *d*.

    The nested 'run' value is turned into a process via process_builder;
    every other value goes through context.from_dict. Input and output
    parameter lists are converted to their wrapper classes.
    """
    cls.infer_step_id(d)
    parsed = {}
    for key, val in six.iteritems(d):
        if key == 'run':
            parsed[key] = process_builder(context, val)
        else:
            parsed[key] = context.from_dict(val)
    kwargs = Process.kwarg_dict(parsed)
    step_inputs = [WorkflowStepInput.from_dict(context, item)
                   for item in parsed.get('inputs', [])]
    step_outputs = [OutputParameter.from_dict(context, item)
                    for item in parsed.get('outputs', [])]
    kwargs.update({
        'app': parsed['run'],
        'inputs': step_inputs,
        'outputs': step_outputs,
        'scatter': parsed.get('scatter'),
    })
    return cls(**kwargs)
def get_command_line():
    """Flask handler: compute the command line for a posted tool/input set.

    Expects a JSON body with 'tool_cfg' (the tool document) and
    'input_map' (input id -> value). Returns a JSON string with the
    argument list plus the stdin/stdout paths the tool would use.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    # .items() instead of the Python-2-only .iteritems() so this also
    # runs on Python 3; identical behavior in a comprehension.
    inputs = {
        strip_prefix(k): construct_files(v, tool._inputs[k].validator)
        for k, v in data['input_map'].items()
    }
    # Placeholder job id and resources; only the CLI rendering is used.
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    return json.dumps(
        {
            'arguments': cli_job.make_arg_list(),
            'stdin': cli_job.stdin,
            'stdout': cli_job.stdout,
        }, indent=2)
def get_outputs():
    """Flask handler: resolve a finished job's outputs and status.

    Expects a JSON body with 'tool_cfg', 'input_map', 'exit_code' and
    'job_dir'. Status is SUCCESS when the exit code is listed in the
    tool's 'successCodes' (default [0]), FAILURE otherwise.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    # .items() instead of the Python-2-only .iteritems() so this also
    # runs on Python 3; identical behavior in a comprehension.
    inputs = {
        strip_prefix(k): construct_files(v, tool._inputs[k].validator)
        for k, v in data['input_map'].items()
    }
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    status = 'SUCCESS' if data['exit_code'] in data['tool_cfg'].get('successCodes', [0]) \
        else 'FAILURE'
    return json.dumps(
        {
            'status': status,
            'outputs': cli_job.get_outputs(data['job_dir'], job),
        }, indent=2)
def get_outputs():
    """Flask handler: resolve a finished job's outputs.

    Expects a JSON body with 'tool_cfg', 'input_map' and 'job_dir'.
    Returns a JSON string with a status and the resolved outputs, with
    output keys re-prefixed to match the namespace of the input ids.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    # .items() instead of the Python-2-only .iteritems() so this also
    # runs on Python 3; identical behavior in a comprehension.
    inputs = {
        strip_prefix(k): construct_files(v, tool._inputs[strip_prefix(k)].validator)
        for k, v in data['input_map'].items()
    }
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    # TODO: status is hard-coded; it should be derived from
    # data['exit_code'] against tool_cfg.get('successCodes', [0]).
    status = 'SUCCESS'
    outputs = cli_job.get_outputs(data['job_dir'], job)
    if inputs:
        # Re-apply the dotted prefix that strip_prefix removed from the
        # input ids so output keys match the caller's namespace.
        # next(iter(...)) replaces py2-only inputs.keys()[0].
        key = next(iter(inputs))
        prefix = key[0:key.rfind('.') + 1]
        outputs = {prefix + k: v for k, v in outputs.items()}
    return json.dumps({
        'status': status,
        'outputs': outputs,
    })
def from_dict(cls, context, d):
    """Deserialize a workflow step from dict *d*.

    Only the nested app under 'run' needs the full process builder;
    everything else is converted with context.from_dict. Inputs and
    outputs are wrapped in their parameter classes.
    """
    cls.infer_step_id(d)

    def convert(key, value):
        # Dispatch on the key: 'run' holds the embedded process document.
        return process_builder(context, value) if key == 'run' \
            else context.from_dict(value)

    converted = {k: convert(k, v) for k, v in six.iteritems(d)}
    kwargs = Process.kwarg_dict(converted)
    kwargs.update({
        'app': converted['run'],
        'inputs': [WorkflowStepInput.from_dict(context, i)
                   for i in converted.get('inputs', [])],
        'outputs': [OutputParameter.from_dict(context, o)
                    for o in converted.get('outputs', [])],
        'scatter': converted.get('scatter'),
    })
    return cls(**kwargs)
def main():
    """CLI entry point: locate the tool, build the app, run the job.

    Flow: a first "dry run" pass over argv finds the tool document and
    global flags; the app is built from it; argv is then re-parsed
    against a usage string generated from the app's own input schema;
    finally the job is executed (or only printed/installed/tested,
    depending on flags).
    """
    disable_warnings()
    logging.basicConfig(level=logging.WARN)
    # No arguments at all: show the generic usage text.
    if len(sys.argv) == 1:
        print(USAGE)
        return
    usage = USAGE.format(resources=make_resources_usage_string(),
                         inputs='<inputs>')
    app_usage = usage
    if len(sys.argv) == 2 and \
            (sys.argv[1] == '--help' or sys.argv[1] == '-h'):
        print(USAGE)
        return
    # First pass: parse just enough to find the tool and global options.
    dry_run_args = dry_run_parse()
    if not dry_run_args:
        print(USAGE)
        return
    if not (dry_run_args['<tool>']):
        print('You have to specify a tool, with --tool option')
        print(usage)
        return
    tool = get_tool(dry_run_args)
    if not tool:
        fail("Couldn't find tool.")
    # A list document: pick the '#main' process from the loader index.
    if isinstance(tool, list):
        tool = loader.index.get('#main')
    if 'class' not in tool:
        fail("Document must have a 'class' field")
    if 'id' not in tool:
        tool['id'] = dry_run_args['<tool>']
    context = init_context(tool)
    app = process_builder(context, tool)
    job = None
    # The document may describe a whole job rather than a bare app.
    if isinstance(app, Job):
        job = app
        app = job.app
    rabix.expressions.update_engines(app)
    if dry_run_args['--install']:
        app.install()
        print("Install successful.")
        return
    if dry_run_args['--conformance-test']:
        job_dict = from_url(dry_run_args['<job>'])
        conformance_test(context, app, job_dict, dry_run_args.get('--basedir'))
        return
    try:
        args = docopt.docopt(usage, version=version, help=False)
        job_dict = copy.deepcopy(TEMPLATE_JOB)
        logging.root.setLevel(log_level(dry_run_args['--verbose']))
        input_file_path = args.get('<inp>') or args.get('--inp-file')
        if input_file_path:
            # Paths in the inputs file are resolved relative to that file.
            basedir = os.path.dirname(os.path.abspath(input_file_path))
            input_file = from_url(input_file_path)
            inputs = get_inputs(input_file, app.inputs, basedir)
            job_dict['inputs'].update(inputs)
        input_usage = job_dict['inputs']
        if job:
            basedir = os.path.dirname(args.get('<tool>'))
            job.inputs = get_inputs(job.inputs, app.inputs, basedir)
            input_usage.update(job.inputs)
        # Second pass: usage strings generated from the app's input schema.
        app_inputs_usage = make_app_usage_string(
            app, template=TOOL_TEMPLATE, inp=input_usage)
        app_usage = make_app_usage_string(app, USAGE, job_dict['inputs'])
        try:
            app_inputs = docopt.docopt(app_inputs_usage, args['<inputs>'])
        except docopt.DocoptExit:
            # Tolerate a CLI parse failure only when the job document
            # already supplies every required input.
            if not job:
                raise
            for inp in job.app.inputs:
                if inp.required and inp.id not in job.inputs:
                    raise
            app_inputs = {}
        if args['--help']:
            print(app_usage)
            return
        # Trim the leading '--' from option names, and ignore empty arrays.
        app_inputs = {
            k[2:]: v
            for k, v in six.iteritems(app_inputs)
            if v != []
        }
        inp = get_inputs(app_inputs, app.inputs)
        if not job:
            job_dict['id'] = args.get('--outdir') or args.get('--dir')
            job_dict['app'] = app
            job = Job.from_dict(context, job_dict)
        job.inputs.update(inp)
        if args['--print-cli']:
            if not isinstance(app, CommandLineTool):
                fail(dry_run_args['<tool>'] + " is not a command line app")
            print(CLIJob(job).cmd_line())
            return
        if args['--pretty-print']:
            fmt = partial(result_str, job.id)
        else:
            fmt = lambda result: json.dumps(context.to_primitive(result))
        # No inputs and no explicit run request: show the app usage.
        if not job.inputs and not args['--'] and not args['--quiet']:
            print(app_usage)
            return
        try:
            context.executor.execute(job, lambda _, result: print(fmt(result)))
        except RabixError as err:
            fail(err.message)
    except docopt.DocoptExit:
        fail(app_usage)
def main():
    """CLI entry point: locate the tool, build the app, run the job.

    Two-pass argument handling: a "dry run" parse of argv finds the
    tool document and global flags; once the app is built, argv is
    re-parsed against a usage string derived from the app's own input
    schema, and the job is executed (or printed/installed/tested,
    depending on flags).
    """
    disable_warnings()
    logging.basicConfig(level=logging.WARN)
    # Bare invocation: print the generic usage and stop.
    if len(sys.argv) == 1:
        print(USAGE)
        return
    usage = USAGE.format(resources=make_resources_usage_string(),
                         inputs='<inputs>')
    app_usage = usage
    if len(sys.argv) == 2 and \
            (sys.argv[1] == '--help' or sys.argv[1] == '-h'):
        print(USAGE)
        return
    # Pass one: locate the tool and global options only.
    dry_run_args = dry_run_parse()
    if not dry_run_args:
        print(USAGE)
        return
    if not (dry_run_args['<tool>']):
        print('You have to specify a tool, with --tool option')
        print(usage)
        return
    tool = get_tool(dry_run_args)
    if not tool:
        fail("Couldn't find tool.")
    # List documents carry several processes; use the '#main' one.
    if isinstance(tool, list):
        tool = loader.index.get('#main')
    if 'class' not in tool:
        fail("Document must have a 'class' field")
    if 'id' not in tool:
        tool['id'] = dry_run_args['<tool>']
    context = init_context(tool)
    app = process_builder(context, tool)
    job = None
    # The builder may yield a full Job; unwrap its app in that case.
    if isinstance(app, Job):
        job = app
        app = job.app
    rabix.expressions.update_engines(app)
    if dry_run_args['--install']:
        app.install()
        print("Install successful.")
        return
    if dry_run_args['--conformance-test']:
        job_dict = from_url(dry_run_args['<job>'])
        conformance_test(context, app, job_dict, dry_run_args.get('--basedir'))
        return
    try:
        args = docopt.docopt(usage, version=version, help=False)
        job_dict = copy.deepcopy(TEMPLATE_JOB)
        logging.root.setLevel(log_level(dry_run_args['--verbose']))
        input_file_path = args.get('<inp>') or args.get('--inp-file')
        if input_file_path:
            # Relative paths in the inputs file resolve against the file.
            basedir = os.path.dirname(os.path.abspath(input_file_path))
            input_file = from_url(input_file_path)
            inputs = get_inputs(input_file, app.inputs, basedir)
            job_dict['inputs'].update(inputs)
        input_usage = job_dict['inputs']
        if job:
            basedir = os.path.dirname(args.get('<tool>'))
            job.inputs = get_inputs(job.inputs, app.inputs, basedir)
            input_usage.update(job.inputs)
        # Pass two: usage strings built from the app's input schema.
        app_inputs_usage = make_app_usage_string(app,
                                                 template=TOOL_TEMPLATE,
                                                 inp=input_usage)
        app_usage = make_app_usage_string(app, USAGE, job_dict['inputs'])
        try:
            app_inputs = docopt.docopt(app_inputs_usage, args['<inputs>'])
        except docopt.DocoptExit:
            # A parse failure is fatal unless the job document already
            # provides every required input.
            if not job:
                raise
            for inp in job.app.inputs:
                if inp.required and inp.id not in job.inputs:
                    raise
            app_inputs = {}
        if args['--help']:
            print(app_usage)
            return
        # Strip the leading '--' from option names and drop empty arrays.
        app_inputs = {
            k[2:]: v
            for k, v in six.iteritems(app_inputs)
            if v != []
        }
        inp = get_inputs(app_inputs, app.inputs)
        if not job:
            job_dict['id'] = args.get('--outdir') or args.get('--dir')
            job_dict['app'] = app
            job = Job.from_dict(context, job_dict)
        job.inputs.update(inp)
        if args['--print-cli']:
            if not isinstance(app, CommandLineTool):
                fail(dry_run_args['<tool>'] + " is not a command line app")
            print(CLIJob(job).cmd_line())
            return
        if args['--pretty-print']:
            fmt = partial(result_str, job.id)
        else:
            fmt = lambda result: json.dumps(context.to_primitive(result))
        # Nothing to run and no explicit run/quiet flag: show usage.
        if not job.inputs and not args['--'] and not args['--quiet']:
            print(app_usage)
            return
        try:
            context.executor.execute(job, lambda _, result: print(fmt(result)))
        except RabixError as err:
            fail(err.message)
    except docopt.DocoptExit:
        fail(app_usage)