def test_workflow():
    """Nose-style generator test: yield one execution check per YAML test case."""
    path = abspath(join(__file__, '../../test_runtime/wf_tests.yaml'))
    doc = from_url(path)
    for test_name, test in six.iteritems(doc['tests']):
        context = init_context(test['job'])
        job = Job.from_dict(context, test['job'])
        # Materialize File objects for every declared app input before running.
        for inp in job.app.inputs:
            construct_files(job.inputs.get(inp.id), inp.validator)
        yield assert_execution, job, test['outputs']
def get_inputs(args, inputs, basedir=None):
    """Build constructed File values for each input port, rebased onto *basedir*.

    Ports whose constructed value is empty/falsy are omitted from the result.
    """
    if not basedir:
        basedir = os.path.abspath('.')
    result = {}
    for port in inputs:
        raw = args.get(port.id)
        if port.depth == 0:
            built = construct_files(raw, port.validator)
        elif raw:
            built = [construct_files(item, port.validator) for item in raw]
        else:
            built = []
        # Drop ports that produced nothing.
        if built:
            result[port.id] = built
    return map_rec_collection(lambda v: rebase_path(v, basedir), result)
def get_inputs(args, inputs, basedir=None):
    """Construct and path-rebase the File values supplied for each input port."""
    base = basedir or os.path.abspath('.')

    def _build(inp):
        # depth 0 is a scalar value; deeper ports carry a list of values.
        value = args.get(inp.id)
        if inp.depth == 0:
            return construct_files(value, inp.validator)
        return [construct_files(v, inp.validator) for v in value] if value else []

    constructed = {}
    for inp in inputs:
        made = _build(inp)
        if made:
            constructed[inp.id] = made
    return map_rec_collection(lambda v: rebase_path(v, base), constructed)
def get_command_line():
    """Resolve the posted tool + inputs and return its CLI description as JSON."""
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    inputs = {}
    for key, value in data['input_map'].iteritems():
        inputs[strip_prefix(key)] = construct_files(value, tool._inputs[key].validator)
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    payload = {
        'arguments': cli_job.make_arg_list(),
        'stdin': cli_job.stdin,
        'stdout': cli_job.stdout,
    }
    return json.dumps(payload, indent=2)
def get_outputs():
    """Evaluate a finished job's outputs and return them, plus status, as JSON."""
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    inputs = {}
    for key, value in data['input_map'].iteritems():
        inputs[strip_prefix(key)] = construct_files(value, tool._inputs[key].validator)
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    # SUCCESS only if the exit code is listed in the tool's successCodes (default [0]).
    success_codes = data['tool_cfg'].get('successCodes', [0])
    status = 'SUCCESS' if data['exit_code'] in success_codes else 'FAILURE'
    payload = {
        'status': status,
        'outputs': cli_job.get_outputs(data['job_dir'], job),
    }
    return json.dumps(payload, indent=2)
def get_command_line():
    """Return JSON describing the command line a tool would run for an input map."""
    payload = request.get_json(force=True)
    tool = process_builder(ctx, payload['tool_cfg'])
    resolved = {
        strip_prefix(name): construct_files(val, tool._inputs[name].validator)
        for name, val in payload['input_map'].iteritems()
    }
    fake = Job('Fake job ID', tool, resolved, {'cpu': 1, 'mem': 1024}, ctx)
    cli = CLIJob(fake)
    return json.dumps({
        'arguments': cli.make_arg_list(),
        'stdin': cli.stdin,
        'stdout': cli.stdout,
    }, indent=2)
def get_outputs():
    """Report a job's status and evaluated outputs as a JSON document."""
    payload = request.get_json(force=True)
    tool = process_builder(ctx, payload['tool_cfg'])
    resolved = {
        strip_prefix(name): construct_files(val, tool._inputs[name].validator)
        for name, val in payload['input_map'].iteritems()
    }
    fake = Job('Fake job ID', tool, resolved, {'cpu': 1, 'mem': 1024}, ctx)
    cli = CLIJob(fake)
    # The exit code must appear in the tool's successCodes (default [0]) to pass.
    if payload['exit_code'] in payload['tool_cfg'].get('successCodes', [0]):
        status = 'SUCCESS'
    else:
        status = 'FAILURE'
    return json.dumps({
        'status': status,
        'outputs': cli.get_outputs(payload['job_dir'], fake),
    }, indent=2)
def get_outputs():
    """Collect a job's outputs, re-apply the stripped key prefix, and return JSON.

    Fix: removed a leftover debug ``print data`` (which also polluted stdout on
    every request) and a commented-out dead line.
    """
    data = request.get_json(force=True)
    tool = process_builder(ctx, data['tool_cfg'])
    inputs = {
        strip_prefix(k): construct_files(v, tool._inputs[strip_prefix(k)].validator)
        for k, v in data['input_map'].iteritems()
    }
    job = Job('Fake job ID', tool, inputs, {'cpu': 1, 'mem': 1024}, ctx)
    cli_job = CLIJob(job)
    # NOTE(review): status is hard-coded to SUCCESS; exit_code/successCodes are
    # deliberately ignored here (unlike the sibling get_outputs) -- confirm intent.
    status = 'SUCCESS'
    outputs = cli_job.get_outputs(data['job_dir'], job)
    if inputs:
        # Re-apply the dotted prefix taken from an arbitrary input key so the
        # caller sees fully-qualified output ids. When the key has no '.',
        # rfind returns -1 and the prefix is the empty string.
        key = inputs.keys()[0]
        prefix = key[:key.rfind('.') + 1]
        outputs = {prefix + k: v for k, v in outputs.iteritems()}
    return json.dumps({
        'status': status,
        'outputs': outputs,
    })
def run(self, job, job_dir=None):
    """Execute *job* inside ``job_dir`` (created here) and return its
    constructed outputs.

    Side effects: creates and chmods the job directory, dumps the job
    description, runs the container command, writes ``result.cwl.json`` (if
    absent) and a ``<path>.rbx.json`` descriptor next to every output File.
    """
    # Default the working dir to the job id; normalize to a trailing slash.
    job_dir = os.path.abspath(job_dir or job.id)
    if not job_dir.endswith('/'):
        job_dir += '/'
    os.mkdir(job_dir)
    # Add world read/write so the containerized process (any uid) can use it.
    os.chmod(job_dir, os.stat(job_dir).st_mode | stat.S_IROTH | stat.S_IWOTH)
    self.cli_job = CLIJob(job)
    if self.container:
        self.ensure_files(job, job_dir)
    # Deep-copy inputs so later path rewriting doesn't mutate the caller's job.
    abspath_job = Job(job.id, job.app, copy.deepcopy(job.inputs),
                      job.allocated_resources, job.context)
    self.install(job=job)
    cmd_line = self.command_line(job, job_dir)
    self.job_dump(job, job_dir)
    # NOTE(review): ensure_files above is guarded by `if self.container` but
    # this call is not -- presumably self.container is always set; confirm.
    self.container.run(cmd_line, job_dir)
    result_path = os.path.abspath(job_dir) + '/result.cwl.json'
    if os.path.exists(result_path):
        # The tool produced its own result file; use it as-is.
        with open(result_path, 'r') as res:
            outputs = json.load(res)
    else:
        # Otherwise evaluate the output bindings ourselves and cache them.
        with open(result_path, 'w') as res:
            outputs = self.cli_job.get_outputs(
                os.path.abspath(job_dir), abspath_job)
            json.dump(outputs, res)
    outputs = {
        o.id: construct_files(outputs.get(o.id), o.validator)
        for o in job.app.outputs
    }
    self.unmap_paths(outputs)

    def write_rbx(f):
        # Persist a .rbx.json metadata descriptor beside each output File.
        if isinstance(f, File):
            with open(f.path + '.rbx.json', 'w') as rbx:
                json.dump(f.to_dict(), rbx)
    map_rec_collection(write_rbx, outputs)
    return outputs
def run(self, job, job_dir=None):
    """Run *job* in a freshly created ``job_dir`` and return constructed outputs.

    Creates and opens up permissions on the job directory, dumps the job,
    executes the container command, then reads (or writes) ``result.cwl.json``
    and emits an ``.rbx.json`` descriptor for each output File.
    """
    # Working directory defaults to the job id; ensure a trailing slash.
    job_dir = os.path.abspath(job_dir or job.id)
    if not job_dir.endswith('/'):
        job_dir += '/'
    os.mkdir(job_dir)
    # World read/write lets the containerized process (any uid) access it.
    os.chmod(job_dir, os.stat(job_dir).st_mode | stat.S_IROTH | stat.S_IWOTH)
    self.cli_job = CLIJob(job)
    if self.container:
        self.ensure_files(job, job_dir)
    # Deep copy so downstream path rewriting can't mutate the caller's inputs.
    abspath_job = Job(
        job.id, job.app, copy.deepcopy(job.inputs),
        job.allocated_resources, job.context
    )
    self.install(job=job)
    cmd_line = self.command_line(job, job_dir)
    self.job_dump(job, job_dir)
    # NOTE(review): unlike ensure_files, this call is not guarded by
    # `if self.container` -- presumably container is always set; confirm.
    self.container.run(cmd_line, job_dir)
    result_path = os.path.abspath(job_dir) + '/result.cwl.json'
    if os.path.exists(result_path):
        # Tool wrote its own result file; trust it.
        with open(result_path, 'r') as res:
            outputs = json.load(res)
    else:
        # No result file: evaluate output bindings and cache them to disk.
        with open(result_path, 'w') as res:
            outputs = self.cli_job.get_outputs(
                os.path.abspath(job_dir), abspath_job)
            json.dump(outputs, res)
    outputs = {o.id: construct_files(outputs.get(o.id), o.validator)
               for o in job.app.outputs}
    self.unmap_paths(outputs)

    def write_rbx(f):
        # Write a .rbx.json metadata file next to each output File.
        if isinstance(f, File):
            with open(f.path + '.rbx.json', 'w') as rbx:
                json.dump(f.to_dict(), rbx)
    map_rec_collection(write_rbx, outputs)
    return outputs