def complete_job(run_id, outputs):
    run = RunObject.from_db(run_id)
    run.complete(outputs)
    run.to_db()

    job_group = run.job_group
    job_group_id = str(job_group.id) if job_group else None

    _job_finished_notify(run)

    # Chain any INDIVIDUAL-type triggers registered on the operator that
    # produced this run, kicking off the downstream operator per run.
    for trigger in run.run_obj.operator_run.operator.from_triggers.filter(
            run_type=TriggerRunType.INDIVIDUAL):
        create_jobs_from_chaining.delay(trigger.to_operator_id,
                                        trigger.from_operator_id,
                                        [run_id],
                                        job_group_id=job_group_id)
def test_run_fail_job(self, mock_get_pipeline):
    with open('runner/tests/run/pair-workflow.cwl', 'r') as f:
        app = json.load(f)
    with open('runner/tests/run/inputs.json', 'r') as f:
        inputs = json.load(f)
    mock_get_pipeline.return_value = app

    run = RunObject.from_cwl_definition(str(self.run.id), inputs)
    run.to_db()

    operator_run = OperatorRun.objects.first()
    operator_run.runs.add(run.run_obj)
    num_failed_runs = operator_run.num_failed_runs

    fail_job(run.run_id, {'details': 'Error has happened'})

    # Failing the run should bump the operator run's failure counter
    # and persist the error message on the run itself.
    operator_run.refresh_from_db()
    self.assertEqual(operator_run.num_failed_runs, num_failed_runs + 1)
    run_obj = RunObject.from_db(run.run_id)
    self.assertEqual(run_obj.message, {'details': 'Error has happened'})
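# The fail_job task exercised above is not shown in this section. A minimal
# sketch of its assumed shape, mirroring complete_job (the method names below
# are assumptions, not confirmed by this section):
#
#     def fail_job(run_id, error_message):
#         run = RunObject.from_db(run_id)
#         run.fail(error_message)    # assumed counterpart to RunObject.complete
#         run.to_db()                # persist the failed status and the message
#         _job_finished_notify(run)  # same notification hook complete_job uses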
def post(self, request):
    run_id = request.data.get('run')
    run = RunObject.from_db(run_id)

    # Rebuild the original run's input payload from its persisted ports.
    inputs = dict()
    for port in run.inputs:
        inputs[port.name] = port.db_value

    data = dict(app=str(run.run_obj.app.id),
                inputs=inputs,
                tags=run.tags,
                job_group_id=run.job_group.id,
                job_group_notifier_id=run.job_group_notifier.id,
                resume=run_id)
    serializer = APIRunCreateSerializer(data=data, context={'request': request})
    if serializer.is_valid():
        new_run = serializer.save()
        response = RunSerializerFull(new_run)
        create_run_task.delay(response.data['id'], data['inputs'])
        job_group_notifier_id = str(new_run.job_group_notifier_id)
        self._send_notifications(job_group_notifier_id, new_run)
        return Response(response.data, status=status.HTTP_201_CREATED)
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
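# Illustrative client call for the endpoint above; the URL path and the auth
# scheme are assumptions, not taken from this section:
#
#     import requests
#
#     resp = requests.post('http://<beagle-host>/runs/api/restart/',
#                          json={'run': '<uuid of the run to resume>'},
#                          headers={'Authorization': 'Bearer <token>'})
#     assert resp.status_code == 201  # new run created with resume=<old run id>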
def test_run_complete_job(self, mock_get_pipeline):
    with open('runner/tests/run/pair-workflow.cwl', 'r') as f:
        app = json.load(f)
    with open('runner/tests/run/inputs.json', 'r') as f:
        inputs = json.load(f)
    mock_get_pipeline.return_value = app

    run = RunObject.from_cwl_definition(str(self.run.id), inputs)
    run.to_db()

    operator_run = OperatorRun.objects.first()
    operator_run.runs.add(run.run_obj)
    num_completed_runs = operator_run.num_completed_runs

    complete_job(run.run_id, self.outputs)

    # Completing the run should bump the operator run's completion counter.
    operator_run.refresh_from_db()
    self.assertEqual(operator_run.num_completed_runs, num_completed_runs + 1)

    run_obj = RunObject.from_db(run.run_id)
    file_obj = File.objects.filter(
        path=self.outputs['maf']['location'].replace('file://', '')).first()
    run_obj.to_db()

    # The maf output port should reference the registered File by its bid.
    for out in run_obj.outputs:
        if out.name == 'maf':
            self.assertEqual(out.value['location'], self.outputs['maf']['location'])
            self.assertEqual(FileProcessor.get_bid_from_file(file_obj),
                             out.db_value['location'])

    port = Port.objects.filter(run_id=run_obj.run_id, name='bams').first()
    self.assertEqual(len(port.files.all()), 4)
    expected_result = (
        '/output/argos_pair_workflow/425194f6-a974-4c2f-995f-f27d7ba54ddc/outputs/test_1.rg.md.abra.printreads.bam',
        '/output/argos_pair_workflow/425194f6-a974-4c2f-995f-f27d7ba54ddc/outputs/test_1.rg.md.abra.printreads.bai',
        '/output/argos_pair_workflow/425194f6-a974-4c2f-995f-f27d7ba54ddc/outputs/test_2.rg.md.abra.printreads.bam',
        '/output/argos_pair_workflow/425194f6-a974-4c2f-995f-f27d7ba54ddc/outputs/test_2.rg.md.abra.printreads.bai'
    )
    for port_file in port.files.all():
        self.assertIn(port_file.path, expected_result)
def complete_job(run_id, outputs):
    run = RunObject.from_db(run_id)
    # Guard against double-completion (e.g. a redelivered task).
    if run.run_obj.is_completed:
        return

    try:
        run.complete(outputs)
    except Exception as e:
        fail_job(run_id, str(e))
        return

    run.to_db()

    job_group = run.job_group
    job_group_id = str(job_group.id) if job_group else None

    _job_finished_notify(run)

    # A run may not belong to an operator run (e.g. one submitted directly),
    # in which case there are no triggers to chain; guarding here avoids an
    # AttributeError on the from_triggers lookup.
    operator_run = run.run_obj.operator_run
    if operator_run:
        for trigger in operator_run.operator.from_triggers.filter(
                run_type=TriggerRunType.INDIVIDUAL):
            create_jobs_from_chaining.delay(trigger.to_operator_id,
                                            trigger.from_operator_id,
                                            [run_id],
                                            job_group_id=job_group_id,
                                            parent=str(operator_run.id))
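# complete_job is likely dispatched asynchronously in the surrounding project
# (the .delay calls above show Celery in use); a minimal sketch of one assumed
# wiring, with the decorator placement being an assumption:
#
#     from celery import shared_task
#
#     @shared_task
#     def complete_job(run_id, outputs):
#         ...
#
#     # Callers then enqueue it rather than calling it inline, the same way
#     # the chaining task is dispatched above:
#     complete_job.delay(str(run.id), outputs)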