def post(self, request):
    """
    Submit operator jobs for a named pipeline.

    Accepts one of three payload shapes (checked in this order):
      * ``request_ids`` — queue one Celery job per request id via
        ``create_jobs_from_request.delay``.
      * ``run_ids`` — build the pipeline's operator over existing runs and
        submit synchronously via ``create_jobs_from_operator``.
      * neither — run the operator's routine job via ``run_routine_operator_job``.
    ``job_group_id`` is optional; on the request_ids path a JobGroup is
    created lazily when absent.
    """
    request_ids = request.data.get('request_ids', [])
    run_ids = request.data.get('run_ids', [])
    # NOTE(review): default is [] rather than None — falsy either way, but the
    # rest of the code treats this as a string id; confirm callers never rely
    # on the list default.
    job_group_id = request.data.get('job_group_id', [])
    pipeline_name = request.data['pipeline_name']
    pipeline = get_object_or_404(Pipeline, name=pipeline_name)
    if request_ids:
        for request_id in request_ids:
            logging.info("Submitting requestId %s to pipeline %s" % (request_id, pipeline_name))
            if not job_group_id:
                # Create one shared JobGroup for the whole submission on first use.
                job_group = JobGroup.objects.create()
                job_group_id = str(job_group.id)
            create_jobs_from_request.delay(request_id, pipeline.operator_id, job_group_id)
        body = {"details": "Operator Job submitted %s" % str(request_ids)}
    else:
        if run_ids:
            # Operator over a set of existing runs.
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            if job_group_id:
                operator = OperatorFactory.get_by_model(
                    operator_model, run_ids=run_ids, job_group_id=job_group_id)
                create_jobs_from_operator(operator, job_group_id)
                body = {
                    "details": "Operator Job submitted to pipeline %s, job group id %s, with runs %s" % (pipeline_name, job_group_id, str(run_ids))
                }
            else:
                operator = OperatorFactory.get_by_model(operator_model, run_ids=run_ids)
                create_jobs_from_operator(operator)
                body = {
                    "details": "Operator Job submitted to pipeline %s with runs %s" % (pipeline_name, str(run_ids))
                }
        else:
            # No request ids and no run ids: run the operator's routine job.
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            if job_group_id:
                operator = OperatorFactory.get_by_model(
                    operator_model, job_group_id=job_group_id)
                run_routine_operator_job(operator, job_group_id)
                body = {
                    "details": "Operator Job submitted to operator %s (JobGroupId: %s)" % (operator, job_group_id)
                }
            else:
                operator = OperatorFactory.get_by_model(operator_model)
                run_routine_operator_job(operator)
                body = {
                    "details": "Operator Job submitted to operator %s" % operator
                }
    # NOTE(review): sibling submit endpoints in this file return 202 ACCEPTED;
    # confirm 200 OK is intentional here.
    return Response(body, status=status.HTTP_200_OK)
def test_get_demo_operator_jobs(self):
    """Jobs produced by the Demo Operator match the expected serialized run."""
    operator = OperatorFactory.get_by_model(self.operator, request_id='1')
    pipeline = Pipeline.objects.get(id=operator.get_pipeline_id())
    jobs = operator.get_jobs()
    self.assertEqual(len(jobs), 1)
    run_serializer, job_payload = jobs[0]
    # The single job's inputs point at the registered demo file.
    inputs = {
        'input_file': {
            'class': 'File',
            'location': 'juno://' + self.file1.path,
        }
    }
    self.assertDictEqual(
        run_serializer.initial_data,
        {'app': pipeline.id, 'inputs': inputs, 'name': 'DEMO JOB', 'tags': {}},
    )
    self.assertDictEqual(job_payload, inputs)
def test_access_qc_operator(self):
    """
    Test that an ACCESS QC operator instance can be created and validated.
    """
    # Idiom fix: count() queries the DB for a count instead of materializing
    # every row just to call len() on it.
    self.assertEqual(File.objects.count(), 10)
    self.assertEqual(FileMetadata.objects.count(), 5)
    # This operator needs to write a temp file, so need to override this env var
    settings.BEAGLE_SHARED_TMPDIR = "/tmp"
    request_id = "05500_FH"
    operator_model = Operator.objects.get(id=11)
    operator = OperatorFactory.get_by_model(operator_model, request_id=request_id)
    self.assertEqual(operator.get_pipeline_id(), "05419097-a2b8-4d57-a8ab-c4c4cddcbabc")
    self.assertEqual(str(operator.model), "AccessQCOperator")
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    jobs = operator.get_jobs()
    # Idiom fix: assert directly rather than comparing a boolean to True.
    self.assertGreater(len(jobs), 0)
    for job in jobs:
        self.assertTrue(job.is_valid())
        input_json = job.inputs
        # Every per-sample input should contain exactly one entry.
        for key in (
            "collapsed_bam",
            "duplex_bam",
            "group_reads_by_umi_bam",
            "simplex_bam",
            "uncollapsed_bam_base_recal",
            "sample_group",
            "sample_name",
            "sample_sex",
        ):
            self.assertEqual(len(input_json[key]), 1)
        self.assertIsNotNone(input_json["samples-json"])
def post(self, request):
    """Submit a TempoMPGen job, optionally with normal/tumor pairing overrides."""
    normals_override = request.data.get('normals_override', [])
    tumors_override = request.data.get('tumors_override', [])
    operator_model = Operator.objects.get(slug="tempo_mpgen_operator")
    heading = "TempoMPGen Run %s" % datetime.datetime.now().isoformat()
    # Every submission gets a fresh JobGroup plus a notifier for progress updates.
    job_group = JobGroup()
    job_group.save()
    job_group_id = str(job_group.id)
    job_group_notifier_id = notifier_start(job_group, heading, operator_model)
    operator = OperatorFactory.get_by_model(
        operator_model,
        job_group_id=job_group_id,
        job_group_notifier_id=job_group_notifier_id,
    )
    pairing_override = None
    if normals_override and tumors_override:
        # Only apply an override when both sides of the pairing are supplied.
        pairing_override = {
            'normal_samples': normals_override,
            'tumor_samples': tumors_override,
        }
        body = {"details": "Submitting TempoMPGen Job with pairing overrides."}
    else:
        body = {"details": "TempoMPGen Job submitted."}
    create_tempo_mpgen_job(operator, pairing_override, job_group_id, job_group_notifier_id)
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_access_legacy_fastq_merge_operator(self):
    """
    Test that an Access legacy FastqMerge operator instance can be created and validated.
    """
    operator_files_count = 4
    # Idiom fix: count() instead of len(QuerySet.all()).
    self.assertEqual(File.objects.count(), operator_files_count)
    self.assertEqual(FileMetadata.objects.count(), operator_files_count)
    request_id = "10151_F"
    operator_model = Operator.objects.get(id=8)
    operator = OperatorFactory.get_by_model(operator_model, request_id=request_id)
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cdffffff")
    self.assertEqual(str(operator.model), "AccessLegacyFastqMergeOperator")
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    jobs = operator.get_jobs()
    # Idiom fix: assert directly rather than comparing booleans to True.
    self.assertGreater(len(jobs), 0)
    for job in jobs:
        serializer = job[0]
        self.assertTrue(serializer.is_valid())
        # Ensure at least 3 of the metadata fields are set, as not all of them are required.
        output_metadata = serializer.initial_data["output_metadata"]
        self.assertGreater(len(output_metadata.keys()), 3)
        output_metadata["sampleOrigin"] = "Tissue"
        self.assertGreater(float(output_metadata["captureConcentrationNm"]), 0)
        # Concentration should be the average across all files in the request.
        expected_avg = sum(
            float(f.metadata["captureConcentrationNm"]) for f in FileMetadata.objects.all()
        ) / operator_files_count
        self.assertEqual(output_metadata["captureConcentrationNm"], expected_avg)
        input_json = serializer.initial_data['inputs']
        self.assertEqual(len(input_json["fastq1"]), 2)
        self.assertEqual(len(input_json["fastq2"]), 2)
def test_operator_factory_argos3(self):
    """
    Argos operator has access to all files in the database, even non-Argos files.
    """
    # Load fixtures; 4 fastq files for 2 patient samples
    for fixture_name in ("10075_D_single_TN_pair.file.json", "10075_D_single_TN_pair.filemetadata.json"):
        call_command("loaddata", os.path.join(settings.TEST_FIXTURE_DIR, fixture_name), verbosity=0)
    self.assertEqual(len(File.objects.all()), 4)
    self.assertEqual(len(FileMetadata.objects.all()), 4)
    # Add one extra, non-Argos file on top of the fixtures
    extra_file = File.objects.create(
        file_name="foo.fastq.gz",
        path="/foo.fastq.gz",
        file_group=FileGroup.objects.get(id=settings.IMPORT_FILE_GROUP),
    )
    FileMetadata.objects.create(file=extra_file)
    self.assertEqual(len(File.objects.all()), 5)
    self.assertEqual(len(FileMetadata.objects.all()), 5)
    # create Argos operator — it should see all five files
    argos_operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    self.assertEqual("cb5d793b-e650-4b7d-bfcd-882858e29cc5", argos_operator.get_pipeline_id())
    self.assertEqual("argos", str(argos_operator.model))
    self.assertEqual("bar", argos_operator.request_id)
    self.assertEqual([], argos_operator._jobs)
    self.assertEqual(5, len(argos_operator.files))
def test_get_demo_operator(self):
    """DemoOperator can be constructed via the factory."""
    demo = OperatorFactory.get_by_model(self.operator)
    self.assertEqual("DemoOperator", str(demo.model))
def test_access_legacy_snv_operator(self):
    """
    Test that an Access legacy SNV operator instance can be created and validated.
    """
    # create access SNV operator
    # todo: avoid the magic number here:
    operator_model = Operator.objects.get(id=5)
    operator = OperatorFactory.get_by_model(operator_model, request_id=REQUEST_ID)
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbeaa")
    self.assertEqual(str(operator.model), "AccessLegacySNVOperator")
    self.assertEqual(operator.request_id, REQUEST_ID)
    self.assertEqual(operator._jobs, [])
    pipeline_slug = "AccessLegacySNVOperator"
    access_legacy_snv_model = Operator.objects.get(slug=pipeline_slug)
    operator = AccessLegacySNVOperator(
        access_legacy_snv_model,
        request_id=REQUEST_ID,
        run_ids=["bc23076e-f477-4578-943c-1fbf6f1fca44"])
    # Idiom fixes: dedicated assertions instead of assertTrue(x == y) /
    # assertTrue(isinstance(...)) — these give useful failure messages.
    self.assertIsInstance(operator, AccessLegacySNVOperator)
    self.assertEqual(operator.request_id, REQUEST_ID)
    self.assertEqual(operator._jobs, [])
    self.assertEqual(operator.run_ids, ["bc23076e-f477-4578-943c-1fbf6f1fca44"])
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbeaa")
    # Create and validate the input data
    input_data = operator.get_sample_inputs()
    self.validate(input_data)
def test_get_demo_operator_jobs(self):
    """Verify the Demo Operator yields exactly one correctly-formed job."""
    request_id = "1"
    demo_operator = OperatorFactory.get_by_model(self.operator, request_id=request_id)
    app_pipeline = Pipeline.objects.get(id=demo_operator.get_pipeline_id())
    jobs = demo_operator.get_jobs()
    self.assertEqual(1, len(jobs))
    serialized_run, job = jobs[0]
    expected_input = {
        "input_file": {"class": "File", "location": "juno://" + self.file1.path}
    }
    expected_initial_data = {
        "app": app_pipeline.id,
        "inputs": expected_input,
        "name": "DEMO JOB",
        "tags": {},
    }
    self.assertDictEqual(serialized_run.initial_data, expected_initial_data)
    self.assertDictEqual(job, expected_input)
def post(self, request):
    """
    Submit operator jobs over existing runs for one or more pipelines.

    Payload: ``run_ids``, ``pipelines`` (names), optional ``job_group_id``,
    optional ``for_each`` flag (only False is implemented).  All pipeline
    names are validated before anything is created; each pipeline's operator
    is then built over the same run ids and submitted.
    """
    run_ids = request.data.get('run_ids')
    pipeline_names = request.data.get('pipelines')
    job_group_id = request.data.get('job_group_id', None)
    for_each = request.data.get('for_each', False)
    if not for_each:
        # Validate every pipeline name up front so we 404 before creating anything.
        for pipeline_name in pipeline_names:
            get_object_or_404(Pipeline, name=pipeline_name)
        # Use the first run's requestId for notification headings, if available.
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get('requestId', 'Unknown')
        except Run.DoesNotExist:
            req = 'Unknown'
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response(
                    {'details': 'Invalid JobGroup: %s' % job_group_id},
                    status=status.HTTP_400_BAD_REQUEST)
        for pipeline_name in pipeline_names:
            pipeline = get_object_or_404(Pipeline, name=pipeline_name)
            # Reuse an existing notifier for this (job group, notifier type)
            # pair; otherwise start a fresh one for this pipeline's operator.
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                job_group_notifier_id = notifier_start(
                    job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model, run_ids=run_ids, job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id)
            create_jobs_from_operator(
                operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        # The for_each submission mode has not been implemented.
        return Response({'details': 'Not Implemented'}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s" % (pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_access_legacy_operator(self):
    """
    Test that an Access legacy operator instance can be created and validated.
    """
    operator_files_count = 2
    # Idiom fix: count() instead of len(QuerySet.all()).
    self.assertEqual(File.objects.count(), operator_files_count)
    self.assertEqual(FileMetadata.objects.count(), operator_files_count)
    request_id = "access_merge_fastq_test_request"
    operator_model = Operator.objects.get(id=3)
    operator = OperatorFactory.get_by_model(
        operator_model,
        request_id=request_id,
        run_ids=["bc23076e-f477-4578-943c-1fbf6f1fca44"])
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbeac")
    self.assertEqual(str(operator.model), "AccessOperator")
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    jobs = operator.get_jobs()
    # Idiom fix: assert non-emptiness / validity directly.
    self.assertGreater(len(jobs), 0)
    for job in jobs:
        self.assertTrue(job.is_valid())
        input_json = job.inputs
        self.assertEqual(len(input_json["fastq1"]), 1)
        self.assertEqual(len(input_json["fastq2"]), 1)
def post(self, request):
    """
    Submit operator jobs for a set of tumor/normal pairs against one or more
    (pipeline name, pipeline version) combinations.

    Payload: ``pairs``, ``pipelines``, ``pipeline_versions`` (parallel lists),
    ``name``, ``labHeadName``, ``investigatorName``, ``assay``, optional
    ``job_group_id`` and ``output_directory_prefix``.
    """
    pairs = request.data.get("pairs")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions")
    name = request.data.get("name")
    labHeadName = request.data.get("labHeadName")
    investigatorName = request.data.get("investigatorName")
    assay = request.data.get("assay")
    job_group_id = request.data.get("job_group_id", None)
    output_directory_prefix = request.data.get("output_directory_prefix", None)
    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)
    else:
        # BUG FIX: when a job_group_id was supplied, `job_group` was never
        # bound, so the notifier_start() fallback below raised NameError.
        # Resolve the JobGroup (and reject unknown ids), matching the
        # sibling run-submission endpoints in this file.
        try:
            job_group = JobGroup.objects.get(id=job_group_id)
        except JobGroup.DoesNotExist:
            return Response(
                {"details": "Invalid JobGroup: %s" % job_group_id},
                status=status.HTTP_400_BAD_REQUEST)
    for i, pipeline_name in enumerate(pipeline_names):
        pipeline_version = pipeline_versions[i]
        pipeline = get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
        # Reuse an existing notifier for this (job group, notifier type) pair,
        # otherwise start a new one carrying the request metadata.
        try:
            job_group_notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id)
            job_group_notifier_id = str(job_group_notifier.id)
        except JobGroupNotifier.DoesNotExist:
            metadata = {
                "assay": assay,
                "investigatorName": investigatorName,
                "labHeadName": labHeadName
            }
            job_group_notifier_id = notifier_start(
                job_group, name, operator=pipeline.operator, metadata=metadata)
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={"pairs": pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix,
        )
        create_jobs_from_operator(
            operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s" % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_create_jobs_from_operator_pipeline_deleted(self, get_pipeline_id, get_jobs, send_notification, create_run_task):
    """A run created for a deleted pipeline is recorded with FAILED status."""
    serializer = APIRunCreateSerializer(
        data={'app': 'cb5d793b-e650-4b7d-bfcd-882858e29cc5', 'inputs': None, 'name': None, 'tags': {}}
    )
    get_jobs.return_value = [(serializer, None)]
    # Simulate the pipeline having been deleted.
    get_pipeline_id.return_value = None
    create_run_task.return_value = None
    send_notification.return_value = None
    Run.objects.all().delete()
    operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    create_jobs_from_operator(operator, None)
    self.assertEqual(len(Run.objects.all()), 1)
    self.assertEqual(Run.objects.first().status, RunStatus.FAILED)
def test_operator_factory_argos1(self):
    """
    An Argos operator can be created against an empty database.
    """
    argos_operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    self.assertEqual("cb5d793b-e650-4b7d-bfcd-882858e29cc5", argos_operator.get_pipeline_id())
    self.assertEqual("argos", str(argos_operator.model))
    self.assertEqual("bar", argos_operator.request_id)
    self.assertEqual([], argos_operator._jobs)
    # no File items in the database yet
    self.assertEqual(0, len(argos_operator.files))
def test_demo_operator_input(self):
    """Demo Operator builds the expected CWL input document."""
    operator = OperatorFactory.get_by_model(self.operator, request_id='1')
    self.assertDictEqual(
        operator.create_input(),
        {'input_file': {'class': 'File', 'location': 'juno://' + self.file1.path}},
    )
def test_demo_operator_input(self):
    """Input generated by the Demo Operator points at the registered file."""
    request_id = "1"
    demo_operator = OperatorFactory.get_by_model(self.operator, request_id=request_id)
    generated = demo_operator.create_input()
    location = "juno://" + self.file1.path
    self.assertDictEqual(generated, {"input_file": {"class": "File", "location": location}})
def test_create_jobs_from_operator_pipeline_deleted(
    self, get_pipeline_id, get_jobs, send_notification, create_run_task, memcache_task_lock, set_for_restart
):
    """A run whose pipeline has been deleted ends up in FAILED status."""
    get_jobs.return_value = [
        RunCreator(app="cb5d793b-e650-4b7d-bfcd-882858e29cc5", inputs=None, name=None, tags={})
    ]
    # Simulate the pipeline having been deleted.
    get_pipeline_id.return_value = None
    set_for_restart.return_value = None
    create_run_task.return_value = None
    send_notification.return_value = None
    memcache_task_lock.return_value = True
    Run.objects.all().delete()
    operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    create_jobs_from_operator(operator, None)
    self.assertEqual(len(Run.objects.all()), 1)
    self.assertEqual(RunStatus(Run.objects.first().status), RunStatus.FAILED)
def test_create_copy_output_jobs(self):
    """
    Test that copy output jobs are correctly created.
    """
    print("Running test_create_copy_output_jobs ----")
    # Load fixtures
    test_files_fixture = os.path.join(
        settings.TEST_FIXTURE_DIR,
        "ca18b090-03ad-4bef-acd3-52600f8e62eb.run.full.json")
    call_command('loaddata', test_files_fixture, verbosity=0)
    operator_model = Operator.objects.get(id=4)
    operator = OperatorFactory.get_by_model(
        operator_model,
        version='v1.1.0',
        run_ids=["ca18b090-03ad-4bef-acd3-52600f8e62eb"])
    # Perf/clarity fix: the original called operator.get_jobs() twice,
    # rebuilding the job list just to index into it again.
    job = operator.get_jobs()[0][0]
    input_json_valid = False
    if job.is_valid():
        input_json = job.initial_data['inputs']
        input_json_valid = self.validate_copy_outputs_input(input_json)
        print(json.dumps(input_json, cls=UUIDEncoder))
    # Idiom fix: assertTrue instead of assertEqual(x, True).
    self.assertTrue(input_json_valid)
def post(self, request):
    """
    Kick off an Aion job for the given lab head.

    Expects ``lab_head_email`` in the payload; creates a JobGroup and
    notifier, builds the AionOperator, and submits the job.
    """
    lab_head_email = request.data.get('lab_head_email', [])
    if not lab_head_email:
        # BUG FIX: the original fell through with `body` undefined (NameError
        # at the return) when no email was supplied. Reject explicitly.
        return Response(
            {"details": "lab_head_email is required"},
            status=status.HTTP_400_BAD_REQUEST)
    operator_model = Operator.objects.get(class_name="AionOperator")
    heading = "Aion Run for %s" % lab_head_email
    job_group = JobGroup()
    job_group.save()
    job_group_id = str(job_group.id)
    job_group_notifier_id = notifier_start(job_group, heading, operator_model)
    # BUG FIX: the original built the operator BEFORE job_group_id and
    # job_group_notifier_id existed, raising NameError on every request.
    operator = OperatorFactory.get_by_model(
        operator_model,
        job_group_id=job_group_id,
        job_group_notifier_id=job_group_notifier_id)
    create_aion_job(operator, lab_head_email, job_group_id, job_group_notifier_id)
    body = {"details": "Aion Job submitted for %s" % lab_head_email}
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_access_legacy_sv_operator(self):
    """
    Test that an Access legacy SV operator instance can be created and validated.
    """
    # create access SV operator
    request_id = "access_legacy_test_request"
    # todo: avoid the magic number here:
    operator_model = Operator.objects.get(id=6)
    operator = OperatorFactory.get_by_model(operator_model, request_id=request_id)
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbead")
    self.assertEqual(str(operator.model), "AccessLegacySVOperator")
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    pipeline_slug = "AccessLegacySVOperator"
    access_legacy_sv_model = Operator.objects.get(slug=pipeline_slug)
    operator = AccessLegacySVOperator(
        access_legacy_sv_model,
        request_id=request_id,
        run_ids=['bc23076e-f477-4578-943c-1fbf6f1fca42'])
    # Idiom fixes: dedicated assertions instead of assertTrue(x == y) /
    # assertTrue(isinstance(...)) — these give useful failure messages.
    self.assertIsInstance(operator, AccessLegacySVOperator)
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    self.assertEqual(operator.run_ids, ['bc23076e-f477-4578-943c-1fbf6f1fca42'])
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbead")
    # Create and validate the input data
    input_data = operator.get_sample_inputs()
    required_input_fields = [
        'sv_sample_id',
        'sv_tumor_bams',
    ]
    for inputs in input_data:
        for field in required_input_fields:
            self.assertIn(field, inputs)
            self.assertEqual(len(inputs[field]), 1)
        self.assertIn('sv_normal_bam', inputs)
def test_operator_factory_argos2(self):
    """
    An Argos operator can be created with fixture files loaded.

    Loads Argos fixtures and checks the number of Files available to the Operator.
    """
    # Load fixtures; 4 fastq files for 2 patient samples
    for fixture_name in ("10075_D_single_TN_pair.file.json", "10075_D_single_TN_pair.filemetadata.json"):
        call_command("loaddata", os.path.join(settings.TEST_FIXTURE_DIR, fixture_name), verbosity=0)
    argos_model = Operator.objects.get(id=1)
    argos_operator = OperatorFactory.get_by_model(argos_model, request_id="bar")
    self.assertEqual("cb5d793b-e650-4b7d-bfcd-882858e29cc5", argos_operator.get_pipeline_id())
    self.assertEqual("argos", str(argos_operator.model))
    self.assertEqual("bar", argos_operator.request_id)
    self.assertEqual([], argos_operator._jobs)
    self.assertEqual(4, len(argos_operator.files))
def post(self, request):
    """
    Submit operator jobs for a set of tumor/normal pairs against one or more
    pipelines (latest version — no version selection in this endpoint).

    Payload: ``pairs``, ``pipelines``, ``name``, optional ``job_group_id``
    and ``output_directory_prefix``.
    """
    pairs = request.data.get('pairs')
    pipeline_names = request.data.get('pipelines')
    name = request.data.get('name')
    job_group_id = request.data.get('job_group_id', None)
    output_directory_prefix = request.data.get('output_directory_prefix', None)
    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)
    else:
        # BUG FIX: when a job_group_id was supplied, `job_group` was never
        # bound, so the notifier_start() fallback below raised NameError.
        # Resolve the JobGroup (and reject unknown ids), matching the
        # sibling run-submission endpoints in this file.
        try:
            job_group = JobGroup.objects.get(id=job_group_id)
        except JobGroup.DoesNotExist:
            return Response(
                {'details': 'Invalid JobGroup: %s' % job_group_id},
                status=status.HTTP_400_BAD_REQUEST)
    for pipeline_name in pipeline_names:
        pipeline = get_object_or_404(Pipeline, name=pipeline_name)
        # Reuse an existing notifier for this (job group, notifier type)
        # pair, otherwise start a fresh one.
        try:
            job_group_notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id)
            job_group_notifier_id = str(job_group_notifier.id)
        except JobGroupNotifier.DoesNotExist:
            job_group_notifier_id = notifier_start(
                job_group, name, operator=pipeline.operator)
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={'pairs': pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix)
        create_jobs_from_operator(
            operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s" % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_access_nucleo_operator(self):
    """
    Test that an ACCESS Nucleo operator instance can be created and validated.
    """
    operator_files_count = 4
    # Idiom fix: count() instead of len(QuerySet.all()).
    self.assertEqual(File.objects.count(), operator_files_count)
    self.assertEqual(FileMetadata.objects.count(), operator_files_count)
    request_id = "10151_F"
    operator_model = Operator.objects.get(id=10)
    operator = OperatorFactory.get_by_model(operator_model, request_id=request_id)
    self.assertEqual(operator.get_pipeline_id(), "65419097-a2b8-4d57-a8ab-c4c4cddcbabc")
    self.assertEqual(str(operator.model), "AccessNucleoOperator")
    self.assertEqual(operator.request_id, request_id)
    self.assertEqual(operator._jobs, [])
    jobs = operator.get_jobs()
    # Idiom fix: assert directly rather than comparing booleans to True.
    self.assertGreater(len(jobs), 0)
    for job in jobs:
        self.assertTrue(job.is_valid())
        # Ensure at least 3 of the metadata fields are set, as not all of them are required.
        self.assertGreater(len(job.output_metadata.keys()), 3)
        job.output_metadata["sampleOrigin"] = "Tissue"
        self.assertGreater(float(job.output_metadata["captureConcentrationNm"]), 0)
        # Concentration should be the average across all files in the request.
        expected_avg = sum(
            float(f.metadata["captureConcentrationNm"]) for f in FileMetadata.objects.all()
        ) / operator_files_count
        self.assertEqual(job.output_metadata["captureConcentrationNm"], expected_avg)
        input_json = job.inputs
        self.assertEqual(len(input_json["fgbio_fastq_to_bam_input"]), 2)
        self.assertEqual(len(input_json["fgbio_fastq_to_bam_input"][0]), 2)
        self.assertEqual(len(input_json["fgbio_fastq_to_bam_input"][1]), 2)
def post(self, request):
    """
    Submit operator jobs over existing runs for one or more
    (pipeline name, pipeline version) pairs.

    Payload: ``run_ids``, ``pipelines``, ``pipeline_versions`` (parallel
    lists), optional ``job_group_id``, optional ``for_each`` flag (only
    False is implemented).  All pipeline/version pairs are validated before
    anything is created.
    """
    run_ids = request.data.get("run_ids")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions", None)
    job_group_id = request.data.get("job_group_id", None)
    for_each = request.data.get("for_each", False)
    if not for_each:
        # Validate every pipeline/version pair up front so we 404 before
        # creating anything.
        for i, pipeline_name in enumerate(pipeline_names):
            pipeline_version = pipeline_versions[i]
            get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
        # Use the first run's requestId for notification headings, if available.
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get("requestId", "Unknown")
        except Run.DoesNotExist:
            req = "Unknown"
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response(
                    {"details": "Invalid JobGroup: %s" % job_group_id},
                    status=status.HTTP_400_BAD_REQUEST)
        for i, pipeline_name in enumerate(pipeline_names):
            pipeline_version = pipeline_versions[i]
            pipeline = get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
            # Reuse an existing notifier for this (job group, notifier type)
            # pair, otherwise start a fresh one for this pipeline's operator.
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                job_group_notifier_id = notifier_start(
                    job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model,
                run_ids=run_ids,
                job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id,
                pipeline=str(pipeline.id),
            )
            create_jobs_from_operator(
                operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        # The for_each submission mode has not been implemented.
        return Response({"details": "Not Implemented"}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s" % (pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body,
                    status=status.HTTP_202_ACCEPTED)
def test_operator_factory_call(self):
    """
    Test operator call from factory.
    """
    first_operator = OperatorFactory.get_by_model(Operator.objects.first())
    # Idiom fix: `assertTrue(x != None)` replaced with the dedicated
    # assertIsNotNone, which uses an identity check and reports clearly.
    self.assertIsNotNone(first_operator)