def post(self, request):
    run_ids = request.data.get('run_ids')
    pipeline_names = request.data.get('pipelines')
    job_group_id = request.data.get('job_group_id', None)
    for_each = request.data.get('for_each', False)
    if not for_each:
        for pipeline_name in pipeline_names:
            get_object_or_404(Pipeline, name=pipeline_name)
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get('requestId', 'Unknown')
        except Run.DoesNotExist:
            req = 'Unknown'
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response({'details': 'Invalid JobGroup: %s' % job_group_id},
                                status=status.HTTP_400_BAD_REQUEST)
        for pipeline_name in pipeline_names:
            pipeline = get_object_or_404(Pipeline, name=pipeline_name)
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                job_group_notifier_id = notifier_start(job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model,
                run_ids=run_ids,
                job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id)
            create_jobs_from_operator(operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        return Response({'details': 'Not Implemented'}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s"
        % (pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def post(self, request):
    normals_override = request.data.get('normals_override', [])
    tumors_override = request.data.get('tumors_override', [])
    operator_model = Operator.objects.get(slug="tempo_mpgen_operator")
    pairing_override = None
    heading = "TempoMPGen Run %s" % datetime.datetime.now().isoformat()
    job_group = JobGroup()
    job_group.save()
    job_group_id = str(job_group.id)
    job_group_notifier_id = notifier_start(job_group, heading, operator_model)
    operator = OperatorFactory.get_by_model(
        operator_model,
        job_group_id=job_group_id,
        job_group_notifier_id=job_group_notifier_id)
    if normals_override and tumors_override:
        pairing_override = dict()
        pairing_override['normal_samples'] = normals_override
        pairing_override['tumor_samples'] = tumors_override
        body = {"details": "Submitting TempoMPGen Job with pairing overrides."}
    else:
        body = {"details": "TempoMPGen Job submitted."}
    create_tempo_mpgen_job(operator, pairing_override, job_group_id, job_group_notifier_id)
    return Response(body, status=status.HTTP_202_ACCEPTED)
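# A minimal sketch (not from the project's test suite) of exercising the
# TempoMPGen view above from a DRF test. The route and sample ids are
# illustrative assumptions; only the payload keys and the 202 response with
# its details message come from the view itself.
from django.contrib.auth.models import User
from rest_framework.test import APITestCase


class TempoMPGenViewSketch(APITestCase):
    def test_submit_with_pairing_overrides(self):
        admin_user = User.objects.create_superuser('admin', 'sample_email', 'password')
        self.client.force_authenticate(user=admin_user)
        response = self.client.post(
            '/v0/run/operator/tempo-mpgen/',  # hypothetical route
            {
                'normals_override': ['s_C_000001_N001_d'],  # hypothetical sample ids
                'tumors_override': ['s_C_000001_T001_d'],
            },
            format='json')
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.json()['details'],
                         'Submitting TempoMPGen Job with pairing overrides.')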
def create_request_job(request_id, redelivery=False):
    logger.info("Searching for job: %s for request_id: %s" % (TYPES['REQUEST'], request_id))
    count = Job.objects.filter(
        run=TYPES['REQUEST'],
        args__request_id=request_id,
        status__in=[JobStatus.CREATED, JobStatus.IN_PROGRESS, JobStatus.WAITING_FOR_CHILDREN]
    ).count()
    request_redelivered = Job.objects.filter(run=TYPES['REQUEST'], args__request_id=request_id).count() > 0
    assays = ETLConfiguration.objects.first()
    if request_redelivered and not (assays.redelivery and redelivery):
        return None, "Request is redelivered, but redelivery deactivated"
    if count == 0:
        job_group = JobGroup()
        job_group.save()
        job_group_notifier_id = notifier_start(job_group, request_id)
        job_group_notifier = JobGroupNotifier.objects.get(id=job_group_notifier_id)
        job = Job(
            run=TYPES['REQUEST'],
            args={'request_id': request_id,
                  'job_group': str(job_group.id),
                  'job_group_notifier': job_group_notifier_id,
                  'redelivery': request_redelivered},
            status=JobStatus.CREATED,
            max_retry=1,
            children=[],
            callback=TYPES['REQUEST_CALLBACK'],
            callback_args={'request_id': request_id,
                           'job_group': str(job_group.id),
                           'job_group_notifier': job_group_notifier_id},
            job_group=job_group,
            job_group_notifier=job_group_notifier)
        job.save()
        if request_redelivered:
            redelivery_event = RedeliveryEvent(job_group_notifier_id).to_dict()
            send_notification.delay(redelivery_event)
        return job, "Job Created"
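# A hedged usage sketch for create_request_job; 'REQ-001' is a made-up request
# id. Note the function returns (job, "Job Created") when it acts, returns
# (None, reason) when redelivery is blocked, and falls through to a bare None
# when a request job is already pending, so callers must guard before unpacking.
import logging

logger = logging.getLogger(__name__)

result = create_request_job('REQ-001', redelivery=False)
if result is None:
    logger.info('A request job already exists for REQ-001; nothing created')
else:
    job, message = result
    if job is None:
        logger.warning('Request job not created: %s', message)
    else:
        logger.info('%s: job %s', message, job.id)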
def post(self, request):
    pairs = request.data.get("pairs")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions")
    name = request.data.get("name")
    labHeadName = request.data.get("labHeadName")
    investigatorName = request.data.get("investigatorName")
    assay = request.data.get("assay")
    job_group_id = request.data.get("job_group_id", None)
    output_directory_prefix = request.data.get("output_directory_prefix", None)
    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)
    else:
        # fetch the existing JobGroup; notifier_start below needs the object,
        # not just the id
        try:
            job_group = JobGroup.objects.get(id=job_group_id)
        except JobGroup.DoesNotExist:
            return Response({"details": "Invalid JobGroup: %s" % job_group_id},
                            status=status.HTTP_400_BAD_REQUEST)
    for i, pipeline_name in enumerate(pipeline_names):
        pipeline_version = pipeline_versions[i]
        pipeline = get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
        try:
            job_group_notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id)
            job_group_notifier_id = str(job_group_notifier.id)
        except JobGroupNotifier.DoesNotExist:
            metadata = {
                "assay": assay,
                "investigatorName": investigatorName,
                "labHeadName": labHeadName,
            }
            job_group_notifier_id = notifier_start(
                job_group, name, operator=pipeline.operator, metadata=metadata)
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={"pairs": pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix,
        )
        create_jobs_from_operator(operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s"
        % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
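# A hedged sketch of the request body the pair-based view above expects. The
# pipeline name/version, assay, and sample ids are illustrative assumptions,
# and the exact shape of each entry in "pairs" is an assumption too; the view
# only passes it through to the operator as pairing={"pairs": pairs}.
payload = {
    "pairs": [
        {"normal_sample": "s_C_000001_N001_d", "tumor_sample": "s_C_000001_T001_d"},
    ],
    "pipelines": ["argos"],
    "pipeline_versions": ["1.1.2"],
    "name": "ARGOS run for request REQ-001",
    "labHeadName": "Example Lab Head",
    "investigatorName": "Example Investigator",
    "assay": "IMPACT505",
    "job_group_id": None,  # omit or None to have the view create a new JobGroup
    "output_directory_prefix": None,
}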
def post(self, request):
    request_ids = request.data.get('request_ids')
    pipeline_name = request.data.get('pipeline')
    job_group_id = request.data.get('job_group_id', None)
    for_each = request.data.get('for_each', True)
    errors = []
    if not request_ids:
        errors.append('request_ids needs to be specified')
    if not pipeline_name:
        errors.append('pipeline needs to be specified')
    if errors:
        return Response({'details': errors}, status=status.HTTP_400_BAD_REQUEST)
    # validate inputs before the lookup; get_object_or_404 would otherwise
    # raise a 404 before the error messages above could ever be returned
    pipeline = get_object_or_404(Pipeline, name=pipeline_name)
    if not job_group_id:
        if for_each:
            for req in request_ids:
                job_group = JobGroup()
                job_group.save()
                job_group_id = str(job_group.id)
                logging.info("Submitting requestId %s to pipeline %s" % (req, pipeline))
                create_jobs_from_request.delay(req, pipeline.operator_id, job_group_id,
                                               pipeline=str(pipeline.id))
        else:
            return Response({'details': 'Not Implemented'}, status=status.HTTP_400_BAD_REQUEST)
    else:
        if for_each:
            # look up the JobGroup once; notifier_start needs the object, not the id
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response({'details': 'Invalid JobGroup: %s' % job_group_id},
                                status=status.HTTP_400_BAD_REQUEST)
            for req in request_ids:
                logging.info("Submitting requestId %s to pipeline %s" % (req, pipeline))
                try:
                    job_group_notifier = JobGroupNotifier.objects.get(
                        job_group_id=job_group_id,
                        notifier_type_id=pipeline.operator.notifier_id)
                    job_group_notifier_id = str(job_group_notifier.id)
                except JobGroupNotifier.DoesNotExist:
                    job_group_notifier_id = notifier_start(job_group, req, operator=pipeline.operator)
                create_jobs_from_request.delay(
                    req, pipeline.operator_id, job_group_id,
                    job_group_notifier_id=job_group_notifier_id,
                    pipeline=str(pipeline.id))
        else:
            return Response({'details': 'Not Implemented'}, status=status.HTTP_400_BAD_REQUEST)
    body = {"details": "Operator Job submitted %s" % str(request_ids)}
    return Response(body, status=status.HTTP_202_ACCEPTED)
class JobViewTest(APITestCase):
    def setUp(self):
        admin_user = User.objects.create_superuser('admin', 'sample_email', 'password')
        self.client.force_authenticate(user=admin_user)
        self.job_group1 = JobGroup(jira_id='jira_id1')
        self.job_group1.save()
        self.job_group2 = JobGroup(jira_id='jira_id2')
        self.job_group2.save()
        self.job_group3 = JobGroup(jira_id='jira_id3')
        self.job_group3.save()
        self.job_group4 = JobGroup(jira_id='jira_id4')
        self.job_group4.save()
        self.job1 = Job(
            args={'key1': 'value1', 'key2': 'value2', 'boolean_key': True,
                  'sample_id': 'sample_id1', 'request_id': 'request_id1'},
            status=JobStatus.COMPLETED,
            job_group=self.job_group1,
            run=TYPES['SAMPLE'])
        self.job1.save()
        self.job2 = Job(
            args={'key1': 'value1', 'key2': '1value2', 'boolean_key': False,
                  'sample_id': 'sample_id2', 'request_id': 'request_id1'},
            status=JobStatus.FAILED,
            job_group=self.job_group2,
            run=TYPES['POOLED_NORMAL'])
        self.job2.save()
        self.job3 = Job(
            args={'key1': 'value1', 'key2': '1value2', 'boolean_key': False,
                  'sample_id': 'sample_id3', 'request_id': 'request_id1'},
            status=JobStatus.FAILED,
            job_group=self.job_group1,
            run=TYPES['POOLED_NORMAL'])
        self.job3.save()
        self.job4 = Job(
            args={'key1': 'value1', 'key2': '1value4', 'boolean_key': False,
                  'sample_id': 'sample_id4', 'request_id': 'request_id1'},
            status=JobStatus.FAILED,
            job_group=self.job_group3,
            run=TYPES['POOLED_NORMAL'])
        self.job4.save()
        self.job5 = Job(
            args={'key1': 'value1', 'key2': '1value2', 'boolean_key': False,
                  'sample_id': 'sample_id5', 'request_id': 'request_id2'},
            status=JobStatus.FAILED,
            job_group=self.job_group4,
            run=TYPES['POOLED_NORMAL'])
        self.job5.save()
        self.api_root = '/v0/etl/jobs'

    def test_query_job_group(self):
        response = self.client.get(self.api_root + '/?job_group=' + str(self.job_group1.id))
        self.assertEqual(len(response.json()['results']), 2)

    def test_query_job_type(self):
        response = self.client.get(self.api_root + '/?type=POOLED_NORMAL')
        self.assertEqual(len(response.json()['results']), 4)

    def test_query_sampleid(self):
        response = self.client.get(self.api_root + '/?sample_id=sample_id1')
        self.assertEqual(len(response.json()['results']), 1)

    def test_query_requestid(self):
        response = self.client.get(self.api_root + '/?request_id=request_id1')
        self.assertEqual(len(response.json()['results']), 4)
        response = self.client.get(self.api_root + '/?request_id=request_id1&sample_id=sample_id1')
        self.assertEqual(len(response.json()['results']), 1)

    def test_query_value_args(self):
        response = self.client.get(self.api_root + '/?values_args=key1,key2')
        self.assertEqual(len(response.json()['results']), 3)

    def test_query_args(self):
        response = self.client.get(self.api_root + '/?args=key2:1value4')
        self.assertEqual(len(response.json()['results']), 1)
        response = self.client.get(self.api_root + '/?args=boolean_key:False')
        self.assertEqual(len(response.json()['results']), 4)

    def test_query_args_distribution(self):
        response = self.client.get(self.api_root + '/?args_distribution=key2')
        expected_result = {'1value2': 3, 'value2': 1, '1value4': 1}
        self.assertEqual(response.json(), expected_result)
def post(self, request):
    lab_head_email = request.data.get('lab_head_email', [])
    if lab_head_email:
        operator_model = Operator.objects.get(class_name="AionOperator")
        heading = "Aion Run for %s" % lab_head_email
        # create the job group and notifier first; the operator needs their ids
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)
        job_group_notifier_id = notifier_start(job_group, heading, operator_model)
        operator = OperatorFactory.get_by_model(
            operator_model,
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id)
        create_aion_job(operator, lab_head_email, job_group_id, job_group_notifier_id)
    body = {"details": "Aion Job submitted for %s" % lab_head_email}
    return Response(body, status=status.HTTP_202_ACCEPTED)
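# A minimal sketch of submitting an Aion job through the view above, assuming
# a hypothetical '/v0/run/operator/aion/' route; the email address is made up.
from rest_framework.test import APIClient

client = APIClient()  # authentication omitted; the views above run under an authenticated session
response = client.post(
    '/v0/run/operator/aion/',
    {'lab_head_email': 'labhead@example.org'},
    format='json')
assert response.status_code == 202
assert response.json()['details'] == 'Aion Job submitted for labhead@example.org'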
def post(self, request):
    pairs = request.data.get('pairs')
    pipeline_names = request.data.get('pipelines')
    name = request.data.get('name')
    job_group_id = request.data.get('job_group_id', None)
    output_directory_prefix = request.data.get('output_directory_prefix', None)
    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)
    else:
        # fetch the existing JobGroup; notifier_start below needs the object,
        # not just the id
        try:
            job_group = JobGroup.objects.get(id=job_group_id)
        except JobGroup.DoesNotExist:
            return Response({'details': 'Invalid JobGroup: %s' % job_group_id},
                            status=status.HTTP_400_BAD_REQUEST)
    for pipeline_name in pipeline_names:
        pipeline = get_object_or_404(Pipeline, name=pipeline_name)
        try:
            job_group_notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id)
            job_group_notifier_id = str(job_group_notifier.id)
        except JobGroupNotifier.DoesNotExist:
            job_group_notifier_id = notifier_start(job_group, name, operator=pipeline.operator)
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={'pairs': pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix)
        create_jobs_from_operator(operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s"
        % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_construct_output_directory(self):
    """
    Test the creation of the output directory

    Since this tests per run_id, this also tests project prefix retrieval
    (part of output dir path)
    """
    job_group = JobGroup()
    job_group.save()
    operator_model = Operator.objects.get(id=12)
    ultron_operator = UltronOperator(
        operator_model,
        pipeline="cb5d793b-e650-4b7d-bfcd-882858e29cc5",
        job_group_id=job_group.id)
    for run_id in self.run_ids:
        output_directory = ultron_operator._get_output_directory(run_id)
        expected_output_directory_with_timestamp = os.path.join(
            self.expected_output_directory,
            job_group.created_date.strftime("%Y%m%d_%H_%M_%f"),
            "analysis")
        self.assertEqual(output_directory, expected_output_directory_with_timestamp)
def create_request_job(request_id, redelivery=False):
    logger.info("Searching for job: %s for request_id: %s" % (TYPES["REQUEST"], request_id))
    count = Job.objects.filter(
        run=TYPES["REQUEST"],
        args__request_id=request_id,
        status__in=[JobStatus.CREATED, JobStatus.IN_PROGRESS, JobStatus.WAITING_FOR_CHILDREN],
    ).count()
    request_redelivered = Job.objects.filter(run=TYPES["REQUEST"], args__request_id=request_id).count() > 0
    delivery_date = None
    try:
        request_from_lims = LIMSClient.get_request_samples(request_id)
        delivery_date = datetime.fromtimestamp(request_from_lims["deliveryDate"] / 1000)
    except Exception:
        logger.error("Failed to retrieve deliveryDate for request %s" % request_id)
    if not Request.objects.filter(request_id=request_id):
        Request.objects.create(request_id=request_id, delivery_date=delivery_date)
    assays = ETLConfiguration.objects.first()
    if request_redelivered and not (assays.redelivery and redelivery):
        return None, "Request is redelivered, but redelivery deactivated"
    if count == 0:
        job_group = JobGroup()
        job_group.save()
        job_group_notifier_id = notifier_start(job_group, request_id)
        job_group_notifier = JobGroupNotifier.objects.get(id=job_group_notifier_id)
        job = Job(
            run=TYPES["REQUEST"],
            args={
                "request_id": request_id,
                "job_group": str(job_group.id),
                "job_group_notifier": job_group_notifier_id,
                "redelivery": request_redelivered,
            },
            status=JobStatus.CREATED,
            max_retry=1,
            children=[],
            callback=TYPES["REQUEST_CALLBACK"],
            callback_args={
                "request_id": request_id,
                "job_group": str(job_group.id),
                "job_group_notifier": job_group_notifier_id,
            },
            job_group=job_group,
            job_group_notifier=job_group_notifier,
        )
        job.save()
        if request_redelivered:
            redelivery_event = RedeliveryEvent(job_group_notifier_id).to_dict()
            send_notification.delay(redelivery_event)
        request_obj = Request.objects.filter(request_id=request_id).first()
        if request_obj:
            delivery_date_event = SetDeliveryDateFieldEvent(
                job_group_notifier_id, str(request_obj.delivery_date)).to_dict()
            send_notification.delay(delivery_date_event)
        return job, "Job Created"
def test_construct_ultron_job(self):
    """
    Test the creation of an ultron job
    """
    sample = FileMetadata.objects.get(id=self.file_metadata_ids[0][0]).metadata["sampleId"]
    input_json = {
        "argos_version_string": "1.1.2",
        "bam_files": [
            {
                "class": "File",
                "location": "juno:///juno/work/ci/voyager-output/4d9c8213-df56-4a0f-8d86-ce2bd8349c59/s_C_ALLANT_T001_d.rg.md.abra.printreads.bam",
                "secondaryFiles": [
                    {
                        "class": "File",
                        "location": "juno:///juno/work/ci/voyager-output/4d9c8213-df56-4a0f-8d86-ce2bd8349c59/s_C_ALLANT_T001_d.rg.md.abra.printreads.bai",
                    }
                ],
            },
            {
                "class": "File",
                "location": "juno:///juno/work/ci/voyager-output/28ca34e8-9d4c-4543-9fc7-981bf5f6a97f/s_C_ALLANT_T003_d.rg.md.abra.printreads.bam",
                "secondaryFiles": [
                    {
                        "class": "File",
                        "location": "juno:///juno/work/ci/voyager-output/28ca34e8-9d4c-4543-9fc7-981bf5f6a97f/s_C_ALLANT_T003_d.rg.md.abra.printreads.bai",
                    }
                ],
            },
        ],
        "exac_filter": {
            "class": "File",
            "location": "juno:///juno/work/ci/resources/vep/cache/ExAC_nonTCGA.r0.3.1.sites.vep.vcf.gz",
        },
        "fillout_output_fname": "ALN-REQ-ID.fillout.maf",
        "is_impact": True,
        "maf_files": [
            {
                "class": "File",
                "location": "juno:///juno/work/ci/voyager-output/4d9c8213-df56-4a0f-8d86-ce2bd8349c59/s_C_ALLANT_T001_d.s_C_ALLANT_N002_d.muts.maf",
            },
            {
                "class": "File",
                "location": "juno:///juno/work/ci/voyager-output/28ca34e8-9d4c-4543-9fc7-981bf5f6a97f/s_C_ALLANT_T003_d.s_C_ALLANT_N002_d.muts.maf",
            },
        ],
        "ref_fasta": {
            "class": "File",
            "location": "juno:///juno/work/ci/resources/genomes/GRCh37/fasta/b37.fasta",
        },
        "sample_ids": ["s_C_ALLANT_T001_d", "s_C_ALLANT_T003_d"],
        "unindexed_bam_files": [
            {"class": "File", "location": "juno:///path/to/P-00000002-T.bam"},
            {"class": "File", "location": "juno:///path/to/00000001-T.bam"},
        ],
        "unindexed_maf_files": [
            {"class": "File", "location": "juno:///path/to/P-0000002-T01-IM6.txt"},
            {"class": "File", "location": "juno:///path/to/P-0000000-T01-IM6.txt"},
        ],
        "unindexed_sample_ids": ["P-0000002-T01-IM6", "P-0000001-T01-IM6"],
    }
    operator_model = Operator.objects.get(id=12)
    job_group = JobGroup()
    job_group.save()
    ultron_operator = UltronOperator(
        operator_model,
        pipeline="cb5d793b-e650-4b7d-bfcd-882858e29cc5",
        job_group_id=job_group.id)
    inputs = ultron_operator._build_inputs(self.run_ids)
    # required; because output_dir is arbitrarily set, we assume they're
    # going to be the same for every run, set by one run_id
    rep_run_id = self.run_ids[0]
    ultron_jobs = [ultron_operator._build_job(inputs, rep_run_id)]
    job_name = ""
    job_input_json = ""
    # there should only be one batch job; quirk that it must be in a list
    self.assertEqual(1, len(ultron_jobs))
    for ultron_job in ultron_jobs:
        if ultron_job.is_valid():
            job_name = ultron_job.name
            job_input_json = ultron_job.inputs
            tags = ultron_job.tags
            output_directory = ultron_job.output_directory
    expected_output_directory_with_timestamp = os.path.join(
        self.expected_output_directory,
        job_group.created_date.strftime("%Y%m%d_%H_%M_%f"),
        "analysis")
    for key in job_input_json:
        self.assertEqual(ordered(job_input_json[key]), ordered(input_json[key]))
    self.assertEqual(output_directory, expected_output_directory_with_timestamp)
class JobViewTest(APITestCase):
    def setUp(self):
        admin_user = User.objects.create_superuser("admin", "sample_email", "password")
        self.client.force_authenticate(user=admin_user)
        self.job_group1 = JobGroup()
        self.job_group1.save()
        self.job_group2 = JobGroup()
        self.job_group2.save()
        self.job_group3 = JobGroup()
        self.job_group3.save()
        self.job_group4 = JobGroup()
        self.job_group4.save()
        self.job1 = Job(
            args={
                "key1": "value1",
                "key2": "value2",
                "boolean_key": True,
                "sample_id": "sample_id1",
                "request_id": "request_id1",
            },
            status=JobStatus.COMPLETED,
            job_group=self.job_group1,
            run=TYPES["SAMPLE"],
        )
        self.job1.save()
        self.job2 = Job(
            args={
                "key1": "value1",
                "key2": "1value2",
                "boolean_key": False,
                "sample_id": "sample_id2",
                "request_id": "request_id1",
            },
            status=JobStatus.FAILED,
            job_group=self.job_group2,
            run=TYPES["POOLED_NORMAL"],
        )
        self.job2.save()
        self.job3 = Job(
            args={
                "key1": "value1",
                "key2": "1value2",
                "boolean_key": False,
                "sample_id": "sample_id3",
                "request_id": "request_id1",
            },
            status=JobStatus.FAILED,
            job_group=self.job_group1,
            run=TYPES["POOLED_NORMAL"],
        )
        self.job3.save()
        self.job4 = Job(
            args={
                "key1": "value1",
                "key2": "1value4",
                "boolean_key": False,
                "sample_id": "sample_id4",
                "request_id": "request_id1",
            },
            status=JobStatus.FAILED,
            job_group=self.job_group3,
            run=TYPES["POOLED_NORMAL"],
        )
        self.job4.save()
        self.job5 = Job(
            args={
                "key1": "value1",
                "key2": "1value2",
                "boolean_key": False,
                "sample_id": "sample_id5",
                "request_id": "request_id2",
            },
            status=JobStatus.FAILED,
            job_group=self.job_group4,
            run=TYPES["POOLED_NORMAL"],
        )
        self.job5.save()
        self.api_root = "/v0/etl/jobs"

    def test_query_job_group(self):
        response = self.client.get(self.api_root + "/?job_group=" + str(self.job_group1.id))
        self.assertEqual(len(response.json()["results"]), 2)

    def test_query_job_type(self):
        response = self.client.get(self.api_root + "/?type=POOLED_NORMAL")
        self.assertEqual(len(response.json()["results"]), 4)

    def test_query_sampleid(self):
        response = self.client.get(self.api_root + "/?sample_id=sample_id1")
        self.assertEqual(len(response.json()["results"]), 1)

    def test_query_requestid(self):
        response = self.client.get(self.api_root + "/?request_id=request_id1")
        self.assertEqual(len(response.json()["results"]), 4)
        response = self.client.get(self.api_root + "/?request_id=request_id1&sample_id=sample_id1")
        self.assertEqual(len(response.json()["results"]), 1)

    def test_query_value_args(self):
        response = self.client.get(self.api_root + "/?values_args=key1,key2")
        self.assertEqual(len(response.json()["results"]), 3)

    def test_query_args(self):
        response = self.client.get(self.api_root + "/?args=key2:1value4")
        self.assertEqual(len(response.json()["results"]), 1)
        response = self.client.get(self.api_root + "/?args=boolean_key:False")
        self.assertEqual(len(response.json()["results"]), 4)

    def test_query_args_distribution(self):
        response = self.client.get(self.api_root + "/?args_distribution=key2")
        expected_result = {"1value2": 3, "value2": 1, "1value4": 1}
        self.assertEqual(response.json(), expected_result)
def post(self, request):
    run_ids = request.data.get("run_ids")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions", None)
    job_group_id = request.data.get("job_group_id", None)
    for_each = request.data.get("for_each", False)
    if not for_each:
        for i, pipeline_name in enumerate(pipeline_names):
            pipeline_version = pipeline_versions[i]
            get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get("requestId", "Unknown")
        except Run.DoesNotExist:
            req = "Unknown"
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response({"details": "Invalid JobGroup: %s" % job_group_id},
                                status=status.HTTP_400_BAD_REQUEST)
        for i, pipeline_name in enumerate(pipeline_names):
            pipeline_version = pipeline_versions[i]
            pipeline = get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_version)
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                job_group_notifier_id = notifier_start(job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model,
                run_ids=run_ids,
                job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id,
                pipeline=str(pipeline.id),
            )
            create_jobs_from_operator(operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        return Response({"details": "Not Implemented"}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s"
        % (pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
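# A hedged sketch of the request body for the run-based view above. The run
# UUID is reused from the test fixtures earlier in this section; the pipeline
# name and version are illustrative assumptions.
payload = {
    "run_ids": ["4d9c8213-df56-4a0f-8d86-ce2bd8349c59"],
    "pipelines": ["argos"],
    "pipeline_versions": ["1.1.2"],
    "job_group_id": None,  # omit or None to have the view create a new JobGroup
    "for_each": False,  # the only mode this view implements; True returns 400
}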