def post(self, request):
    """Submit operator jobs for a pipeline.

    Dispatches on the request body shape:
      - ``request_ids``: queue one async operator job per request id.
      - ``run_ids``: build an operator from existing runs and create jobs.
      - neither: run the operator's routine job.

    Returns a 200 response with a human-readable summary.
    """
    request_ids = request.data.get('request_ids', [])
    run_ids = request.data.get('run_ids', [])
    # Default normalized from [] to None for consistency with the other
    # endpoints; both are falsy, so the "create a new JobGroup" path is
    # taken identically.
    job_group_id = request.data.get('job_group_id', None)
    pipeline_name = request.data['pipeline_name']
    pipeline = get_object_or_404(Pipeline, name=pipeline_name)
    if request_ids:
        for request_id in request_ids:
            # Lazy %-args: formatting is deferred until the record is emitted.
            logging.info("Submitting requestId %s to pipeline %s",
                         request_id, pipeline_name)
            if not job_group_id:
                # Create the JobGroup once and reuse it for every request id.
                job_group = JobGroup.objects.create()
                job_group_id = str(job_group.id)
            create_jobs_from_request.delay(request_id, pipeline.operator_id, job_group_id)
        body = {"details": "Operator Job submitted %s" % str(request_ids)}
    elif run_ids:
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        if job_group_id:
            operator = OperatorFactory.get_by_model(
                operator_model, run_ids=run_ids, job_group_id=job_group_id)
            create_jobs_from_operator(operator, job_group_id)
            body = {
                "details": "Operator Job submitted to pipeline %s, job group id %s, with runs %s" % (
                    pipeline_name, job_group_id, str(run_ids))
            }
        else:
            operator = OperatorFactory.get_by_model(operator_model, run_ids=run_ids)
            create_jobs_from_operator(operator)
            body = {
                "details": "Operator Job submitted to pipeline %s with runs %s" % (
                    pipeline_name, str(run_ids))
            }
    else:
        # No request ids and no run ids: fire the operator's routine job.
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        if job_group_id:
            operator = OperatorFactory.get_by_model(
                operator_model, job_group_id=job_group_id)
            run_routine_operator_job(operator, job_group_id)
            body = {
                "details": "Operator Job submitted to operator %s (JobGroupId: %s)" % (
                    operator, job_group_id)
            }
        else:
            operator = OperatorFactory.get_by_model(operator_model)
            run_routine_operator_job(operator)
            body = {
                "details": "Operator Job submitted to operator %s" % operator
            }
    return Response(body, status=status.HTTP_200_OK)
def post(self, request):
    """Submit operator jobs for existing runs across one or more pipelines.

    Validates the pipelines, resolves (or creates) a JobGroup, ensures a
    JobGroupNotifier per pipeline, and creates operator jobs. ``for_each``
    mode is not implemented and returns 400.

    Returns 202 on success, 400 for an invalid JobGroup or ``for_each``.
    """
    run_ids = request.data.get('run_ids')
    pipeline_names = request.data.get('pipelines')
    job_group_id = request.data.get('job_group_id', None)
    for_each = request.data.get('for_each', False)
    if not for_each:
        # Validate every pipeline name up front so we 404 before any
        # side effects (job group creation, notifier start).
        for pipeline_name in pipeline_names:
            get_object_or_404(Pipeline, name=pipeline_name)
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get('requestId', 'Unknown')
        except (Run.DoesNotExist, IndexError, TypeError):
            # Empty/missing run_ids previously escaped as an unhandled
            # IndexError/TypeError (HTTP 500); treat it like an unknown run.
            req = 'Unknown'
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response(
                    {'details': 'Invalid JobGroup: %s' % job_group_id},
                    status=status.HTTP_400_BAD_REQUEST)
        for pipeline_name in pipeline_names:
            pipeline = get_object_or_404(Pipeline, name=pipeline_name)
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                # First submission for this job group + notifier type.
                job_group_notifier_id = notifier_start(
                    job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model,
                run_ids=run_ids,
                job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id)
            create_jobs_from_operator(
                operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        return Response({'details': 'Not Implemented'}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s" % (
            pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def post(self, request):
    """Submit pair-based operator jobs to versioned pipelines.

    Resolves (or creates) a JobGroup, ensures a JobGroupNotifier per
    pipeline (starting one with submission metadata when absent), and
    creates jobs via each pipeline's operator. Returns 202.
    """
    pairs = request.data.get("pairs")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions")
    name = request.data.get("name")
    lab_head = request.data.get("labHeadName")
    investigator = request.data.get("investigatorName")
    assay = request.data.get("assay")
    job_group_id = request.data.get("job_group_id", None)
    output_directory_prefix = request.data.get("output_directory_prefix", None)

    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)

    for idx, pname in enumerate(pipeline_names):
        pversion = pipeline_versions[idx]
        pipeline = get_object_or_404(Pipeline, name=pname, version=pversion)
        try:
            notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id,
            )
            job_group_notifier_id = str(notifier.id)
        except JobGroupNotifier.DoesNotExist:
            # No notifier for this job group + notifier type yet: start one,
            # attaching the submission metadata.
            job_group_notifier_id = notifier_start(
                job_group,
                name,
                operator=pipeline.operator,
                metadata={
                    "assay": assay,
                    "investigatorName": investigator,
                    "labHeadName": lab_head,
                },
            )
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={"pairs": pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix,
        )
        create_jobs_from_operator(
            operator, job_group_id, job_group_notifier_id=job_group_notifier_id
        )

    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s"
        % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def test_create_jobs_from_operator_pipeline_deleted(self, get_pipeline_id, get_jobs, send_notification, create_run_task):
    """A deleted pipeline (no pipeline id) must leave the created run FAILED."""
    # List literal instead of list() + append.
    argos_jobs = [
        (APIRunCreateSerializer(
            data={'app': 'cb5d793b-e650-4b7d-bfcd-882858e29cc5', 'inputs': None, 'name': None, 'tags': {}}), None)
    ]
    get_jobs.return_value = argos_jobs
    get_pipeline_id.return_value = None  # simulates the deleted pipeline
    create_run_task.return_value = None
    send_notification.return_value = None
    Run.objects.all().delete()
    operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    create_jobs_from_operator(operator, None)
    # count() issues COUNT(*) instead of materializing the whole queryset.
    self.assertEqual(Run.objects.count(), 1)
    self.assertEqual(Run.objects.first().status, RunStatus.FAILED)
def test_create_jobs_from_operator_pipeline_deleted(
    self, get_pipeline_id, get_jobs, send_notification, create_run_task, memcache_task_lock, set_for_restart
):
    """A deleted pipeline (no pipeline id) must leave the created run FAILED."""
    # List literal instead of list() + append.
    argos_jobs = [
        RunCreator(app="cb5d793b-e650-4b7d-bfcd-882858e29cc5", inputs=None, name=None, tags={})
    ]
    set_for_restart.return_value = None
    get_jobs.return_value = argos_jobs
    get_pipeline_id.return_value = None  # simulates the deleted pipeline
    create_run_task.return_value = None
    send_notification.return_value = None
    memcache_task_lock.return_value = True
    Run.objects.all().delete()
    operator = OperatorFactory.get_by_model(Operator.objects.get(id=1), request_id="bar")
    create_jobs_from_operator(operator, None)
    # count() issues COUNT(*) instead of materializing the whole queryset.
    self.assertEqual(Run.objects.count(), 1)
    self.assertEqual(RunStatus(Run.objects.first().status), RunStatus.FAILED)
def post(self, request):
    """Submit pair-based operator jobs to the named pipelines.

    Resolves (or creates) a JobGroup, ensures a JobGroupNotifier per
    pipeline, and creates jobs via each pipeline's operator. Returns 202.
    """
    pairs = request.data.get('pairs')
    pipeline_names = request.data.get('pipelines')
    name = request.data.get('name')
    job_group_id = request.data.get('job_group_id', None)
    output_directory_prefix = request.data.get('output_directory_prefix', None)

    if not job_group_id:
        job_group = JobGroup()
        job_group.save()
        job_group_id = str(job_group.id)

    for pname in pipeline_names:
        pipeline = get_object_or_404(Pipeline, name=pname)
        try:
            notifier = JobGroupNotifier.objects.get(
                job_group_id=job_group_id,
                notifier_type_id=pipeline.operator.notifier_id,
            )
            job_group_notifier_id = str(notifier.id)
        except JobGroupNotifier.DoesNotExist:
            # No notifier for this job group + notifier type yet: start one.
            job_group_notifier_id = notifier_start(
                job_group, name, operator=pipeline.operator)
        operator_model = Operator.objects.get(id=pipeline.operator_id)
        operator = OperatorFactory.get_by_model(
            operator_model,
            pairing={'pairs': pairs},
            job_group_id=job_group_id,
            job_group_notifier_id=job_group_notifier_id,
            output_directory_prefix=output_directory_prefix,
        )
        create_jobs_from_operator(
            operator, job_group_id, job_group_notifier_id=job_group_notifier_id
        )

    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with pairs %s"
        % (pipeline_names, job_group_id, str(pairs))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)
def post(self, request):
    """Submit operator jobs for existing runs to versioned pipelines.

    Validates pipeline name/version pairs, resolves (or creates) a
    JobGroup, ensures a JobGroupNotifier per pipeline, and creates
    operator jobs. ``for_each`` mode is not implemented and returns 400.

    Returns 202 on success, 400 for bad input.
    """
    run_ids = request.data.get("run_ids")
    pipeline_names = request.data.get("pipelines")
    pipeline_versions = request.data.get("pipeline_versions", None)
    job_group_id = request.data.get("job_group_id", None)
    for_each = request.data.get("for_each", False)
    if not for_each:
        # Each pipeline must be paired with a version. Fail fast with 400
        # instead of the unhandled TypeError/IndexError (HTTP 500) that
        # indexing a missing/short list produced.
        if not pipeline_versions or len(pipeline_versions) != len(pipeline_names):
            return Response(
                {"details": "pipeline_versions must be provided, one per pipeline"},
                status=status.HTTP_400_BAD_REQUEST)
        # Validate every pipeline up front so we 404 before any side effects.
        for i, pipeline_name in enumerate(pipeline_names):
            get_object_or_404(Pipeline, name=pipeline_name, version=pipeline_versions[i])
        try:
            run = Run.objects.get(id=run_ids[0])
            req = run.tags.get("requestId", "Unknown")
        except (Run.DoesNotExist, IndexError, TypeError):
            # Empty/missing run_ids previously escaped as an unhandled
            # IndexError/TypeError; treat it like an unknown run.
            req = "Unknown"
        if not job_group_id:
            job_group = JobGroup()
            job_group.save()
            job_group_id = str(job_group.id)
            notifier_start(job_group, req)
        else:
            try:
                job_group = JobGroup.objects.get(id=job_group_id)
            except JobGroup.DoesNotExist:
                return Response(
                    {"details": "Invalid JobGroup: %s" % job_group_id},
                    status=status.HTTP_400_BAD_REQUEST)
        for i, pipeline_name in enumerate(pipeline_names):
            pipeline = get_object_or_404(
                Pipeline, name=pipeline_name, version=pipeline_versions[i])
            try:
                job_group_notifier = JobGroupNotifier.objects.get(
                    job_group_id=job_group_id,
                    notifier_type_id=pipeline.operator.notifier_id)
                job_group_notifier_id = str(job_group_notifier.id)
            except JobGroupNotifier.DoesNotExist:
                # First submission for this job group + notifier type.
                job_group_notifier_id = notifier_start(
                    job_group, req, operator=pipeline.operator)
            operator_model = Operator.objects.get(id=pipeline.operator_id)
            operator = OperatorFactory.get_by_model(
                operator_model,
                run_ids=run_ids,
                job_group_id=job_group_id,
                job_group_notifier_id=job_group_notifier_id,
                pipeline=str(pipeline.id),
            )
            create_jobs_from_operator(
                operator, job_group_id, job_group_notifier_id=job_group_notifier_id)
    else:
        return Response({"details": "Not Implemented"}, status=status.HTTP_400_BAD_REQUEST)
    body = {
        "details": "Operator Job submitted to pipelines %s, job group id %s, with runs %s" % (
            pipeline_names, job_group_id, str(run_ids))
    }
    return Response(body, status=status.HTTP_202_ACCEPTED)