def list(self, request): """Retrieves jobs and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) job_status = rest_util.parse_string(request, 'status', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) order = rest_util.parse_string_list(request, 'order', required=False) jobs = Job.objects.get_jobs(started, ended, job_status, job_ids, job_type_ids, job_type_names, job_type_categories, order) page = self.paginate_queryset(jobs) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def get(self, request): """Gets jobs and their associated latest execution :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) job_status = rest_util.parse_string(request, 'status', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) order = rest_util.parse_string_list(request, 'order', required=False) jobs = Job.objects.get_jobs(started, ended, job_status, job_ids, job_type_ids, job_type_names, job_type_categories, order) page = rest_util.perform_paging(request, jobs) # Add the latest execution for each matching job paged_jobs = list(page.object_list) job_exes_dict = JobExecution.objects.get_latest(page.object_list) for job in paged_jobs: job.latest_job_exe = job_exes_dict[job.id] if job.id in job_exes_dict else None page.object_list = paged_jobs serializer = JobWithExecutionListSerializer(page, context={'request': request}) return Response(serializer.data, status=status.HTTP_200_OK)
def list(self, request): """Gets jobs and their associated latest execution :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) job_status = rest_util.parse_string(request, 'status', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) order = rest_util.parse_string_list(request, 'order', required=False) jobs = Job.objects.get_jobs(started, ended, job_status, job_ids, job_type_ids, job_type_names, job_type_categories, order) # Add the latest execution for each matching job page = self.paginate_queryset(jobs) job_exes_dict = JobExecution.objects.get_latest(page) for job in page: job.latest_job_exe = job_exes_dict[job.id] if job.id in job_exes_dict else None serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def get(self, request):
    '''Gets job executions and their associated job_type id, name, and version

    :param request: the HTTP GET request
    :type request: :class:`rest_framework.request.Request`
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    '''
    started = rest_util.parse_timestamp(request, u'started', required=False)
    ended = rest_util.parse_timestamp(request, u'ended', required=False)
    rest_util.check_time_range(started, ended)

    job_status = rest_util.parse_string(request, u'status', required=False)
    job_type_ids = rest_util.parse_int_list(request, u'job_type_id', required=False)
    job_type_names = rest_util.parse_string_list(request, u'job_type_name', required=False)
    job_type_categories = rest_util.parse_string_list(request, u'job_type_category', required=False)
    node_ids = rest_util.parse_int_list(request, u'node_id', required=False)
    order = rest_util.parse_string_list(request, u'order', required=False)

    job_exes = JobExecution.objects.get_exes(started, ended, job_status, job_type_ids, job_type_names,
                                             job_type_categories, node_ids, order)

    page = rest_util.perform_paging(request, job_exes)
    serializer = JobExecutionListSerializer(page, context={u'request': request})
    return Response(serializer.data, status=status.HTTP_200_OK)
def list(self, request): """Gets job executions and their associated job_type id, name, and version :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) job_status = rest_util.parse_string(request, 'status', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) node_ids = rest_util.parse_int_list(request, 'node_id', required=False) order = rest_util.parse_string_list(request, 'order', required=False) job_exes = JobExecution.objects.get_exes(started, ended, job_status, job_type_ids, job_type_names, job_type_categories, node_ids, order) page = self.paginate_queryset(job_exes) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def get(self, request):
    '''Retrieves the job updates for a given time range and returns them in JSON form

    :param request: the HTTP GET request
    :type request: :class:`rest_framework.request.Request`
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    '''
    started = rest_util.parse_timestamp(request, u'started', required=False)
    ended = rest_util.parse_timestamp(request, u'ended', required=False)
    rest_util.check_time_range(started, ended)

    job_status = rest_util.parse_string(request, u'status', required=False)
    job_type_ids = rest_util.parse_int_list(request, u'job_type_id', required=False)
    job_type_names = rest_util.parse_string_list(request, u'job_type_name', required=False)
    job_type_categories = rest_util.parse_string_list(request, u'job_type_category', required=False)
    order = rest_util.parse_string_list(request, u'order', required=False)

    jobs = Job.objects.get_job_updates(started, ended, job_status, job_type_ids, job_type_names,
                                       job_type_categories, order)

    page = rest_util.perform_paging(request, jobs)
    Job.objects.populate_input_files(page)
    serializer = JobUpdateListSerializer(page, context={'request': request})
    return Response(serializer.data, status=status.HTTP_200_OK)
def get(self, request): """Retrieves the job updates for a given time range and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) statuses = rest_util.parse_string_list(request, 'status', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) include_superseded = rest_util.parse_bool(request, 'include_superseded', required=False) order = rest_util.parse_string_list(request, 'order', required=False) jobs = Job.objects.get_job_updates(started=started, ended=ended, statuses=statuses, job_type_ids=job_type_ids, job_type_names=job_type_names, job_type_categories=job_type_categories, include_superseded=include_superseded, order=order) page = self.paginate_queryset(jobs) Job.objects.populate_input_files(page) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def test_parse_int_list_accepted_all(self):
    '''Tests parsing a list of int parameters where all values are acceptable.'''
    request = MagicMock(Request)
    request.QUERY_PARAMS = QueryDict('', mutable=True)
    request.QUERY_PARAMS.setlist('test', ['1', '2'])

    self.assertListEqual(rest_util.parse_int_list(request, 'test', accepted_values=[1, 2]), [1, 2])
def get(self, request):
    '''Retrieves the product updates for a given time range and returns them in JSON form

    :param request: the HTTP GET request
    :type request: :class:`rest_framework.request.Request`
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    '''
    started = rest_util.parse_timestamp(request, u'started', required=False)
    ended = rest_util.parse_timestamp(request, u'ended', required=False)
    rest_util.check_time_range(started, ended)

    job_type_ids = rest_util.parse_int_list(request, u'job_type_id', required=False)
    job_type_names = rest_util.parse_string_list(request, u'job_type_name', required=False)
    job_type_categories = rest_util.parse_string_list(request, u'job_type_category', required=False)
    is_operational = rest_util.parse_bool(request, u'is_operational', required=False)
    file_name = rest_util.parse_string(request, u'file_name', required=False)
    order = rest_util.parse_string_list(request, u'order', required=False)

    products = ProductFile.objects.get_products(started, ended, job_type_ids, job_type_names,
                                                job_type_categories, is_operational, file_name, order)

    page = rest_util.perform_paging(request, products)
    ProductFile.objects.populate_source_ancestors(page)
    serializer = ProductFileUpdateListSerializer(page, context={'request': request})
    return Response(serializer.data, status=status.HTTP_200_OK)
def list(self, request): """Retrieves the product updates for a given time range and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, u'started', required=False) ended = rest_util.parse_timestamp(request, u'ended', required=False) rest_util.check_time_range(started, ended) job_type_ids = rest_util.parse_int_list(request, u'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, u'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, u'job_type_category', required=False) is_operational = rest_util.parse_bool(request, u'is_operational', required=False) file_name = rest_util.parse_string(request, u'file_name', required=False) order = rest_util.parse_string_list(request, u'order', required=False) products = ProductFile.objects.get_products(started, ended, job_type_ids, job_type_names, job_type_categories, is_operational, file_name, order) page = self.paginate_queryset(products) ProductFile.objects.populate_source_ancestors(page) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def test_parse_int_list_post(self):
    '''Tests parsing a required list of int parameters that are provided via POST.'''
    request = MagicMock(Request)
    request.DATA = QueryDict('', mutable=True)
    request.DATA.setlist('test', ['1', '2'])

    self.assertEqual(rest_util.parse_int_list(request, 'test'), [1, 2])
def test_parse_int_list(self):
    '''Tests parsing a required list of int parameters that is provided via GET.'''
    request = MagicMock(Request)
    request.QUERY_PARAMS = QueryDict('', mutable=True)
    request.QUERY_PARAMS.setlist('test', ['1', '2'])

    self.assertListEqual(rest_util.parse_int_list(request, 'test'), [1, 2])
def test_parse_int_list_optional(self):
    '''Tests parsing an optional list of int parameters that are missing.'''
    request = MagicMock(Request)
    request.QUERY_PARAMS = QueryDict('', mutable=True)
    request.QUERY_PARAMS.update({
        'test': '1',
    })

    self.assertListEqual(rest_util.parse_int_list(request, 'test2', required=False), [])
def test_parse_int_list_default(self):
    '''Tests parsing a required list of int parameters that are provided via default value.'''
    request = MagicMock(Request)
    request.QUERY_PARAMS = QueryDict('', mutable=True)
    request.QUERY_PARAMS.update({
        'test': '1',
    })

    self.assertEqual(rest_util.parse_int_list(request, 'test2', ['2', '3']), [2, 3])
def test_parse_int_list_post(self): """Tests parsing a required list of int parameters that are provided via POST.""" request = MagicMock(Request) request.data = QueryDict('', mutable=True) request.data.update({ 'test': ['1', '2'] }) self.assertEqual(rest_util.parse_int_list(request, 'test'), [1, 2])
def post(self, request): """Increase max_tries, place it on the queue, and returns the new job information in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) job_status = rest_util.parse_string(request, 'status', required=False) job_ids = rest_util.parse_int_list(request, 'job_ids', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_ids', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_names', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_categories', required=False) error_categories = rest_util.parse_string_list(request, 'error_categories', required=False) priority = rest_util.parse_int(request, 'priority', required=False) # Fetch all the jobs matching the filters job_status = [job_status] if job_status else job_status jobs = Job.objects.get_jobs(started=started, ended=ended, statuses=job_status, job_ids=job_ids, job_type_ids=job_type_ids, job_type_names=job_type_names, job_type_categories=job_type_categories, error_categories=error_categories) if not jobs: raise Http404 # Attempt to queue all jobs matching the filters requested_job_ids = {job.id for job in jobs} Queue.objects.requeue_jobs(requested_job_ids, priority) # Refresh models to get the new status information for all originally requested jobs jobs = Job.objects.get_jobs(job_ids=requested_job_ids) page = self.paginate_queryset(jobs) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def get(self, request):
    '''Retrieves the list of all recipes and returns it in JSON form

    :param request: the HTTP GET request
    :type request: :class:`rest_framework.request.Request`
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    '''
    started = rest_util.parse_timestamp(request, 'started', required=False)
    ended = rest_util.parse_timestamp(request, 'ended', required=False)
    rest_util.check_time_range(started, ended)

    type_ids = rest_util.parse_int_list(request, 'type_id', required=False)
    type_names = rest_util.parse_string_list(request, 'type_name', required=False)
    order = rest_util.parse_string_list(request, 'order', required=False)

    recipes = Recipe.objects.get_recipes(started, ended, type_ids, type_names, order)

    page = rest_util.perform_paging(request, recipes)
    serializer = RecipeListSerializer(page, context={'request': request})
    return Response(serializer.data, status=status.HTTP_200_OK)
def list(self, request): """Retrieves the list of all recipes and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) type_ids = rest_util.parse_int_list(request, 'type_id', required=False) type_names = rest_util.parse_string_list(request, 'type_name', required=False) include_superseded = rest_util.parse_bool(request, 'include_superseded', required=False) order = rest_util.parse_string_list(request, 'order', required=False) recipes = Recipe.objects.get_recipes(started=started, ended=ended, type_ids=type_ids, type_names=type_names, include_superseded=include_superseded, order=order) page = self.paginate_queryset(recipes) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def list(self, request): """Retrieves the list of all ingests and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) ingest_statuses = rest_util.parse_string_list(request, 'status', required=False) strike_ids = rest_util.parse_int_list(request, 'strike_id', required=False) file_name = rest_util.parse_string(request, 'file_name', required=False) order = rest_util.parse_string_list(request, 'order', required=False) ingests = Ingest.objects.get_ingests(started, ended, ingest_statuses, strike_ids, file_name, order) page = self.paginate_queryset(ingests) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def list(self, request): """Retrieves the job load for a given time range and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp( request, 'started', default_value=rest_util.get_relative_days(7)) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended, max_duration=datetime.timedelta(days=31)) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) job_type_priorities = rest_util.parse_string_list(request, 'job_type_priority', required=False) job_loads = JobLoad.objects.get_job_loads(started, ended, job_type_ids, job_type_names, job_type_categories, job_type_priorities) job_loads_grouped = JobLoad.objects.group_by_time(job_loads) page = self.paginate_queryset(job_loads_grouped) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def list(self, request): """Retrieves the list of all recipes and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) type_ids = rest_util.parse_int_list(request, 'type_id', required=False) type_names = rest_util.parse_string_list(request, 'type_name', required=False) order = rest_util.parse_string_list(request, 'order', required=False) recipes = Recipe.objects.get_recipes(started, ended, type_ids, type_names, order) page = self.paginate_queryset(recipes) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def get(self, request): """Retrieves the job load for a given time range and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', default_value=rest_util.get_relative_days(7)) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended, max_duration=datetime.timedelta(days=31)) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) job_type_priorities = rest_util.parse_string_list(request, 'job_type_priority', required=False) job_loads = JobLoad.objects.get_job_loads(started, ended, job_type_ids, job_type_names, job_type_categories, job_type_priorities) job_loads_grouped = JobLoad.objects.group_by_time(job_loads) page = rest_util.perform_paging(request, job_loads_grouped) serializer = JobLoadGroupListSerializer(page, context={'request': request}) return Response(serializer.data, status=status.HTTP_200_OK)
def _list_v6(self, request): """Retrieves the list of all recipes and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) source_started = rest_util.parse_timestamp(request, 'source_started', required=False) source_ended = rest_util.parse_timestamp(request, 'source_ended', required=False) rest_util.check_time_range(source_started, source_ended) source_sensor_classes = rest_util.parse_string_list( request, 'source_sensor_class', required=False) source_sensors = rest_util.parse_string_list(request, 'source_sensor', required=False) source_collections = rest_util.parse_string_list(request, 'source_collection', required=False) source_tasks = rest_util.parse_string_list(request, 'source_task', required=False) recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False) type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False) type_names = rest_util.parse_string_list(request, 'recipe_type_name', required=False) batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False) is_superseded = rest_util.parse_bool(request, 'is_superseded', required=False) is_completed = rest_util.parse_bool(request, 'is_completed', required=False) order = rest_util.parse_string_list(request, 'order', required=False) recipes = Recipe.objects.get_recipes_v6( started=started, ended=ended, source_started=source_started, source_ended=source_ended, source_sensor_classes=source_sensor_classes, source_sensors=source_sensors, source_collections=source_collections, source_tasks=source_tasks, ids=recipe_ids, type_ids=type_ids, type_names=type_names, batch_ids=batch_ids, is_superseded=is_superseded, is_completed=is_completed, order=order) # additional optimizations not being captured by the existing ones in the manager # see issue #1717 recipes = recipes.select_related('recipe_type_rev__recipe_type').defer( None) page = self.paginate_queryset(recipes) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def create_all_v6(self, request):
    """Creates or edits a dataset - including the dataset members - and returns a link to the detail URL"""

    title = rest_util.parse_string(request, 'title', required=False)
    description = rest_util.parse_string(request, 'description', required=False)
    definition = rest_util.parse_dict(request, 'definition', required=True)
    template = rest_util.parse_dict(request, 'data_template', required=False)
    dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

    # file filters
    data_started = rest_util.parse_timestamp(request, 'data_started', required=False)
    data_ended = rest_util.parse_timestamp(request, 'data_ended', required=False)
    rest_util.check_time_range(data_started, data_ended)

    source_started = rest_util.parse_timestamp(request, 'source_started', required=False)
    source_ended = rest_util.parse_timestamp(request, 'source_ended', required=False)
    rest_util.check_time_range(source_started, source_ended)

    source_sensor_classes = rest_util.parse_string_list(request, 'source_sensor_class', required=False)
    source_sensors = rest_util.parse_string_list(request, 'source_sensor', required=False)
    source_collections = rest_util.parse_string_list(request, 'source_collection', required=False)
    source_tasks = rest_util.parse_string_list(request, 'source_task', required=False)

    mod_started = rest_util.parse_timestamp(request, 'modified_started', required=False)
    mod_ended = rest_util.parse_timestamp(request, 'modified_ended', required=False)
    rest_util.check_time_range(mod_started, mod_ended)

    job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False)
    job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False)
    job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
    file_names = rest_util.parse_string_list(request, 'file_name', required=False)
    job_outputs = rest_util.parse_string_list(request, 'job_output', required=False)
    recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False)
    recipe_type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False)
    recipe_nodes = rest_util.parse_string_list(request, 'recipe_node', required=False)
    batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False)
    order = rest_util.parse_string_list(request, 'order', required=False)

    data = rest_util.parse_dict_list(request, 'data', required=False)
    data_list = []

    # validate the definition & create the dataset
    try:
        dataset_def = DataSetDefinitionV6(definition=definition, do_validate=True).get_definition()
    except InvalidDataSetDefinition as ex:
        message = 'DataSet definition is invalid'
        logger.exception(message)
        raise BadParameter('%s: %s' % (message, unicode(ex)))

    try:
        dataset = DataSet.objects.create_dataset_v6(dataset_def, title=title, description=description)
    except Exception as ex:
        message = 'Unable to create new dataset'
        logger.exception(message)
        raise BadParameter('%s: %s' % (message, unicode(ex)))

    try:
        dataset = DataSet.objects.get_details_v6(dataset.id)
    except DataSet.DoesNotExist:
        raise Http404

    if not data and not template:
        url = reverse('dataset_details_view', args=[dataset.id], request=request)
        serializer = DataSetDetailsSerializerV6(dataset)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=dict(location=url))

    # Try and find the data
    if data:
        try:
            for d in data:
                data = DataV6(data=d, do_validate=True).get_data()
                data_list.append(data)
        except InvalidData as ex:
            message = 'Data is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))
    elif template:
        try:
            data_list = DataSetMember.objects.build_data_list(
                template=template, data_started=data_started, data_ended=data_ended,
                source_started=source_started, source_ended=source_ended,
                source_sensor_classes=source_sensor_classes, source_sensors=source_sensors,
                source_collections=source_collections, source_tasks=source_tasks,
                mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids,
                job_type_names=job_type_names, job_ids=job_ids, file_names=file_names,
                job_outputs=job_outputs, recipe_ids=recipe_ids, recipe_type_ids=recipe_type_ids,
                recipe_nodes=recipe_nodes, batch_ids=batch_ids, order=order)
        except InvalidData as ex:
            message = 'Data is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

    if not data_list:
        resp_dict = {'No Results': 'No files found from filters and/or no data provided'}
        return Response(resp_dict)

    validation = DataSetMember.objects.validate_data_list(dataset_def=dataset_def, data_list=data_list)
    members = []
    if validation.is_valid and not dry_run:
        members = DataSetMember.objects.create_dataset_members(dataset=dataset, data_list=data_list)
        dataset = DataSet.objects.get(id=dataset.id)
        serializer = DataSetDetailsSerializerV6(dataset)
        url = reverse('dataset_details_view', args=[dataset.id], request=request)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=dict(location=url))
    elif not validation.is_valid:
        raise BadParameter('%s: %s' % ('Error(s) validating data against dataset',
                                       [e.to_dict() for e in validation.errors]))

    resp_dict = []
    for dl in data_list:
        resp_dict.append(convert_data_to_v6_json(dl).get_dict())

    return Response(resp_dict)
def post_v6(self, request, dataset_id):
    """Adds a dataset member to the dataset

    :param request: the HTTP request
    :type request: :class:`rest_framework.request.Request`
    :param dataset_id: The id of the dataset
    :type dataset_id: int encoded as a str
    :rtype: :class:`rest_framework.response.Response`
    :returns: the HTTP response to send back to the user
    """
    template = rest_util.parse_dict(request, 'data_template', required=False)
    dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

    # file filters
    data_started = rest_util.parse_timestamp(request, 'data_started', required=False)
    data_ended = rest_util.parse_timestamp(request, 'data_ended', required=False)
    rest_util.check_time_range(data_started, data_ended)

    created_started = rest_util.parse_timestamp(request, 'created_started', required=False)
    created_ended = rest_util.parse_timestamp(request, 'created_ended', required=False)
    rest_util.check_time_range(created_started, created_ended)

    source_started = rest_util.parse_timestamp(request, 'source_started', required=False)
    source_ended = rest_util.parse_timestamp(request, 'source_ended', required=False)
    rest_util.check_time_range(source_started, source_ended)

    source_sensor_classes = rest_util.parse_string_list(request, 'source_sensor_class', required=False)
    source_sensors = rest_util.parse_string_list(request, 'source_sensor', required=False)
    source_collections = rest_util.parse_string_list(request, 'source_collection', required=False)
    source_tasks = rest_util.parse_string_list(request, 'source_task', required=False)

    mod_started = rest_util.parse_timestamp(request, 'modified_started', required=False)
    mod_ended = rest_util.parse_timestamp(request, 'modified_ended', required=False)
    rest_util.check_time_range(mod_started, mod_ended)

    job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False)
    job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False)
    job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
    file_names = rest_util.parse_string_list(request, 'file_name', required=False)
    job_outputs = rest_util.parse_string_list(request, 'job_output', required=False)
    recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False)
    recipe_type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False)
    recipe_nodes = rest_util.parse_string_list(request, 'recipe_node', required=False)
    batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False)
    order = rest_util.parse_string_list(request, 'order', required=False)

    data = rest_util.parse_dict_list(request, 'data', required=False)
    data_list = []

    try:
        if data:
            for d in data:
                data = DataV6(data=d, do_validate=True).get_data()
                data_list.append(data)
        else:
            data_list = DataSetMember.objects.build_data_list(
                template=template, data_started=data_started, data_ended=data_ended,
                created_started=created_started, created_ended=created_ended,
                source_started=source_started, source_ended=source_ended,
                source_sensor_classes=source_sensor_classes, source_sensors=source_sensors,
                source_collections=source_collections, source_tasks=source_tasks,
                mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids,
                job_type_names=job_type_names, job_ids=job_ids, file_names=file_names,
                job_outputs=job_outputs, recipe_ids=recipe_ids, recipe_type_ids=recipe_type_ids,
                recipe_nodes=recipe_nodes, batch_ids=batch_ids, order=order)
    except InvalidData as ex:
        message = 'Data is invalid'
        logger.exception(message)
        raise BadParameter('%s: %s' % (message, unicode(ex)))

    if not data_list:
        resp_dict = {'No Results': 'No files found from filters and/or no data provided'}
        return Response(resp_dict)

    try:
        dataset = DataSet.objects.get(pk=dataset_id)
    except DataSet.DoesNotExist:
        raise Http404

    validation = DataSetMember.objects.validate_data_list(dataset_def=dataset.get_definition(),
                                                          data_list=data_list)
    members = []
    if validation.is_valid and not dry_run:
        members = DataSetMember.objects.create_dataset_members(dataset=dataset, data_list=data_list)
        serializer = DataSetMemberSerializerV6(members, many=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
    elif not validation.is_valid:
        raise BadParameter('%s: %s' % ('Error(s) validating data against dataset',
                                       [e.to_dict() for e in validation.errors]))

    resp_dict = []
    for dl in data_list:
        resp_dict.append(convert_data_to_v6_json(dl).get_dict())

    return Response(resp_dict)
def list(self, request, source_id=None): """Retrieves the jobs for a given source file ID and returns them in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :param source_id: The id of the source :type source_id: int encoded as a string :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ try: ScaleFile.objects.get(id=source_id, file_type='SOURCE') except ScaleFile.DoesNotExist: raise Http404 started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) statuses = rest_util.parse_string_list(request, 'status', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False) error_categories = rest_util.parse_string_list(request, 'error_category', required=False) include_superseded = rest_util.parse_bool(request, 'include_superseded', required=False) order = rest_util.parse_string_list(request, 'order', required=False) jobs = SourceFile.objects.get_source_jobs( source_id, started=started, ended=ended, statuses=statuses, job_ids=job_ids, job_type_ids=job_type_ids, job_type_names=job_type_names, job_type_categories=job_type_categories, batch_ids=batch_ids, error_categories=error_categories, include_superseded=include_superseded, order=order) page = self.paginate_queryset(jobs) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def list(self, request): """Retrieves the product for a given file name and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) time_field = rest_util.parse_string( request, 'time_field', required=False, accepted_values=ProductFile.VALID_TIME_FIELDS) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) is_operational = rest_util.parse_bool(request, 'is_operational', required=False) is_published = rest_util.parse_bool(request, 'is_published', default_value=True) file_name = rest_util.parse_string(request, 'file_name', required=False) job_output = rest_util.parse_string(request, 'job_output', required=False) recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False) recipe_type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False) recipe_job = rest_util.parse_string(request, 'recipe_job', required=False) batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False) order = rest_util.parse_string_list(request, 'order', required=False) products = ProductFile.objects.get_products( started=started, ended=ended, time_field=time_field, job_type_ids=job_type_ids, job_type_names=job_type_names, job_type_categories=job_type_categories, job_ids=job_ids, is_operational=is_operational, is_published=is_published, file_name=file_name, job_output=job_output, recipe_ids=recipe_ids, recipe_type_ids=recipe_type_ids, recipe_job=recipe_job, batch_ids=batch_ids, order=order, ) page = self.paginate_queryset(products) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def _list_v6(self, request): """Retrieves a list of files based on filters and returns it in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ data_started = rest_util.parse_timestamp(request, 'data_started', required=False) data_ended = rest_util.parse_timestamp(request, 'data_ended', required=False) rest_util.check_time_range(data_started, data_ended) source_started = rest_util.parse_timestamp(request, 'source_started', required=False) source_ended = rest_util.parse_timestamp(request, 'source_ended', required=False) rest_util.check_time_range(source_started, source_ended) source_sensor_classes = rest_util.parse_string_list( request, 'source_sensor_class', required=False) source_sensors = rest_util.parse_string_list(request, 'source_sensor', required=False) source_collections = rest_util.parse_string_list(request, 'source_collection', required=False) source_tasks = rest_util.parse_string_list(request, 'source_task', required=False) mod_started = rest_util.parse_timestamp(request, 'modified_started', required=False) mod_ended = rest_util.parse_timestamp(request, 'modified_ended', required=False) rest_util.check_time_range(mod_started, mod_ended) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) file_names = rest_util.parse_string_list(request, 'file_name', required=False) job_outputs = rest_util.parse_string_list(request, 'job_output', required=False) recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False) recipe_type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False) recipe_nodes = rest_util.parse_string_list(request, 'recipe_node', required=False) batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False) order = rest_util.parse_string_list(request, 'order', required=False) files = ScaleFile.objects.filter_files( data_started=data_started, data_ended=data_ended, source_started=source_started, source_ended=source_ended, source_sensor_classes=source_sensor_classes, source_sensors=source_sensors, source_collections=source_collections, source_tasks=source_tasks, mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids, job_type_names=job_type_names, job_ids=job_ids, file_names=file_names, job_outputs=job_outputs, recipe_ids=recipe_ids, recipe_type_ids=recipe_type_ids, recipe_nodes=recipe_nodes, batch_ids=batch_ids, order=order, ) page = self.paginate_queryset(files) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def test_parse_int_list_optional(self):
    '''Tests parsing an optional list of int parameters that are missing.'''
    request = MagicMock(Request)
    request.query_params = QueryDict('', mutable=True)
    request.query_params.setlist('test', ['1'])

    self.assertListEqual(rest_util.parse_int_list(request, 'test2', required=False), [])
def list(self, request, source_id=None): """Retrieves the products for a given source file ID and returns them in JSON form :param request: the HTTP GET request :type request: :class:`rest_framework.request.Request` :param source_id: The id of the source :type source_id: int encoded as a string :rtype: :class:`rest_framework.response.Response` :returns: the HTTP response to send back to the user """ try: ScaleFile.objects.get(id=source_id, file_type='SOURCE') except ScaleFile.DoesNotExist: raise Http404 started = rest_util.parse_timestamp(request, 'started', required=False) ended = rest_util.parse_timestamp(request, 'ended', required=False) rest_util.check_time_range(started, ended) time_field = rest_util.parse_string( request, 'time_field', required=False, accepted_values=ProductFile.VALID_TIME_FIELDS) batch_ids = rest_util.parse_int_list(request, 'batch_id', required=False) job_type_ids = rest_util.parse_int_list(request, 'job_type_id', required=False) job_type_names = rest_util.parse_string_list(request, 'job_type_name', required=False) job_type_categories = rest_util.parse_string_list(request, 'job_type_category', required=False) job_ids = rest_util.parse_int_list(request, 'job_id', required=False) is_operational = rest_util.parse_bool(request, 'is_operational', required=False) is_published = rest_util.parse_bool(request, 'is_published', required=False) file_name = rest_util.parse_string(request, 'file_name', required=False) job_output = rest_util.parse_string(request, 'job_output', required=False) recipe_ids = rest_util.parse_int_list(request, 'recipe_id', required=False) recipe_type_ids = rest_util.parse_int_list(request, 'recipe_type_id', required=False) recipe_job = rest_util.parse_string(request, 'recipe_job', required=False) order = rest_util.parse_string_list(request, 'order', required=False) products = SourceFile.objects.get_source_products( source_id, started=started, ended=ended, time_field=time_field, batch_ids=batch_ids, job_type_ids=job_type_ids, job_type_names=job_type_names, job_type_categories=job_type_categories, job_ids=job_ids, is_operational=is_operational, is_published=is_published, file_name=file_name, job_output=job_output, recipe_ids=recipe_ids, recipe_type_ids=recipe_type_ids, recipe_job=recipe_job, order=order) page = self.paginate_queryset(products) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data)
def test_parse_int_list_default(self):
    '''Tests parsing a required list of int parameters that are provided via default value.'''
    request = MagicMock(Request)
    request.query_params = QueryDict('', mutable=True)
    request.query_params.setlist('test', ['1'])

    self.assertEqual(rest_util.parse_int_list(request, 'test2', ['2', '3']), [2, 3])
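# ----------------------------------------------------------------------------
# A hedged companion to the accepted_values test earlier in this section: a
# sketch of what the rejection path might look like. This test is hypothetical;
# the exact error rest_util.parse_int_list raises for an unaccepted value is an
# assumption (Scale's util.rest defines a BadParameter error used elsewhere in
# these views), so a broad assertRaises is used here.
def test_parse_int_list_accepted_rejected_sketch(self):
    '''Hypothetical test: a value outside accepted_values should be rejected.'''
    request = MagicMock(Request)
    request.query_params = QueryDict('', mutable=True)
    request.query_params.setlist('test', ['1', '3'])

    with self.assertRaises(Exception):  # assumed to be a BadParameter-style error in the real helper
        rest_util.parse_int_list(request, 'test', accepted_values=[1, 2])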