Example 1
0
 def test_parse_dict_list_post(self):
     """Tests that a posted list of dictionaries is parsed back unchanged."""
     expected = [{'name': 'value'}, {'name2': 'value2'}]
     mock_request = MagicMock(Request)
     mock_request.data = QueryDict('', mutable=True)
     mock_request.data.update({'test': expected})
     parsed = rest_util.parse_dict_list(mock_request, 'test')
     self.assertItemsEqual(parsed, expected)
Example 2
0
    def post(self, request):
        """Submit a command message to re-queue jobs that fit the given filter criteria

        :param request: the HTTP POST request
        :type request: :class:`rest_framework.request.Request`
        :returns: the HTTP response to send back to the user
        """

        # Optional time range filter
        started = rest_util.parse_timestamp(request, 'started', required=False)
        ended = rest_util.parse_timestamp(request, 'ended', required=False)
        rest_util.check_time_range(started, ended)

        # Optional job/error/batch/recipe filters
        error_categories = rest_util.parse_string_list(request, 'error_categories', required=False)
        error_ids = rest_util.parse_int_list(request, 'error_ids', required=False)
        job_ids = rest_util.parse_int_list(request, 'job_ids', required=False)
        job_status = rest_util.parse_string(request, 'status', required=False)
        job_type_ids = rest_util.parse_int_list(request, 'job_type_ids', required=False)
        priority = rest_util.parse_int(request, 'priority', required=False)
        job_type_names = rest_util.parse_string_list(request, 'job_type_names', required=False)
        batch_ids = rest_util.parse_int_list(request, 'batch_ids', required=False)
        recipe_ids = rest_util.parse_int_list(request, 'recipe_ids', required=False)
        is_superseded = rest_util.parse_bool(request, 'is_superseded', required=False)

        # Resolve any {name, version} job type dicts into database ids and
        # fold them into the job_type_ids filter
        job_types = rest_util.parse_dict_list(request, 'job_types', required=False)
        for job_type_dict in job_types:
            if 'name' not in job_type_dict or 'version' not in job_type_dict:
                raise BadParameter('Job types argument invalid: %s' % job_types)
            matched = JobType.objects.filter(name=job_type_dict['name'], version=job_type_dict['version']).first()
            if not matched:
                raise BadParameter(
                    'Job Type with name: %s and version: %s does not exist' % (job_type_dict['name'], job_type_dict['version']))
            job_type_ids.append(matched.id)

        # Create and send message
        message = create_requeue_jobs_bulk_message(started=started, ended=ended, error_categories=error_categories,
                                                   error_ids=error_ids, job_ids=job_ids, job_type_ids=job_type_ids,
                                                   priority=priority, status=job_status, job_type_names=job_type_names,
                                                   batch_ids=batch_ids, recipe_ids=recipe_ids,
                                                   is_superseded=is_superseded)
        CommandMessageManager().send_messages([message])

        return Response(status=status.HTTP_202_ACCEPTED)
Example 3
0
    def post_v6(self, request, dataset_id):
        """ Adds a dataset member to the dataset

        The member data comes either from an explicit 'data' list in the
        request or, when that is absent, is built from 'data_template' plus
        the file filter parameters. When 'dry_run' is true the data is
        validated but no members are created.

        :param request: the HTTP request
        :type request: :class:`rest_framework.request.Request`
        :param dataset_id: The id of the dataset
        :type dataset_id: int encoded as a str
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """

        # Optional template used to build data when no explicit 'data' is given
        template = rest_util.parse_dict(request,
                                        'data_template',
                                        required=False)
        dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

        #file filters
        data_started = rest_util.parse_timestamp(request,
                                                 'data_started',
                                                 required=False)
        data_ended = rest_util.parse_timestamp(request,
                                               'data_ended',
                                               required=False)
        rest_util.check_time_range(data_started, data_ended)

        created_started = rest_util.parse_timestamp(request,
                                                    'created_started',
                                                    required=False)
        created_ended = rest_util.parse_timestamp(request,
                                                  'created_ended',
                                                  required=False)
        rest_util.check_time_range(created_started, created_ended)

        source_started = rest_util.parse_timestamp(request,
                                                   'source_started',
                                                   required=False)
        source_ended = rest_util.parse_timestamp(request,
                                                 'source_ended',
                                                 required=False)
        rest_util.check_time_range(source_started, source_ended)

        source_sensor_classes = rest_util.parse_string_list(
            request, 'source_sensor_class', required=False)
        source_sensors = rest_util.parse_string_list(request,
                                                     'source_sensor',
                                                     required=False)
        source_collections = rest_util.parse_string_list(request,
                                                         'source_collection',
                                                         required=False)
        source_tasks = rest_util.parse_string_list(request,
                                                   'source_task',
                                                   required=False)

        mod_started = rest_util.parse_timestamp(request,
                                                'modified_started',
                                                required=False)
        mod_ended = rest_util.parse_timestamp(request,
                                              'modified_ended',
                                              required=False)
        rest_util.check_time_range(mod_started, mod_ended)

        job_type_ids = rest_util.parse_int_list(request,
                                                'job_type_id',
                                                required=False)
        job_type_names = rest_util.parse_string_list(request,
                                                     'job_type_name',
                                                     required=False)
        job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
        file_names = rest_util.parse_string_list(request,
                                                 'file_name',
                                                 required=False)
        job_outputs = rest_util.parse_string_list(request,
                                                  'job_output',
                                                  required=False)
        recipe_ids = rest_util.parse_int_list(request,
                                              'recipe_id',
                                              required=False)
        recipe_type_ids = rest_util.parse_int_list(request,
                                                   'recipe_type_id',
                                                   required=False)
        recipe_nodes = rest_util.parse_string_list(request,
                                                   'recipe_node',
                                                   required=False)
        batch_ids = rest_util.parse_int_list(request,
                                             'batch_id',
                                             required=False)

        order = rest_util.parse_string_list(request, 'order', required=False)

        data = rest_util.parse_dict_list(request, 'data', required=False)
        data_list = []

        try:
            if data:
                # Validate each explicitly provided data dict. Use a distinct
                # loop variable so the 'data' list being iterated is not
                # rebound mid-loop.
                for data_dict in data:
                    data_list.append(
                        DataV6(data=data_dict, do_validate=True).get_data())
            else:
                # No explicit data: build it from the template and file filters
                data_list = DataSetMember.objects.build_data_list(
                    template=template,
                    data_started=data_started,
                    data_ended=data_ended,
                    created_started=created_started,
                    created_ended=created_ended,
                    source_started=source_started,
                    source_ended=source_ended,
                    source_sensor_classes=source_sensor_classes,
                    source_sensors=source_sensors,
                    source_collections=source_collections,
                    source_tasks=source_tasks,
                    mod_started=mod_started,
                    mod_ended=mod_ended,
                    job_type_ids=job_type_ids,
                    job_type_names=job_type_names,
                    job_ids=job_ids,
                    file_names=file_names,
                    job_outputs=job_outputs,
                    recipe_ids=recipe_ids,
                    recipe_type_ids=recipe_type_ids,
                    recipe_nodes=recipe_nodes,
                    batch_ids=batch_ids,
                    order=order)
        except InvalidData as ex:
            message = 'Data is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        if not data_list:
            resp_dict = {
                'No Results':
                'No files found from filters and/or no data provided'
            }
            return Response(resp_dict)

        try:
            dataset = DataSet.objects.get(pk=dataset_id)
        except DataSet.DoesNotExist:
            raise Http404

        # Validate every data entry against the dataset definition
        validation = DataSetMember.objects.validate_data_list(
            dataset_def=dataset.get_definition(), data_list=data_list)
        if validation.is_valid and not dry_run:
            members = DataSetMember.objects.create_dataset_members(
                dataset=dataset, data_list=data_list)
            serializer = DataSetMemberSerializerV6(members, many=True)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        elif not validation.is_valid:
            raise BadParameter('%s: %s' %
                               ('Error(s) validating data against dataset',
                                [e.to_dict() for e in validation.errors]))

        # Valid dry run: echo back the data that would have been added
        resp_dict = []
        for dl in data_list:
            resp_dict.append(convert_data_to_v6_json(dl).get_dict())
        return Response(resp_dict)
Example 4
0
    def create_all_v6(self, request):
        """Creates or edits a dataset - including the dataset members - and returns a link to the detail URL

        The dataset is created from the required 'definition'. Members are
        then added either from an explicit 'data' list or, failing that, built
        from 'data_template' plus the file filter parameters. When neither is
        provided the bare dataset is returned; when 'dry_run' is true the
        member data is validated but not persisted.
        """

        title = rest_util.parse_string(request, 'title', required=False)
        description = rest_util.parse_string(request,
                                             'description',
                                             required=False)
        definition = rest_util.parse_dict(request, 'definition', required=True)

        # Optional template used to build member data from the file filters
        template = rest_util.parse_dict(request,
                                        'data_template',
                                        required=False)
        dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

        # file filters
        data_started = rest_util.parse_timestamp(request,
                                                 'data_started',
                                                 required=False)
        data_ended = rest_util.parse_timestamp(request,
                                               'data_ended',
                                               required=False)
        rest_util.check_time_range(data_started, data_ended)

        source_started = rest_util.parse_timestamp(request,
                                                   'source_started',
                                                   required=False)
        source_ended = rest_util.parse_timestamp(request,
                                                 'source_ended',
                                                 required=False)
        rest_util.check_time_range(source_started, source_ended)

        source_sensor_classes = rest_util.parse_string_list(
            request, 'source_sensor_class', required=False)
        source_sensors = rest_util.parse_string_list(request,
                                                     'source_sensor',
                                                     required=False)
        source_collections = rest_util.parse_string_list(request,
                                                         'source_collection',
                                                         required=False)
        source_tasks = rest_util.parse_string_list(request,
                                                   'source_task',
                                                   required=False)

        mod_started = rest_util.parse_timestamp(request,
                                                'modified_started',
                                                required=False)
        mod_ended = rest_util.parse_timestamp(request,
                                              'modified_ended',
                                              required=False)
        rest_util.check_time_range(mod_started, mod_ended)

        job_type_ids = rest_util.parse_int_list(request,
                                                'job_type_id',
                                                required=False)
        job_type_names = rest_util.parse_string_list(request,
                                                     'job_type_name',
                                                     required=False)
        job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
        file_names = rest_util.parse_string_list(request,
                                                 'file_name',
                                                 required=False)
        job_outputs = rest_util.parse_string_list(request,
                                                  'job_output',
                                                  required=False)
        recipe_ids = rest_util.parse_int_list(request,
                                              'recipe_id',
                                              required=False)
        recipe_type_ids = rest_util.parse_int_list(request,
                                                   'recipe_type_id',
                                                   required=False)
        recipe_nodes = rest_util.parse_string_list(request,
                                                   'recipe_node',
                                                   required=False)
        batch_ids = rest_util.parse_int_list(request,
                                             'batch_id',
                                             required=False)

        order = rest_util.parse_string_list(request, 'order', required=False)

        data = rest_util.parse_dict_list(request, 'data', required=False)
        data_list = []

        # validate the definition & create the dataset
        try:
            dataset_def = DataSetDefinitionV6(
                definition=definition, do_validate=True).get_definition()
        except InvalidDataSetDefinition as ex:
            message = 'DataSet definition is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.create_dataset_v6(
                dataset_def, title=title, description=description)
        except Exception as ex:
            message = 'Unable to create new dataset'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.get_details_v6(dataset.id)
        except DataSet.DoesNotExist:
            raise Http404

        # Neither explicit data nor a template: return the bare dataset
        if not data and not template:
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            serializer = DataSetDetailsSerializerV6(dataset)

            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))

        # Try and find the data
        if data:
            try:
                # Validate each explicitly provided data dict. Use a distinct
                # loop variable so the 'data' list being iterated is not
                # rebound mid-loop.
                for data_dict in data:
                    data_list.append(
                        DataV6(data=data_dict, do_validate=True).get_data())
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))
        elif template:
            try:
                data_list = DataSetMember.objects.build_data_list(
                    template=template,
                    data_started=data_started,
                    data_ended=data_ended,
                    source_started=source_started,
                    source_ended=source_ended,
                    source_sensor_classes=source_sensor_classes,
                    source_sensors=source_sensors,
                    source_collections=source_collections,
                    source_tasks=source_tasks,
                    mod_started=mod_started,
                    mod_ended=mod_ended,
                    job_type_ids=job_type_ids,
                    job_type_names=job_type_names,
                    job_ids=job_ids,
                    file_names=file_names,
                    job_outputs=job_outputs,
                    recipe_ids=recipe_ids,
                    recipe_type_ids=recipe_type_ids,
                    recipe_nodes=recipe_nodes,
                    batch_ids=batch_ids,
                    order=order)
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))

        # NOTE(review): the dataset has already been created at this point, so
        # an empty result still leaves the new (member-less) dataset behind
        if not data_list:
            resp_dict = {
                'No Results':
                'No files found from filters and/or no data provided'
            }
            return Response(resp_dict)

        # Validate every data entry against the new dataset's definition
        validation = DataSetMember.objects.validate_data_list(
            dataset_def=dataset_def, data_list=data_list)
        if validation.is_valid and not dry_run:

            members = DataSetMember.objects.create_dataset_members(
                dataset=dataset, data_list=data_list)
            dataset = DataSet.objects.get(id=dataset.id)

            serializer = DataSetDetailsSerializerV6(dataset)
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))
        elif not validation.is_valid:
            raise BadParameter('%s: %s' %
                               ('Error(s) validating data against dataset',
                                [e.to_dict() for e in validation.errors]))

        # Valid dry run: echo back the data that would have been added
        resp_dict = []
        for dl in data_list:
            resp_dict.append(convert_data_to_v6_json(dl).get_dict())
        return Response(resp_dict)
Example 5
0
 def test_parse_dict_list_optional(self):
     """Tests that an absent optional dict list parses to an empty list."""
     mock_request = MagicMock(Request)
     mock_request.data = QueryDict('', mutable=True)
     result = rest_util.parse_dict_list(mock_request, 'test', required=False)
     self.assertEqual(result, [])