Example #1
    def test_convert_definition_to_v6_json(self):
        """Tests calling convert_data_to_v6_json()"""

        # Try interface with nothing set
        definition = DataSetDefinitionV6()
        json = convert_definition_to_v6_json(definition.get_definition())
        DataSetDefinitionV6(definition=json.get_dict(),
                            do_validate=True)  # Revalidate

        # Try data with a variety of values
        definition = DataSetDefinition(definition={})
        file_param = FileParameter('input_a', ['application/json'])
        json_param = JsonParameter('input_b', 'integer')
        file_param2 = FileParameter('input_c', ['application/json'])
        json_param2 = JsonParameter('input_d', 'integer')
        definition.add_global_parameter(file_param)
        definition.add_global_parameter(json_param)
        definition.add_global_value(FileValue('input_a', [123]))
        definition.add_global_value(JsonValue('input_b', 100))
        definition.add_parameter(file_param2)
        definition.add_parameter(json_param2)
        json = convert_definition_to_v6_json(definition)
        DataSetDefinitionV6(definition=json.get_dict(),
                            do_validate=True)  # Revalidate
        self.assertSetEqual(set(json.get_definition().get_parameters()),
                            {'input_a', 'input_b', 'input_c', 'input_d'})
Example #2
File: models.py Project: kaydoh/scale
    def validate_dataset_v6(self, definition, title=None, description=None):
        """Validates the given dataset definiton

        :param definition: The dataset definition
        :type definition: dict
        :returns: The dataset validation
        :rtype: :class:`datset.models.DataSetValidation`
        """

        is_valid = True
        errors = []
        warnings = []

        dataset_definition = None
        try:
            dataset_definition = DataSetDefinitionV6(definition=definition,
                                                     do_validate=True)
        except InvalidDataSetDefinition as ex:
            is_valid = False
            errors.append(ex.error)
            message = 'Dataset definition is invalid: %s' % ex
            logger.info(message)

        # validate other fields
        return DataSetValidation(is_valid, errors, warnings)
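
A minimal usage sketch for the method above, assuming validate_dataset_v6() is exposed on the DataSet model manager and that DataSetValidation keeps the is_valid, errors and warnings values it is constructed with; the definition contents and the logger are placeholders:

    # Hypothetical caller of validate_dataset_v6(); only the method signature and
    # the result fields come from the example above, everything else is made up.
    definition = {'version': '6', 'parameters': {'files': [], 'json': []}}
    validation = DataSet.objects.validate_dataset_v6(definition, title='My dataset')
    if not validation.is_valid:
        for error in validation.errors:
            logger.warning('Dataset definition error: %s', error)
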
Example #3
File: models.py Project: kaydoh/scale
    def get_definition(self):
        """Returns the dataset definition

        :returns: The DataSet definition
        :rtype: :class:`data.dataset.dataset.DataSetDefinition`
        """

        if isinstance(self.definition, basestring):
            self.definition = {}
        return DataSetDefinitionV6(definition=self.definition).get_definition()
Example #4
File: views.py Project: kaydoh/scale
    def create_v6(self, request):
        """Creates or edits a dataset and returns a link to the detail URL

        :param request: the HTTP POST request
        :type request: :class:`rest_framework.request.Request`
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """

        title = rest_util.parse_string(request, 'title', required=False)
        description = rest_util.parse_string(request,
                                             'description',
                                             required=False)
        definition = rest_util.parse_dict(request, 'definition', required=True)
        media_type = rest_util.parse_string_list(request,
                                                 'media_type',
                                                 required=False)

        # validate the definition
        try:
            dataset_def = DataSetDefinitionV6(
                definition=definition, do_validate=True).get_definition()
        except InvalidDataSetDefinition as ex:
            message = 'DataSet definition is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.create_dataset_v6(
                dataset_def, title=title, description=description)
        except Exception as ex:
            message = 'Unable to create new dataset'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.get_details_v6(dataset.id)
        except DataSet.DoesNotExist:
            raise Http404

        url = reverse('dataset_details_view',
                      args=[dataset.id],
                      request=request)
        serializer = DataSetDetailsSerializerV6(dataset)

        return Response(serializer.data,
                        status=status.HTTP_201_CREATED,
                        headers=dict(location=url))
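
Based on the rest_util.parse_* calls above, the POST body this view expects looks roughly like the following; the key names come from the view, while the concrete values and the definition layout (inferred from the other examples) are illustrative only:

    # Illustrative request body for create_v6; the values are made up.
    request_body = {
        'title': 'My dataset',                        # optional
        'description': 'Files to process together',  # optional
        'definition': {                               # required, validated as v6
            'version': '6',
            'parameters': {'files': [{'name': 'input_a'}], 'json': []}
        },
        'media_type': ['application/json']            # optional string list
    }
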
Example #5
    def setUp(self):
        django.setup()

        self.definition = copy.deepcopy(dataset_test_utils.DATASET_DEFINITION)
        self.dataset_definition = DataSetDefinitionV6(self.definition).get_definition()
Example #6
    def create_all_v6(self, request):
        """Creates or edits a dataset - including the dataset members - and returns a link to the detail URL"""

        title = rest_util.parse_string(request, 'title', required=False)
        description = rest_util.parse_string(request,
                                             'description',
                                             required=False)
        definition = rest_util.parse_dict(request, 'definition', required=True)

        template = rest_util.parse_dict(request,
                                        'data_template',
                                        required=False)
        dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

        # file filters
        data_started = rest_util.parse_timestamp(request,
                                                 'data_started',
                                                 required=False)
        data_ended = rest_util.parse_timestamp(request,
                                               'data_ended',
                                               required=False)
        rest_util.check_time_range(data_started, data_ended)

        source_started = rest_util.parse_timestamp(request,
                                                   'source_started',
                                                   required=False)
        source_ended = rest_util.parse_timestamp(request,
                                                 'source_ended',
                                                 required=False)
        rest_util.check_time_range(source_started, source_ended)

        source_sensor_classes = rest_util.parse_string_list(
            request, 'source_sensor_class', required=False)
        source_sensors = rest_util.parse_string_list(request,
                                                     'source_sensor',
                                                     required=False)
        source_collections = rest_util.parse_string_list(request,
                                                         'source_collection',
                                                         required=False)
        source_tasks = rest_util.parse_string_list(request,
                                                   'source_task',
                                                   required=False)

        mod_started = rest_util.parse_timestamp(request,
                                                'modified_started',
                                                required=False)
        mod_ended = rest_util.parse_timestamp(request,
                                              'modified_ended',
                                              required=False)
        rest_util.check_time_range(mod_started, mod_ended)

        job_type_ids = rest_util.parse_int_list(request,
                                                'job_type_id',
                                                required=False)
        job_type_names = rest_util.parse_string_list(request,
                                                     'job_type_name',
                                                     required=False)
        job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
        file_names = rest_util.parse_string_list(request,
                                                 'file_name',
                                                 required=False)
        job_outputs = rest_util.parse_string_list(request,
                                                  'job_output',
                                                  required=False)
        recipe_ids = rest_util.parse_int_list(request,
                                              'recipe_id',
                                              required=False)
        recipe_type_ids = rest_util.parse_int_list(request,
                                                   'recipe_type_id',
                                                   required=False)
        recipe_nodes = rest_util.parse_string_list(request,
                                                   'recipe_node',
                                                   required=False)
        batch_ids = rest_util.parse_int_list(request,
                                             'batch_id',
                                             required=False)

        order = rest_util.parse_string_list(request, 'order', required=False)

        data = rest_util.parse_dict_list(request, 'data', required=False)
        data_list = []

        # validate the definition & create the dataset
        try:
            dataset_def = DataSetDefinitionV6(
                definition=definition, do_validate=True).get_definition()
        except InvalidDataSetDefinition as ex:
            message = 'DataSet definition is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.create_dataset_v6(
                dataset_def, title=title, description=description)
        except Exception as ex:
            message = 'Unable to create new dataset'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.get_details_v6(dataset.id)
        except DataSet.DoesNotExist:
            raise Http404

        if not data and not template:
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            serializer = DataSetDetailsSerializerV6(dataset)

            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))

        # Try to find the data
        if data:
            try:
                for d in data:
                    data_list.append(
                        DataV6(data=d, do_validate=True).get_data())
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))
        elif template:
            try:
                data_list = DataSetMember.objects.build_data_list(
                    template=template,
                    data_started=data_started,
                    data_ended=data_ended,
                    source_started=source_started,
                    source_ended=source_ended,
                    source_sensor_classes=source_sensor_classes,
                    source_sensors=source_sensors,
                    source_collections=source_collections,
                    source_tasks=source_tasks,
                    mod_started=mod_started,
                    mod_ended=mod_ended,
                    job_type_ids=job_type_ids,
                    job_type_names=job_type_names,
                    job_ids=job_ids,
                    file_names=file_names,
                    job_outputs=job_outputs,
                    recipe_ids=recipe_ids,
                    recipe_type_ids=recipe_type_ids,
                    recipe_nodes=recipe_nodes,
                    batch_ids=batch_ids,
                    order=order)
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))

        if not data_list:
            resp_dict = {
                'No Results':
                'No files found from filters and/or no data provided'
            }
            return Response(resp_dict)

        validation = DataSetMember.objects.validate_data_list(
            dataset_def=dataset_def, data_list=data_list)
        members = []
        if validation.is_valid and not dry_run:

            members = DataSetMember.objects.create_dataset_members(
                dataset=dataset, data_list=data_list)
            dataset = DataSet.objects.get(id=dataset.id)

            serializer = DataSetDetailsSerializerV6(dataset)
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))
        elif not validation.is_valid:
            raise BadParameter('%s: %s' %
                               ('Error(s) validating data against dataset',
                                [e.to_dict() for e in validation.errors]))

        resp_dict = []
        for dl in data_list:
            resp_dict.append(convert_data_to_v6_json(dl).get_dict())
        return Response(resp_dict)
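
As a rough illustration of how create_all_v6 is driven: the request supplies a definition plus either explicit data entries or a data_template combined with the file filters parsed above (data_started, source_sensor, job_type_name, and so on). The key names mirror the rest_util.parse_* calls; the definition and DataV6 payload shapes are inferred from the other examples and should be treated as assumptions:

    # Illustrative request body for create_all_v6 with explicit members.
    request_body = {
        'title': 'My dataset',
        'definition': {
            'version': '6',
            'parameters': {'files': [{'name': 'input_a'}], 'json': []}
        },
        'data': [
            {'version': '6', 'files': {'input_a': [1234]}, 'json': {}}
        ],
        'dry_run': False
    }
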
Example #7
    def test_init_validation(self):
        """Tests the validation done in __init__"""

        # Try minimal acceptable configuration
        DataSetDefinitionV6(do_validate=True)

        # Invalid version
        with self.assertRaises(InvalidDataSetDefinition) as context:
            definition = {'version': 'BAD'}
            DataSetDefinitionV6(definition=definition, do_validate=True)
        self.assertEqual(context.exception.error.name, 'INVALID_VERSION')

        # Valid v6 dataset
        definition = copy.deepcopy(dataset_test_utils.DATASET_DEFINITION)
        definition1 = DataSetDefinitionV6(definition=definition,
                                          do_validate=True).get_definition()
        self.assertItemsEqual(
            definition1.global_data.values['input_a'].file_ids, [1234])
        self.assertItemsEqual(
            definition1.global_data.values['input_b'].file_ids, [1235, 1236])
        self.assertEqual(definition1.global_data.values['input_c'].value, 999)
        self.assertDictEqual(definition1.global_data.values['input_d'].value,
                             {'greeting': 'hello'})

        # Duplicate parameter
        param = {
            'version': '6',
            'files': [{
                'name': 'input_a'
            }, {
                'name': 'input_f',
                'media_types': ['application/json'],
                'required': False,
                'multiple': True
            }],
            'json': [{
                'name': 'input_g',
                'type': 'integer'
            }, {
                'name': 'input_h',
                'type': 'object',
                'required': False
            }]
        }

        definition['parameters'] = param

        with self.assertRaises(InvalidDataSetDefinition) as context:
            dataset2 = DataSetDefinitionV6(definition=definition,
                                           do_validate=True).get_definition()
        self.assertEqual(context.exception.error.name,
                         'INVALID_DATASET_DEFINITION')

        # Global param/data mismatch
        definition = copy.deepcopy(dataset_test_utils.DATASET_DEFINITION)
        del definition['global_data']['files']['input_a']
        with self.assertRaises(InvalidDataSetDefinition) as context:
            dataset3 = DataSetDefinitionV6(definition=definition,
                                           do_validate=True).get_definition()
        self.assertEqual(context.exception.error.name, 'INVALID_GLOBAL_DATA')
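
Several of the examples above deep-copy dataset_test_utils.DATASET_DEFINITION without showing it. From the assertions in this test it plausibly resembles the sketch below; the key layout and the regular parameter at the end are assumptions, and the real fixture in the Scale test utilities may differ:

    # Rough reconstruction of dataset_test_utils.DATASET_DEFINITION, inferred
    # from the assertions above; not the actual fixture.
    DATASET_DEFINITION = {
        'version': '6',
        'global_parameters': {
            'files': [{'name': 'input_a'}, {'name': 'input_b', 'multiple': True}],
            'json': [{'name': 'input_c', 'type': 'integer'},
                     {'name': 'input_d', 'type': 'object'}]
        },
        'global_data': {
            'files': {'input_a': [1234], 'input_b': [1235, 1236]},
            'json': {'input_c': 999, 'input_d': {'greeting': 'hello'}}
        },
        'parameters': {'files': [{'name': 'input_e'}], 'json': []}
    }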