Example #1
    def validate(self, parameter):
        """See :meth:`data.data.value.DataValue.validate`
        """

        warnings = super(FileValue, self).validate(parameter)

        if len(self.file_ids) == 0:
            raise InvalidData('NO_FILES', 'Parameter \'%s\' cannot accept zero files' % parameter.name)

        if len(self.file_ids) > 1 and not parameter.multiple:
            raise InvalidData('MULTIPLE_FILES', 'Parameter \'%s\' cannot accept multiple files' % parameter.name)

        return warnings
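
A minimal usage sketch of the check above; the import paths are inferred from the docstring references, and the FileParameter/FileValue constructors follow Example #4, with the assumption that the `multiple` flag defaults to False:

    from data.data.exceptions import InvalidData
    from data.data.value import FileValue                # import path assumed
    from data.interface.parameter import FileParameter   # import path assumed

    # Hypothetical single-file parameter receiving two file IDs
    parameter = FileParameter('input_1', ['image/tiff'])
    value = FileValue('input_1', [123, 456])
    try:
        value.validate(parameter)
    except InvalidData as ex:
        print(ex.error.name)  # MULTIPLE_FILES
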
Example #2
    def __init__(self, data=None, do_validate=False):
        """Creates a v6 data JSON object from the given dictionary

        :param data: The data JSON dict
        :type data: dict
        :param do_validate: Whether to perform validation on the JSON schema
        :type do_validate: bool

        :raises :class:`data.data.exceptions.InvalidData`: If the given data is invalid
        """

        if not data:
            data = {}
        self._data = data

        if 'version' not in self._data:
            self._data['version'] = SCHEMA_VERSION

        if self._data['version'] not in SCHEMA_VERSIONS:
            self._convert_from_v1()

        self._populate_default_values()

        try:
            if do_validate:
                validate(self._data, DATA_SCHEMA)
        except ValidationError as ex:
            raise InvalidData('INVALID_DATA', 'Invalid data: %s' % unicode(ex))
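
A hedged usage sketch of this constructor, following the DataV6(data=..., do_validate=True).get_data() call pattern in Example #10; the 'files'/'json' keys and the import path are assumptions about the v6 schema, not confirmed by the source:

    from data.data.exceptions import InvalidData
    from data.data.json.data_v6 import DataV6  # import path assumed

    data_dict = {
        'files': {'input_1': [123]},   # assumed: parameter name -> list of file IDs
        'json': {'input_2': 100},      # assumed: parameter name -> JSON value
    }
    try:
        data = DataV6(data=data_dict, do_validate=True).get_data()
    except InvalidData as ex:
        print(ex.error.name)  # INVALID_DATA if the dict fails the JSON schema
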
Example #3
    def validate(self, interface):
        """Validates this data against the given interface. Extra data values that cannot be passed to the interface
        will be removed.

        :param interface: The interface to which this data is being passed
        :type interface: :class:`data.interface.interface.Interface`
        :returns: A list of warnings discovered during validation
        :rtype: list

        :raises :class:`data.data.exceptions.InvalidData`: If the data is invalid
        """

        warnings = []

        # Remove extra data values; snapshot the values so the dict can be
        # safely modified while iterating
        for data_value in list(self.values.values()):
            if data_value.name not in interface.parameters:
                del self.values[data_value.name]

        # Check the data value being passed to each parameter
        for parameter in interface.parameters.values():
            if parameter.name in self.values:
                data_value = self.values[parameter.name]
                warnings.extend(data_value.validate(parameter))
            elif parameter.required:
                raise InvalidData(
                    'PARAM_REQUIRED',
                    'Parameter \'%s\' is required' % parameter.name)

        return warnings
Example #4
    def test_validate(self):
        """Tests calling Data.validate()"""

        interface = Interface()
        data = Data()

        interface.add_parameter(FileParameter('input_1', ['application/json']))
        interface.add_parameter(JsonParameter('input_2', 'integer'))
        data.add_value(FileValue('input_1', [123]))
        data.add_value(JsonValue('input_2', 100))
        data.add_value(JsonValue('extra_input_1', 'hello'))
        data.add_value(JsonValue('extra_input_2', 'there'))

        # Valid data
        data.validate(interface)
        # Ensure extra data values are removed
        self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

        # Data is missing required input 3
        interface.add_parameter(FileParameter('input_3', ['image/gif'], required=True))
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')

        data.add_value(FileValue('input_3', [999]))  # Input 3 taken care of now

        # Invalid data
        interface.add_parameter(JsonParameter('input_4', 'string'))
        mock_value = MagicMock()
        mock_value.name = 'input_4'
        mock_value.validate.side_effect = InvalidData('MOCK', '')
        data.add_value(mock_value)
        with self.assertRaises(InvalidData) as context:
            data.validate(interface)
        self.assertEqual(context.exception.error.name, 'MOCK')
Example #5
    def __init__(self, data=None):
        """Creates a recipe data object from the given dictionary

        :param data: The data JSON dict
        :type data: dict

        :raises :class:`data.data.exceptions.InvalidData`: If the data is invalid
        """

        if not data:
            data = {}
        self.data_dict = data
        param_names = set()

        if 'version' not in self.data_dict:
            self.data_dict['version'] = DEFAULT_VERSION
        if self.data_dict['version'] != '1.0':
            msg = 'Invalid data: %s is an unsupported version number'
            raise InvalidData('INVALID_VERSION',
                              msg % self.data_dict['version'])

        # Handle v1 job results by converting them into v1 job data
        if 'output_data' in self.data_dict and 'input_data' not in self.data_dict:
            self.data_dict['input_data'] = self.data_dict['output_data']
            del self.data_dict['output_data']

        if 'input_data' not in self.data_dict:
            self.data_dict['input_data'] = []
        for data_input in self.data_dict['input_data']:
            if 'name' not in data_input:
                raise InvalidData(
                    'INVALID_DATA',
                    'Invalid data: Every data input must have a "name" field')
            name = data_input['name']
            if name in param_names:
                raise InvalidData(
                    'INVALID_DATA',
                    'Invalid data: %s cannot be defined more than once' % name)
            param_names.add(name)
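
For reference, a hedged sketch of the kind of legacy dictionary this constructor accepts; the enclosing class is not shown in Example #5, so only the dictionary transformation is illustrated:

    # A v1 job results dict: the constructor renames 'output_data' to 'input_data'
    legacy_results = {
        'version': '1.0',
        'output_data': [
            {'name': 'output_file'},   # every entry must at least carry a 'name'
        ],
    }
    # After construction, data_dict would hold:
    # {'version': '1.0', 'input_data': [{'name': 'output_file'}]}
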
Example #6
    def add_value(self, value):
        """Adds the data value

        :param value: The data value to add
        :type value: :class:`data.data.value.DataValue`

        :raises :class:`data.data.exceptions.InvalidData`: If the value is a duplicate
        """

        if value.name in self.values:
            raise InvalidData('DUPLICATE_VALUE',
                              'Duplicate value \'%s\'' % value.name)

        self.values[value.name] = value
Example #7
    def validate(self, parameter):
        """Validates this data value against its parameter

        :param parameter: The parameter to which this data is being passed
        :type parameter: :class:`data.interface.parameter.Parameter`
        :returns: A list of warnings discovered during validation
        :rtype: :func:`list`

        :raises :class:`data.data.exceptions.InvalidData`: If the data is invalid
        """

        if self.param_type != parameter.param_type:
            msg = 'Parameter \'%s\' of type \'%s\' cannot accept data of type \'%s\''
            msg = msg % (parameter.name, parameter.param_type, self.param_type)
            raise InvalidData('MISMATCHED_PARAM_TYPE', msg)

        return []
Example #8
    def add_value_from_output_data(self, input_name, output_name, output_data):
        """Adds an output value from the given output data to this data with the given input name. This is used to pass
        output data from a recipe node to the input data of another recipe node.

        :param input_name: The name of the input value to add
        :type input_name: string
        :param output_name: The name of the output value in the output data
        :type output_name: string
        :param output_data: The output data
        :type output_data: :class:`data.data.data.Data`

        :raises :class:`data.data.exceptions.InvalidData`: If the value is a duplicate or the key does not exist in the output data
        """

        try:
            new_value = output_data.values[output_name].copy()
        except KeyError:
            raise InvalidData(
                'MISSING_VALUE',
                'Output name \'%s\' is not present in output data' %
                output_name)
        new_value.name = input_name
        self.add_value(new_value)
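
A minimal sketch of chaining recipe node data with this method, reusing the Data and FileValue constructors from Example #4 (import paths inferred from the docstrings):

    from data.data.data import Data
    from data.data.value import FileValue  # import path assumed

    node_a_output = Data()
    node_a_output.add_value(FileValue('out_file', [456]))

    node_b_input = Data()
    # Copies node A's 'out_file' value and renames it to node B's 'in_file' input
    node_b_input.add_value_from_output_data('in_file', 'out_file', node_a_output)
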
Example #9
    def validate(self, parameter):
        """See :meth:`data.data.value.DataValue.validate`
        """

        warnings = super(JsonValue, self).validate(parameter)

        if parameter.json_type == 'array' and not isinstance(self.value, list):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive an array' % parameter.name)
        elif parameter.json_type == 'boolean' and not isinstance(self.value, bool):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive a boolean' % parameter.name)
        elif parameter.json_type == 'integer' and not isinstance(self.value, (int, long)):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive an integer' % parameter.name)
        elif parameter.json_type == 'number' and not isinstance(self.value, Number):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive a number' % parameter.name)
        elif parameter.json_type == 'object' and not isinstance(self.value, dict):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive a JSON object' % parameter.name)
        elif parameter.json_type == 'string' and not isinstance(self.value, basestring):
            raise InvalidData('INVALID_JSON_TYPE', 'Parameter \'%s\' must receive a string' % parameter.name)

        return warnings
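
A hedged sketch of the type check above, reusing the JsonParameter and JsonValue constructors shown in Example #4:

    from data.data.exceptions import InvalidData
    from data.data.value import JsonValue                # import path assumed
    from data.interface.parameter import JsonParameter   # import path assumed

    parameter = JsonParameter('input_2', 'integer')
    try:
        JsonValue('input_2', 'not-an-int').validate(parameter)
    except InvalidData as ex:
        print(ex.error.name)  # INVALID_JSON_TYPE
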
Example #10
    def build_data_list(self,
                        template,
                        data_started=None,
                        data_ended=None,
                        created_started=None,
                        created_ended=None,
                        source_started=None,
                        source_ended=None,
                        source_sensor_classes=None,
                        source_sensors=None,
                        source_collections=None,
                        source_tasks=None,
                        mod_started=None,
                        mod_ended=None,
                        job_type_ids=None,
                        job_type_names=None,
                        job_ids=None,
                        is_published=None,
                        is_superseded=None,
                        file_names=None,
                        file_name_search=None,
                        job_outputs=None,
                        recipe_ids=None,
                        recipe_type_ids=None,
                        recipe_nodes=None,
                        batch_ids=None,
                        order=None,
                        file_type=None,
                        media_type=None):
        """Builds a list of data dictionaries from a template and file filters

        :param template: The template to fill with files found through filters
        :type template: dict
        :param data_started: Query files where data started after this time.
        :type data_started: :class:`datetime.datetime`
        :param data_ended: Query files where data ended before this time.
        :type data_ended: :class:`datetime.datetime`
        :param created_started: Query files created after this time.
        :type created_started: :class:`datetime.datetime`
        :param created_ended: Query files created before this time.
        :type created_ended: :class:`datetime.datetime`
        :param source_started: Query files where source collection started after this time.
        :type source_started: :class:`datetime.datetime`
        :param source_ended: Query files where source collection ended before this time.
        :type source_ended: :class:`datetime.datetime`
        :param source_sensor_classes: Query files with the given source sensor class.
        :type source_sensor_classes: :func:`list`
        :param source_sensors: Query files with the given source sensors.
        :type source_sensors: :func:`list`
        :param source_collections: Query files with the given source collections.
        :type source_collections: :func:`list`
        :param source_tasks: Query files with the given source tasks.
        :type source_tasks: :func:`list`
        :param mod_started: Query files where the last modified date is after this time.
        :type mod_started: :class:`datetime.datetime`
        :param mod_ended: Query files where the last modified date is before this time.
        :type mod_ended: :class:`datetime.datetime`
        :param job_type_ids: Query files with jobs with the given type identifier.
        :type job_type_ids: :func:`list`
        :param job_type_names: Query files with jobs with the given type name.
        :type job_type_names: :func:`list`
        :keyword job_ids: Query files with a given job id
        :type job_ids: :func:`list`
        :param is_published: Query files flagged as currently exposed for publication.
        :type is_published: bool
        :param is_superseded: Query files that have/have not been superseded.
        :type is_superseded: bool
        :param file_names: Query files with the given file names.
        :type file_names: :func:`list`
        :param file_name_search: Query files with the given string in their file name.
        :type file_name_search: string
        :keyword job_outputs: Query files with the given job outputs
        :type job_outputs: :func:`list`
        :keyword recipe_ids: Query files with a given recipe id
        :type recipe_ids: :func:`list`
        :keyword recipe_nodes: Query files with the given recipe nodes
        :type recipe_nodes: :func:`list`
        :keyword recipe_type_ids: Query files with the given recipe types
        :type recipe_type_ids: :func:`list`
        :keyword batch_ids: Query files with batches with the given identifiers.
        :type batch_ids: :func:`list`
        :param order: A list of fields to control the sort order.
        :type order: :func:`list`
        :keyword file_type: Query files with a given file type
        :type file_type: string
        :param media_type: Query files with the given media type.
        :type media_type: string
        :returns: The list of data dicts built from the template
        :rtype: :func:`list`
        """

        files = ScaleFile.objects.filter_files(
            data_started=data_started,
            data_ended=data_ended,
            created_started=created_started,
            created_ended=created_ended,
            source_started=source_started,
            source_ended=source_ended,
            source_sensor_classes=source_sensor_classes,
            source_sensors=source_sensors,
            source_collections=source_collections,
            source_tasks=source_tasks,
            mod_started=mod_started,
            mod_ended=mod_ended,
            job_type_ids=job_type_ids,
            job_type_names=job_type_names,
            job_ids=job_ids,
            file_names=file_names,
            file_name_search=file_name_search,
            job_outputs=job_outputs,
            recipe_ids=recipe_ids,
            recipe_type_ids=recipe_type_ids,
            recipe_nodes=recipe_nodes,
            batch_ids=batch_ids,
            order=order,
            file_type=file_type,
            media_type=media_type)

        data_list = []
        try:
            for f in files:
                entry = copy.deepcopy(template)
                file_params = entry['files']
                for p in file_params:
                    if file_params[p] == 'FILE_VALUE':
                        file_params[p] = [f.id]
                data_list.append(
                    DataV6(data=entry, do_validate=True).get_data())
        except (KeyError, TypeError) as ex:
            raise InvalidData('INVALID_TEMPLATE',
                              "Specified template is invalid: %s" % ex)

        return data_list
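
A hedged sketch of a template this method could fill; the surrounding class is not shown, and the exact v6 schema keys are assumptions based on the 'FILE_VALUE' replacement loop above:

    # Hypothetical template: each 'FILE_VALUE' placeholder is replaced with [f.id]
    template = {
        'files': {'INPUT_FILE': 'FILE_VALUE'},
        'json': {},   # assumed key in the v6 data schema
    }
    # Hypothetical call on the enclosing object; each matching ScaleFile yields one data dict
    data_list = self.build_data_list(template, media_type='image/tiff')
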