def inner(values):
    """Build a ``term`` query for a single value chosen from *choices*.

    :param values: list of raw query-string values; exactly one is allowed.
    :raises RESTValidationError: if several values were given or the value
        is not an allowed choice.
    :returns: an Elasticsearch DSL ``term`` query on ``field``.
    """
    if len(values) != 1:
        raise RESTValidationError(
            errors=[FieldError(label, 'Multiple values specified.')])
    # Membership test instead of truthiness of the mapped value, so that
    # choices mapped to falsy values ('', 0, None) are still accepted.
    if values[0] not in choices:
        raise RESTValidationError(
            errors=[FieldError(
                label,
                'Allowed values: [{}]'.format(', '.join(choices)))])
    term_value = choices[values[0]]
    return Q('term', **{field: term_value})
def create_b2safe_file(external_pids, bucket):
    """Create a FileInstance which contains a PID in its uri.

    :param external_pids: list of ``{'ePIC_PID': ..., 'key': ...}`` dicts
        describing externally stored (B2SAFE) files.
    :param bucket: bucket the resulting ObjectVersions are attached to.
    :raises InvalidDepositError: on duplicate keys, a key starting with
        '/', or when the file URI already exists (IntegrityError).
    """
    # Validate the input shape before touching the database.
    validate_schema(
        external_pids,
        {
            'type': 'array',
            'items': {
                'type': 'object',
                'properties': {
                    'ePIC_PID': {
                        'type': 'string'
                    },
                    'key': {
                        'type': 'string'
                    }
                },
                'additionalProperties': False,
                'required': ['ePIC_PID', 'key']
            }
        })
    keys_list = [e['key'] for e in external_pids]
    keys_set = set(keys_list)
    # A set collapses duplicates, so a size mismatch means duplicate keys.
    if len(keys_list) != len(keys_set):
        raise InvalidDepositError([
            FieldError('external_pids',
                       'Field external_pids contains duplicate keys.')
        ])
    for external_pid in external_pids:
        # Normalize bare handles to full handle.net URLs (mutates input).
        if not external_pid['ePIC_PID'].startswith('http://hdl.handle.net/'):
            external_pid['ePIC_PID'] = 'http://hdl.handle.net/' + \
                external_pid['ePIC_PID']
        if external_pid['key'].startswith('/'):
            raise InvalidDepositError([
                FieldError('external_pids',
                           'File key cannot start with a "/".')
            ])
        try:
            # Create the file instance if it does not already exist
            file_instance = FileInstance.get_by_uri(external_pid['ePIC_PID'])
            if file_instance is None:
                file_instance = FileInstance.create()
                # 'B' marks the external B2SAFE storage class.
                file_instance.set_uri(
                    external_pid['ePIC_PID'], 1, 0, storage_class='B')
            assert file_instance.storage_class == 'B'
            # Add the file to the bucket if it is not already in it
            current_version = ObjectVersion.get(bucket, external_pid['key'])
            if not current_version or \
                    current_version.file_id != file_instance.id:
                ObjectVersion.create(bucket, external_pid['key'],
                                     file_instance.id)
        except IntegrityError as e:
            raise InvalidDepositError(
                [FieldError('external_pids', 'File URI already exists.')])
def inner(values):
    """Filter records on community-defined custom metadata fields.

    Each value must look like ``[vocabulary:term]:search_value``; one
    nested ES condition is built per value and all are ANDed together.
    """
    terms = current_custom_metadata.terms
    available_terms = current_custom_metadata.available_vocabulary_set
    must_conditions = []
    for value in values:
        # Matches this:
        # [vocabulary:term]:value
        parsed = re.match(r'\[([-\w]+\:[-\w]+)\]\:(.+)', value)
        if not parsed:
            raise RESTValidationError(
                errors=[FieldError(
                    field,
                    'The parameter should have the format: '
                    'custom=[field_name]:field_value.')])
        search_key, search_value = parsed.groups()
        if search_key not in available_terms:
            raise RESTValidationError(
                errors=[FieldError(
                    field, 'The "{}" term is not supported.'
                    .format(search_key))])
        # TODO: check if the search value has the correct type
        # for now we have only 'keyword' and 'text'
        # TODO: move this to a central place
        # get the elasticsearch custom field name
        custom_fields_mapping = dict(
            keyword='custom_keywords',
            text='custom_text'
        )
        custom_type = terms[search_key]['term_type']
        es_field = custom_fields_mapping[custom_type]
        # Key and value must match inside the same nested document.
        must_conditions.append({
            'nested': {
                'path': es_field,
                # 'score_mode': 'avg',
                'query': {
                    'bool': {'must': [
                        {'match': {es_field + '.key': search_key}},
                        {'match': {es_field + '.value': search_value}}
                        # TODO: in the future also filter ".community"
                    ]}
                }
            }
        })
    return Q('bool', must=must_conditions)
def validate(self, **kwargs):
    """Validate data using schema with ``JSONResolver``.

    :raises DepositValidationError: when no usable schema is present,
        the schema cannot be resolved, or the data fails validation.
    """
    # Guard clause: a schema is mandatory.
    if not ('$schema' in self and self['$schema']):
        raise DepositValidationError('You need to provide a valid schema.')
    try:
        schema = self['$schema']
        if not isinstance(schema, dict):
            # Bare URL: wrap it so the resolver can dereference it.
            schema = {'$ref': schema}
        resolver = current_app.extensions[
            'invenio-records'].ref_resolver_cls.from_schema(schema)
        validation_errors = [
            FieldError(list(err.path), str(err.message))
            for err in DepositValidator(
                schema, resolver=resolver).iter_errors(self)
        ]
        if validation_errors:
            raise DepositValidationError(None, errors=validation_errors)
    except RefResolutionError:
        raise DepositValidationError('Schema {} not found.'.format(
            self['$schema']))
def search_factory(self, search, query_parser=None):
    """Parse query using elasticsearch DSL query.

    :param self: REST view.
    :param search: Elastic search DSL search instance.
    :returns: Tuple with search instance and URL arguments.
    """
    from invenio_records_rest.facets import default_facets_factory
    from invenio_records_rest.sorter import default_sorter_factory

    index = search._index[0]

    # TODO: make "scheme" optional?
    for required in ('id', 'scheme', 'relation'):
        if required not in request.values:
            raise RESTValidationError(
                errors=[FieldError(required, 'Required field.')])

    search, urlkwargs = default_facets_factory(search, index)
    search, sortkwargs = default_sorter_factory(search, index)
    for sort_key, sort_value in sortkwargs.items():
        urlkwargs.add(sort_key, sort_value)

    if 'groupBy' not in request.values:
        # Apply 'identity' grouping by default
        search = search.filter(Q('term', Grouping='identity'))
        urlkwargs['groupBy'] = 'identity'

    # Exclude the identifiers by which the search was made (large aggregate)
    search = search.source(exclude=['*.SearchIdentifier'])
    return search, urlkwargs
def inner(values):
    """Build an ES ``Range`` query from a single 'start--end' parameter.

    Either bound may be empty (open-ended); a bound prefixed with '>'
    (start) or '<' (end) is treated as strict/exclusive.
    """
    # Exactly one value of the form "<start>--<end>", with at least one
    # bound present ('--' alone is rejected).
    if len(values) != 1 or values[0].count('--') != 1 or values[0] == '--':
        raise RESTValidationError(
            errors=[FieldError(field, 'Invalid range format.')])
    range_ends = values[0].split('--')
    range_args = dict()
    # gt/gte apply to the start bound, lt/lte to the end bound.
    ineq_opers = [{'strict': 'gt', 'nonstrict': 'gte'},
                  {'strict': 'lt', 'nonstrict': 'lte'}]
    # Optional ES date-math suffix per bound (closure variables).
    date_maths = [start_date_math, end_date_math]
    # Add the proper values to the dict
    for (range_end, strict, opers, date_math) in zip(
            range_ends, ['>', '<'], ineq_opers, date_maths):
        if range_end != '':
            # If first char is '>' for start or '<' for end
            if range_end[0] == strict:
                dict_key = opers['strict']
                range_end = range_end[1:]
            else:
                dict_key = opers['nonstrict']
            if date_math:
                # '||' separates the date value from ES date math.
                range_end = '{0}||{1}'.format(range_end, date_math)
            range_args[dict_key] = range_end
    # Merge extra closure kwargs (e.g. a date 'format') with the bounds.
    args = kwargs.copy()
    args.update(range_args)
    return Range(**{field: args})
def __init__(self, community_ids, *args, **kwargs):
    """Initialize the error with community IDs.

    :param community_ids: iterable of community IDs that were not found.
    """
    msg = _('Provided community does not exist: ')
    self.errors = [
        FieldError('metadata.communities', msg + c_id)
        for c_id in community_ids
    ]
    # Propagate remaining arguments to the base exception so status /
    # description handling keeps working; mirrors the sibling
    # MissingCommunityError initializer elsewhere in this codebase.
    super().__init__(*args, **kwargs)
def validate(self, **kwargs):
    """Validate data using schema with ``JSONResolver``.

    :raises DepositValidationError: when the '$schema' field is missing,
        the schema URL cannot be resolved, or validation fails.
    """
    try:
        # KeyError here means the record carries no '$schema' field.
        schema = self['$schema']
        if not isinstance(schema, dict):
            # Bare URL: wrap it so the resolver can dereference it.
            schema = {'$ref': schema}
        resolver = current_app.extensions[
            'invenio-records'].ref_resolver_cls.from_schema(schema)
        validator = Draft4Validator(schema, resolver=resolver)
        field_errors = [
            FieldError(list(err.path), str(err.message))
            for err in validator.iter_errors(self)
        ]
        if field_errors:
            raise DepositValidationError(None, errors=field_errors)
    except RefResolutionError:
        raise DepositValidationError('Schema with given url not found.')
    except KeyError:
        raise DepositValidationError('Schema field is required.')
def json_patch_loader(user=None):
    """Load and validate a JSON Patch for a user account.

    :param user: the account being modified.
    :returns: the patched representation of the account.
    """
    data = request.get_json(force=True)
    if data is None:
        abort(400)
    # Paths touched by any operation except no-op 'test' commands.
    # NOTE(review): paths are used verbatim (JSON Patch paths start with
    # '/'), so _fields_with_profile presumably contains slash-prefixed
    # entries — confirm against its definition.
    modified_fields = {
        cmd['path'] for cmd in data
        if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test'
    }
    # Reject patches touching any path listed in _fields_with_profile;
    # presumably profile fields not patchable through this loader.
    errors = [
        FieldError(field, 'Unknown field {}.'.format(field))
        for field in _fields_with_profile.intersection(modified_fields)
    ]
    if len(errors) > 0:
        raise RESTValidationError(errors=errors)
    # Snapshot of the mutable account state the patch is applied against.
    original = {
        'email': user.email,
        'active': user.active,
        'password': None
    }
    # if invenio-userprofiles is loaded add profile's fields
    if 'full_name' in fields:
        original.update({
            'full_name': user.profile.full_name,
            'username': user.profile.username
        })
    try:
        patched = apply_patch(original, data)
    except (JsonPatchException, JsonPointerException):
        raise PatchJSONFailureRESTError()
    # Drop the placeholder when the patch did not set a new password.
    if patched['password'] is None:
        del patched['password']
    if 'full_name' in fields:
        _fix_profile(patched)
    return patched
def default_role_json_patch_loader(role=None):
    """Create JSON PATCH data loaders for role modifications.

    :param role: the modified role.
    :returns: a JSON corresponding to the patched role.
    """
    data = request.get_json(force=True)
    if data is None:
        abort(400)

    # Collect the touched paths (leading '/' stripped), skipping no-op
    # 'test' operations.
    touched = set()
    for cmd in data:
        if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test':
            touched.add(cmd['path'][1:])

    # Anything outside the patchable role fields is rejected up front.
    unknown = touched.difference(_role_fields)
    if unknown:
        raise RESTValidationError(errors=[
            FieldError(f, 'Unknown or immutable field {}.'.format(f))
            for f in unknown
        ])

    snapshot = {'name': role.name, 'description': role.description}
    try:
        return apply_patch(snapshot, data)
    except (JsonPatchException, JsonPointerException):
        raise PatchJSONFailureRESTError()
def __init__(self, community_ids, *args, **kwargs):
    """Initialize the error with community IDs.

    :param community_ids: iterable of community IDs that were not found.
    """
    prefix = _('Provided community does not exist: ')
    self.errors = []
    for community_id in community_ids:
        self.errors.append(
            FieldError('metadata.communities', prefix + community_id))
    super(MissingCommunityError, self).__init__(*args, **kwargs)
class VersioningFilesError(RESTValidationError):
    """Error when new version's files exist in one of the old versions."""

    # Single non-field-specific error (field=None); error code 10 matches
    # the other file-related REST errors in this codebase.
    errors = [
        FieldError(None, _(
            "New version's files must differ from all previous versions."),
            code=10)
    ]
def handle_validation_error(err):
    """Convert a jsonschema ValidationError into an InvalidRecordError."""
    field = '/'.join(str(part) for part in err.path)
    if err.validator in ('required', 'additionalProperties'):
        # The offending property name is single-quoted in the message.
        try:
            field = err.message.split('\'')[1]
        except IndexError:
            pass  # ignore
    return InvalidRecordError(errors=[FieldError(field, err.message)])
def json_loader(**kwargs):
    """Default data loader when Invenio Userprofiles is not installed."""
    payload = request.get_json(force=True)
    # Reject the first key that is not in the allowed set.
    unknown = [key for key in payload if key not in allowed_fields]
    if unknown:
        raise RESTValidationError(
            errors=[FieldError(unknown[0],
                               'Unknown field {}'.format(unknown[0]))])
    return payload
def handle_error(self, error, *args, **kwargs):
    """Handle errors during parsing."""
    if isinstance(error, ValidationError):
        # Flatten the per-field message lists into FieldError instances.
        field_errors = [
            FieldError(field, message)
            for field, messages in error.messages.items()
            for message in messages
        ]
        raise RESTValidationError(errors=field_errors)
    super(FlaskParser, self).handle_error(error, *args, **kwargs)
def __init__(self, errors=None, **kwargs):
    """Initialize exception."""
    # Flatten the per-field message lists into FieldError instances.
    field_errors = []
    if errors:
        for field, messages in errors.items():
            for message in messages:
                field_errors.append(FieldError(field, message))
    super(SearchPaginationRESTError, self).__init__(
        errors=field_errors, **kwargs)
def inner(values):
    """Build a ``geo_bounding_box`` query from a comma-separated bounds
    parameter: ``bottom_left_lon,bottom_left_lat,top_right_lon,top_right_lat``.
    """
    if len(values) != 1:
        raise RESTValidationError(
            errors=[FieldError(name, 'Only one parameter is allowed.')])
    values = [value.strip() for value in values[0].split(',')]
    if len(values) != 4:
        raise RESTValidationError(errors=[
            FieldError(
                name,
                'Invalid bounds: four comma-separated numbers required. '
                'Example: 143.37158,-38.99357,146.90918,-37.35269')
        ])
    # Decimal rejects malformed numbers but accepts 'NaN', which is why
    # the comparisons below are guarded against InvalidOperation too.
    try:
        bottom_left_lon = Decimal(values[0])
        bottom_left_lat = Decimal(values[1])
        top_right_lon = Decimal(values[2])
        top_right_lat = Decimal(values[3])
    except InvalidOperation:
        raise RESTValidationError(
            errors=[FieldError(name, 'Invalid number in bounds.')])
    try:
        if not (-90 <= bottom_left_lat <= 90) or \
                not (-90 <= top_right_lat <= 90):
            raise RESTValidationError(errors=[
                FieldError(name, 'Latitude must be between -90 and 90.')
            ])
        if not (-180 <= bottom_left_lon <= 180) or \
                not (-180 <= top_right_lon <= 180):
            raise RESTValidationError(errors=[
                FieldError(name,
                           'Longitude must be between -180 and 180.')
            ])
        # NOTE(review): longitude ordering is not validated — presumably
        # to allow boxes spanning the antimeridian; confirm intent.
        if top_right_lat <= bottom_left_lat:
            raise RESTValidationError(errors=[
                FieldError(
                    name,
                    'Top-right latitude must be greater than '
                    'bottom-left latitude.')
            ])
    except InvalidOperation:
        # comparison with "NaN" raises exception
        raise RESTValidationError(errors=[
            FieldError(name,
                       'Invalid number: "NaN" is not a permitted value.')
        ])
    query = {
        field: {
            'top_right': {
                'lat': top_right_lat,
                'lon': top_right_lon,
            },
            'bottom_left': {
                'lat': bottom_left_lat,
                'lon': bottom_left_lon,
            }
        }
    }
    # 'type' is a closure variable (it shadows the builtin); when truthy
    # it is forwarded as the query's 'type' option.
    if type:
        query['type'] = type
    return Q('geo_bounding_box', **query)
def post(self, pid, record, **kwargs):
    """Reject request post method.

    Validates the reject payload, marks the document request as
    REJECTED, commits, re-indexes and notifies the requester by e-mail.
    Returns a 202 response with the updated record.
    """
    data = self.loader()
    reject_reason = data.get("reject_reason")
    document_pid = data.get("document_pid")
    # Only requests still in PENDING state may be rejected.
    if record["state"] != "PENDING":
        raise DocumentRequestError(
            "You cannot cancel a Document Request that "
            "is in state: {}".format(record["state"]))
    if not reject_reason:
        raise DocumentRequestError(
            "Missing required field: reject reason",
            errors=[
                FieldError(
                    field="reject_reason",
                    message="Reject reason is required.",
                )
            ],
        )
    # IN_CATALOG means the requested document already exists, so the
    # existing document's PID must be supplied.
    if reject_reason == "IN_CATALOG" and not document_pid:
        raise DocumentRequestError(
            "Document PID required for reject reason {}".format(
                reject_reason),
            errors=[
                FieldError(
                    field="document_pid",
                    message="DocumentPID is required.",
                )
            ],
        )
    if reject_reason == "IN_CATALOG":
        record["document_pid"] = document_pid
    record["state"] = "REJECTED"
    record["reject_reason"] = reject_reason
    record.commit()
    db.session.commit()
    # Keep the search index in sync and notify the requester.
    current_app_ils.document_request_indexer.index(record)
    send_document_request_mail(record, action="request_rejected")
    return self.make_response(pid, record, 202)
def check_record_immutable_fields(record):
    """Checks that the previous community and owner fields are preserved."""
    # Compare the stored JSON against the incoming record data.
    stored = record.model.json
    for immutable in ('community', '$schema'):
        if stored.get(immutable) != record.get(immutable):
            raise AlteredRecordError(errors=[
                FieldError(
                    immutable,
                    'The {} field cannot be changed.'.format(immutable))
            ])
class OngoingMultipartUploadError(RESTValidationError):
    """Error for when a multipart file upload is still in progress."""
    # Docstring fixed: it was copy-pasted from MissingFilesError.

    # Non-field-specific error (field=None); error code 10 matches the
    # other file-related REST errors in this codebase.
    # Message typo fixed ("filed" -> "file"); note the changed msgid may
    # need existing translations to be updated.
    errors = [
        FieldError(None, _(
            'A multipart file upload is in progress. Please wait for it to '
            'finish or delete the multipart file upload.'
        ), code=10)
    ]
def account_json_loader(**kwargs):
    """Accounts REST API data loader for JSON input."""
    data = request.get_json(force=True)
    for key in data:
        # "active" is the only field that may be modified; every other
        # field is rejected as immutable.  (The original comment stated
        # this backwards.)
        if key != 'active':
            raise RESTValidationError(
                errors=[FieldError(key, 'Field {} is immutable'.format(key))])
    return data
def serialize(pid, record, links_factory=None):
    """Serialize a single record and persistent identifier.

    :param pid: Persistent identifier instance.
    :param record: Record instance.
    :param links_factory: Factory function for record links.
    """
    # Only video records can be rendered as VTT subtitles.
    expected_schema = Video.get_record_schema()
    if record['$schema'] != expected_schema:
        raise RESTValidationError(
            errors=[FieldError(str(record.id), 'Unsupported format')])
    vtt_serializer = VTT(record=record)
    return vtt_serializer.format()
def check_patch_input_loader(record, immutable_paths):
    """Reject JSON Patch operations that touch immutable paths.

    :param record: the record being patched (unused here; kept for the
        loader interface).
    :param immutable_paths: set of JSON-pointer paths that must not change.
    :returns: the validated patch payload.
    """
    data = request.get_json(force=True)
    if data is None:
        abort(400)
    # Collect the paths of all operations except no-op 'test' commands.
    touched = set()
    for cmd in data:
        if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test':
            touched.add(cmd['path'])
    violations = [
        FieldError(path, 'The field "{}" is immutable.'.format(path))
        for path in immutable_paths.intersection(touched)
    ]
    if violations:
        raise RESTValidationError(errors=violations)
    return data
def validate(self, **kwargs):
    """Validate ILS record.

    :raises IlsValidationError: when JSON schema validation fails; the
        original jsonschema error is attached.
    """
    # JSON schema validation via the parent class.
    try:
        super().validate(**kwargs)
    except ValidationError as jve:
        dotted_path = ".".join(str(part) for part in jve.path)
        raise IlsValidationError(
            description="Record validation error",
            errors=[FieldError(dotted_path, jve.message)],
            original_exception=jve)
    # Custom record validation, when a validator is configured.
    if self._validator:
        self._validator.validate(self, **kwargs)
def handle_validation_error(err):
    """Convert a jsonschema ValidationError into an InvalidRecordError.

    Builds a '/'-separated path to the offending field; for
    'required'/'additionalProperties' errors the property name is pulled
    from the quoted part of the message and appended to the path.
    """
    fieldpath = '/'.join([str(x) for x in err.path])
    message = err.message
    field = None
    if err.validator == 'required' or err.validator == 'additionalProperties':
        try:
            # The offending property name is single-quoted in the message.
            field = err.message.split('\'')[1]
            fieldpath = (fieldpath + '/' if fieldpath else '') + field
        except IndexError:
            pass  # ignore
    # Special case: a missing per-community object under
    # 'community_specific' (named by the community schema UUID) gets a
    # friendlier explanation.
    if err.validator == 'required' and len(err.path) == 1 and \
            err.path[0] == 'community_specific' and is_valid_uuid(field):
        message = 'The "community_specific" metadata object must contain '\
            'an object named "{}" containing the '\
            'community-specific metadata fields'.format(field)
    return InvalidRecordError(errors=[FieldError(fieldpath, message)])
def json_patch_loader(user=None):
    """JSON patch loader.

    :param user: the modified account.
    :returns: a JSON corresponding to the patched account.
    """
    data = request.get_json(force=True)
    if data is None:
        abort(400)
    # Paths (leading '/' stripped) touched by any non-'test' operation.
    modified_fields = {
        cmd['path'][1:] for cmd in data
        if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test'
    }
    # Any field outside the allowed set is rejected up front.
    errors = [
        FieldError(field, 'Unknown or immutable field {}.'.format(field))
        for field in modified_fields.difference(fields)
    ]
    if len(errors) > 0:
        raise RESTValidationError(errors=errors)
    # Snapshot of the mutable account state the patch is applied against.
    original = {
        'email': user.email,
        'active': user.active,
        'password': None
    }
    # if invenio-userprofiles is loaded add profile's fields
    if 'full_name' in fields:
        original.update({
            'full_name': user.profile.full_name,
            'username': user.profile.username
        })
    try:
        patched = apply_patch(original, data)
    except (JsonPatchException, JsonPointerException):
        raise PatchJSONFailureRESTError()
    # Drop the placeholder when the patch did not set a new password.
    if patched['password'] is None:
        del patched['password']
    if 'full_name' in fields:
        _fix_profile(patched)
    return patched
def validate(self, **kwargs):
    """Validate data using schema with ``JSONResolver``.

    :raises RecordValidationError: when no usable schema is present, the
        schema cannot be resolved, or the data fails validation.
    """
    # Guard clause: a schema is mandatory.
    if not ('$schema' in self and self['$schema']):
        raise RecordValidationError('You need to provide a valid schema.')
    try:
        schema = self['$schema']
        if not isinstance(schema, dict):
            # Bare URL: wrap it so the resolver can dereference it.
            schema = {'$ref': schema}
        resolver = current_app.extensions['invenio-records']\
            .ref_resolver_cls.from_schema(schema)
        validator = RecordValidator(schema, resolver=resolver)
        field_errors = []
        for err in validator.iter_errors(self):
            field_errors.append(
                FieldError(get_error_path(err), str(err.message)))
        if field_errors:
            raise RecordValidationError(None, errors=field_errors)
    except RefResolutionError:
        raise RecordValidationError(
            f'Schema {self["$schema"]} not found.')
def account_json_patch_loader(user=None, **kwargs):
    """Accounts REST API data loader for JSON Patch input."""
    data = request.get_json(force=True)
    if data is None:
        abort(400)
    # Paths (leading '/' stripped) touched by any non-'test' operation.
    modified_fields = {
        cmd['path'][1:] for cmd in data
        if 'path' in cmd and 'op' in cmd and cmd['op'] != 'test'
    }
    errors = [
        FieldError(field, 'Unknown or immutable field {}.'.format(field))
        # "active" is the only field that may be patched; all others are
        # rejected.  (The original comment stated this backwards.)
        for field in modified_fields if field != 'active'
    ]
    if len(errors) > 0:
        raise RESTValidationError(errors=errors)
    # Apply the patch against a snapshot of the mutable account state.
    original = {'active': user.active}
    try:
        patched = apply_patch(original, data)
    except (JsonPatchException, JsonPointerException):
        raise PatchJSONFailureRESTError()
    return patched
def _abort(message, field=None, status=None):
    """Raise a :class:`RESTValidationError` carrying *message*.

    :param message: human-readable error description.
    :param field: optional field name; when given the message is attached
        to that field, otherwise it becomes the response description.
    :param status: accepted but currently unused — NOTE(review): callers
        may expect this to set the HTTP status code; confirm intent.
    """
    if field:
        raise RESTValidationError([FieldError(field, message)])
    raise RESTValidationError(description=message)
class MissingFilesError(RESTValidationError):
    """Error for when no files have been provided."""

    # Non-field-specific error (field=None); error code 10 matches the
    # other file-related REST errors in this codebase.
    errors = [
        FieldError(None, _('Minimum one file must be provided.'), code=10)
    ]