def _extract_sorting(self, limit):
    """Extract sorting from the ``_sort`` QueryString parameter.

    :param limit: pagination limit (unused here; part of the shared
        extraction pipeline signature).
    :returns: list of ``Sort`` tuples ``(field, direction)``.
    :raises: 400 via :func:`raise_invalid` if a sort field is unknown.
    """
    specified = self.request.validated['querystring'].get('_sort', [])
    sorting = []
    modified_field_used = self.model.modified_field in specified
    for field in specified:
        field = field.strip()
        # Optional leading '-' (descending) or '+' followed by a
        # possibly-dotted field name.
        m = re.match(r'^([\-+]?)([\w\.]+)$', field)
        if m:
            order, field = m.groups()

            if not self.is_known_field(field):
                error_details = {
                    'location': 'querystring',
                    'description': "Unknown sort field '{}'".format(field)
                }
                raise_invalid(self.request, **error_details)

            direction = -1 if order == '-' else 1
            sorting.append(Sort(field, direction))

    if not modified_field_used:
        # Add a sort by the ``modified_field`` in descending order
        # useful for pagination
        sorting.append(Sort(self.model.modified_field, -1))
    return sorting
def fxa_oauth_token(request):
    """Return OAuth token from authorization code.

    Trades the ``code`` (bound to a previously-stored ``state``) for a
    longer-lived FxA token, then redirects to the stored callback URI
    with the token appended.
    """
    state = request.validated['state']
    code = request.validated['code']

    # Require on-going session
    stored_redirect = request.registry.cache.get(state)

    # Make sure we cannot try twice with the same code
    request.registry.cache.delete(state)
    if not stored_redirect:
        error_msg = 'The OAuth session was not found, please re-authenticate.'
        return http_error(httpexceptions.HTTPRequestTimeout(),
                          errno=ERRORS.MISSING_AUTH_TOKEN,
                          message=error_msg)

    # Trade the OAuth code for a longer-lived token
    auth_client = OAuthClient(server_url=fxa_conf(request, 'oauth_uri'),
                              client_id=fxa_conf(request, 'client_id'),
                              client_secret=fxa_conf(request, 'client_secret'))
    try:
        token = auth_client.trade_code(code)
    except fxa_errors.OutOfProtocolError:
        # FxA server misbehaved: not the client's fault.
        raise httpexceptions.HTTPServiceUnavailable()
    except fxa_errors.InProtocolError as error:
        logger.error(error)
        error_details = {
            'name': 'code',
            'location': 'querystring',
            'description': 'Firefox Account code validation failed.'
        }
        raise_invalid(request, **error_details)

    return httpexceptions.HTTPFound(location='%s%s' % (stored_redirect, token))
def delete(self):
    """Record ``DELETE`` endpoint: delete a record and return it.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
        the record is not found.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
        if ``If-Match`` header is provided and record modified
        in the interim.
    """
    self._raise_400_if_invalid_id(self.record_id)
    record = self._get_record_or_404(self.record_id)
    self._raise_412_if_modified(record)

    # Retrieve the last_modified information from a querystring if present.
    last_modified = self.request.GET.get('last_modified')
    if last_modified:
        # Header-style quotes around the value are tolerated.
        last_modified = native_value(last_modified.strip('"'))
        if not isinstance(last_modified, six.integer_types):
            error_details = {
                'name': 'last_modified',
                'location': 'querystring',
                'description': 'Invalid value for %s' % last_modified
            }
            raise_invalid(self.request, **error_details)

        # If less or equal than current record, ignore it
        # (deletion timestamps must move forward).
        if last_modified <= record[self.model.modified_field]:
            last_modified = None

    deleted = self.model.delete_record(record, last_modified=last_modified)
    return self.postprocess(deleted, action=ACTIONS.DELETE)
def post_reset_password(request):
    """Send a temporary reset password to the user's email address.

    Always answers with the same generic message so the endpoint does
    not leak whether a given user id exists.
    """
    user_id = request.matchdict["user_id"]
    parent_id = user_id
    try:
        user = request.registry.storage.get(parent_id=parent_id,
                                            resource_name="account",
                                            object_id=user_id)
    except storage_exceptions.ObjectNotFoundError:
        # Don't give information on the existence of a user id: return a generic message.
        return {"message": "A temporary reset password has been sent by mail"}

    settings = request.registry.settings

    # The account id doubles as the email address; refuse to mail
    # anything that does not look like one.
    user_email = user["id"]
    email_regexp = settings.get("account_validation.email_regexp", DEFAULT_EMAIL_REGEXP)
    compiled_email_regexp = re.compile(email_regexp)
    if not compiled_email_regexp.match(user_email):
        error_details = {
            "name": "data.id",
            "description": f"The user id should match {email_regexp}.",
        }
        raise_invalid(request, **error_details)

    # Store only the hash; the clear-text password travels by email only.
    reset_password = str(uuid.uuid4())
    hashed_reset_password = hash_password(reset_password)
    cache_reset_password(hashed_reset_password, user_id, request.registry)

    # Send a temporary reset password by mail.
    Emailer(request, user).send_temporary_reset_password(reset_password)

    return {"message": "A temporary reset password has been sent by mail"}
def _raise_304_if_not_modified(self, record=None):
    """Raise 304 if current timestamp is inferior to the one
    specified in headers.

    :param record: optional record whose ``modified_field`` is compared;
        falls back to the collection timestamp when omitted.
    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified`
    """
    if_none_match = self.request.headers.get('If-None-Match')

    if not if_none_match:
        return

    if_none_match = decode_header(if_none_match)

    try:
        # Expected format: an integer timestamp between double quotes.
        if not (if_none_match[0] == if_none_match[-1] == '"'):
            raise ValueError()
        modified_since = int(if_none_match[1:-1])
    except (IndexError, ValueError):
        # '*' matches any representation: no 304 short-circuit possible.
        if if_none_match == '*':
            return
        error_details = {
            'location': 'headers',
            'description': "Invalid value for If-None-Match"
        }
        raise_invalid(self.request, **error_details)

    if record:
        current_timestamp = record[self.model.modified_field]
    else:
        current_timestamp = self.model.timestamp()

    if current_timestamp <= modified_since:
        response = HTTPNotModified()
        self._add_timestamp_header(response, timestamp=current_timestamp)
        raise response
def _extract_pagination_rules_from_token(self, limit, sorting):
    """Get pagination params from the ``_token`` QueryString parameter.

    :param limit: pagination limit (unused here; pipeline signature).
    :param sorting: list of ``Sort`` tuples used to rebuild the rules.
    :returns: tuple ``(filters, offset)``.
    :raises: 400 via :func:`raise_invalid` when the token is malformed
        or was already consumed.
    """
    token = self.request.validated['querystring'].get('_token', None)
    filters = []
    offset = 0
    if token:
        error_msg = None
        try:
            tokeninfo = json.loads(decode64(token))
            if not isinstance(tokeninfo, dict):
                raise ValueError()
            last_record = tokeninfo['last_record']
            offset = tokeninfo['offset']
            nonce = tokeninfo['nonce']
        except (ValueError, KeyError, TypeError):
            error_msg = '_token has invalid content'

        # We don't want pagination tokens to be reused several times (#1171).
        # The cache backend is used to keep track of "nonces".
        if self.request.method.lower() == 'delete' and error_msg is None:
            registry = self.request.registry
            deleted = registry.cache.delete(nonce)
            if deleted is None:
                error_msg = '_token was already used or has expired.'

        if error_msg:
            error_details = {
                'location': 'querystring',
                'description': error_msg
            }
            raise_invalid(self.request, **error_details)

        filters = self._build_pagination_rules(sorting, last_record)
    return filters, offset
def _extract_sorting(self, limit):
    """Extract sorting from the ``_sort`` QueryString parameter.

    :param limit: pagination limit (unused here; part of the shared
        extraction pipeline signature).
    :returns: list of ``Sort`` tuples ``(field, direction)``.
    :raises: 400 via :func:`raise_invalid` if a sort field is unknown.
    """
    specified = self.request.GET.get('_sort', '').split(',')
    sorting = []
    modified_field_used = self.model.modified_field in specified
    for field in specified:
        field = field.strip()
        # Optional leading '-' (descending) or '+' followed by a field name.
        m = re.match(r'^([\-+]?)(\w+)$', field)
        if m:
            order, field = m.groups()

            if not self.is_known_field(field):
                error_details = {
                    'location': 'querystring',
                    'description': "Unknown sort field '{0}'".format(field)
                }
                raise_invalid(self.request, **error_details)

            direction = -1 if order == '-' else 1
            sorting.append(Sort(field, direction))

    if not modified_field_used:
        # Add a sort by the ``modified_field`` in descending order
        # useful for pagination
        sorting.append(Sort(self.model.modified_field, -1))
    return sorting
def resource_create_object(request, resource_cls, uri, resource_name, obj_id): """In the default bucket, the bucket and collection are implicitly created. This helper instantiate the resource and simulate a request with its RootFactory on the instantiated resource. :returns: the created object :rtype: dict """ # Fake context to instantiate a resource. context = RouteFactory(request) context.get_permission_object_id = lambda r, i: uri resource = resource_cls(request, context) # Check that provided id is valid for this resource. if not resource.model.id_generator.match(obj_id): error_details = { 'location': 'path', 'description': "Invalid %s id" % resource_name } raise_invalid(resource.request, **error_details) data = {'id': obj_id} try: obj = resource.model.create_record(data) # Since the current request is not a resource (but a straight Service), # we simulate a request on a resource. # This will be used in the resource event payload. resource.request.current_resource_name = resource_name resource.postprocess(data, action=ACTIONS.CREATE) except storage_exceptions.UnicityError as e: obj = e.record return obj
def process_record(self, new, old=None):
    """Hash the password and enforce that the record id matches the
    authenticated username (except for administrators/anonymous)."""
    new = super(Account, self).process_record(new, old)

    # Never store the clear-text password.
    new['password'] = hash_password(new['password'])

    # Administrators can reach other accounts and anonymous have no
    # selected_userid. So do not try to enforce.
    if self.context.is_administrator or self.context.is_anonymous:
        return new

    # Do not let accounts be created without usernames.
    if self.model.id_field not in new:
        error_details = {
            'name': 'data.id',
            'description': 'Accounts must have an ID.',
        }
        raise_invalid(self.request, **error_details)

    # Otherwise, we force the id to match the authenticated username.
    if new[self.model.id_field] != self.request.selected_userid:
        error_details = {
            'name': 'data.id',
            'description': 'Username and account ID do not match.',
        }
        raise_invalid(self.request, **error_details)

    return new
def delete(self):
    """Record ``DELETE`` endpoint: delete a record and return it.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
        the record is not found.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
        if ``If-Match`` header is provided and record modified
        in the interim.
    """
    self._raise_400_if_invalid_id(self.record_id)
    record = self._get_record_or_404(self.record_id)
    self._raise_412_if_modified(record)

    # Retrieve the last_modified information from a querystring if present.
    last_modified = self.request.GET.get('last_modified')
    if last_modified:
        # Header-style quotes around the value are tolerated.
        last_modified = native_value(last_modified.strip('"'))
        if not isinstance(last_modified, six.integer_types):
            error_details = {
                'name': 'last_modified',
                'location': 'querystring',
                'description': 'Invalid value for %s' % last_modified
            }
            raise_invalid(self.request, **error_details)

        # If less or equal than current record, ignore it
        # (deletion timestamps must move forward).
        if last_modified <= record[self.model.modified_field]:
            last_modified = None

    deleted = self.model.delete_record(record, last_modified=last_modified)
    return self.postprocess(deleted, action=ACTIONS.DELETE, old=record)
def process_object(self, new, old=None):
    """Hash the password and enforce that the object id matches the
    authenticated username (except for administrators/anonymous)."""
    new = super(Account, self).process_object(new, old)

    # Never store the clear-text password.
    new["password"] = hash_password(new["password"])

    # Administrators can reach other accounts and anonymous have no
    # selected_userid. So do not try to enforce.
    if self.context.is_administrator or self.context.is_anonymous:
        return new

    # Do not let accounts be created without usernames.
    if self.model.id_field not in new:
        error_details = {
            "name": "data.id",
            "description": "Accounts must have an ID."
        }
        raise_invalid(self.request, **error_details)

    # Otherwise, we force the id to match the authenticated username.
    if new[self.model.id_field] != self.request.selected_userid:
        error_details = {
            "name": "data.id",
            "description": "Username and account ID do not match.",
        }
        raise_invalid(self.request, **error_details)

    return new
def process_record(self, new, old=None):
    """Validate records against collection schema, if any."""
    new = super(Record, self).process_record(new, old)

    schema = self._collection.get('schema')
    settings = self.request.registry.settings
    schema_validation = 'experimental_collection_schema_validation'
    # Only validate when the collection defines a schema AND the
    # experimental setting is enabled.
    if not schema or not asbool(settings.get(schema_validation)):
        return new

    collection_timestamp = self._collection[self.model.modified_field]

    try:
        # Strip server-managed fields before validating: they are not
        # part of the user-defined schema.
        stripped = copy.deepcopy(new)
        stripped.pop(self.model.id_field, None)
        stripped.pop(self.model.modified_field, None)
        stripped.pop(self.model.permissions_field, None)
        stripped.pop(self.schema_field, None)

        jsonschema.validate(stripped, schema)
    except jsonschema_exceptions.ValidationError as e:
        # Best-effort extraction of the offending field name.
        try:
            field = e.path.pop() if e.path else e.validator_value.pop()
        except AttributeError:
            field = None
        raise_invalid(self.request, name=field, description=e.message)

    # Stamp the record with the timestamp of the schema it was validated against.
    new[self.schema_field] = collection_timestamp
    return new
def process_record(self, new, old=None): new = super(Account, self).process_record(new, old) # Store password safely in database as str # (bcrypt.hashpw returns base64 bytes). pwd_str = new["password"].encode(encoding='utf-8') hashed = bcrypt.hashpw(pwd_str, bcrypt.gensalt()) new["password"] = hashed.decode(encoding='utf-8') # Administrators can reach other accounts and anonymous have no # selected_userid. So do not try to enforce. if self.context.is_administrator or self.context.is_anonymous: return new # Do not let accounts be created without usernames. if self.model.id_field not in new: error_details = { 'name': 'data.id', 'description': 'Accounts must have an ID.', } raise_invalid(self.request, **error_details) # Otherwise, we force the id to match the authenticated username. if new[self.model.id_field] != self.request.selected_userid: error_details = { 'name': 'data.id', 'description': 'Username and account ID do not match.', } raise_invalid(self.request, **error_details) return new
def _extract_pagination_rules_from_token(self, limit, sorting):
    """Get pagination params from the ``_token`` QueryString parameter.

    :param limit: pagination limit (unused here; pipeline signature).
    :param sorting: list of ``Sort`` tuples used to rebuild the rules.
    :returns: tuple ``(filters, offset)``.
    :raises: 400 via :func:`raise_invalid` when the token is malformed
        or was already consumed.
    """
    token = self.request.validated['querystring'].get('_token', None)
    filters = []
    offset = 0
    if token:
        error_msg = None
        try:
            tokeninfo = json.loads(decode64(token))
            if not isinstance(tokeninfo, dict):
                raise ValueError()
            last_record = tokeninfo['last_record']
            offset = tokeninfo['offset']
            nonce = tokeninfo['nonce']
        except (ValueError, KeyError, TypeError):
            error_msg = '_token has invalid content'

        # We don't want pagination tokens to be reused several times (#1171).
        # The cache backend is used to keep track of "nonces".
        if self.request.method.lower() == "delete" and error_msg is None:
            registry = self.request.registry
            deleted = registry.cache.delete(nonce)
            if deleted is None:
                error_msg = '_token was already used or has expired.'

        if error_msg:
            error_details = {
                'location': 'querystring',
                'description': error_msg
            }
            raise_invalid(self.request, **error_details)

        filters = self._build_pagination_rules(sorting, last_record)
    return filters, offset
def fxa_oauth_token(request):
    """Return OAuth token from authorization code.

    Trades the ``code`` (bound to a previously-stored ``state``) for a
    longer-lived FxA token, then redirects to the stored callback URI
    with the token appended.
    """
    state = request.validated['querystring']['state']
    code = request.validated['querystring']['code']

    # Require on-going session
    stored_redirect = request.registry.cache.get(state)

    # Make sure we cannot try twice with the same code
    request.registry.cache.delete(state)
    if not stored_redirect:
        error_msg = 'The OAuth session was not found, please re-authenticate.'
        return http_error(httpexceptions.HTTPRequestTimeout(),
                          errno=ERRORS.MISSING_AUTH_TOKEN,
                          message=error_msg)

    # Trade the OAuth code for a longer-lived token
    auth_client = OAuthClient(server_url=fxa_conf(request, 'oauth_uri'),
                              client_id=fxa_conf(request, 'client_id'),
                              client_secret=fxa_conf(request, 'client_secret'))
    try:
        token = auth_client.trade_code(code)
    except fxa_errors.OutOfProtocolError:
        # FxA server misbehaved: not the client's fault.
        raise httpexceptions.HTTPServiceUnavailable()
    except fxa_errors.InProtocolError as error:
        logger.error(error)
        error_details = {
            'name': 'code',
            'location': 'querystring',
            'description': 'Firefox Account code validation failed.'
        }
        raise_invalid(request, **error_details)

    return httpexceptions.HTTPFound(location='%s%s' % (stored_redirect, token))
def _raise_400_if_invalid_id(self, record_id):
    """Raise 400 if the specified record id does not match the format
    expected by storage backends.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    # Non-strings can never match the generator's pattern.
    if isinstance(record_id, six.string_types):
        if self.model.id_generator.match(record_id):
            return
    raise_invalid(self.request,
                  location="path",
                  description="Invalid record id")
def _raise_400_if_id_mismatch(self, new_id, record_id):
    """Raise 400 if the `new_id`, within the request body, does not match
    the `record_id`, obtained from request path.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    if new_id == record_id:
        return
    raise_invalid(self.request,
                  name=self.model.id_field,
                  description="Record id does not match existing record")
def _raise_400_if_id_mismatch(self, new_id, object_id):
    """Raise 400 if the `new_id`, within the request body, does not match
    the `object_id`, obtained from request path.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    if new_id == object_id:
        return
    raise_invalid(self.request,
                  name=self.model.id_field,
                  description="Object id does not match existing object")
def _raise_400_if_invalid_id(self, object_id):
    """Raise 400 if the specified object id does not match the format
    expected by storage backends.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    # Non-strings can never match the generator's pattern.
    valid = isinstance(object_id, str) and self.model.id_generator.match(object_id)
    if not valid:
        raise_invalid(self.request,
                      location="path",
                      description="Invalid object id")
def get_login(request): """Initiates to login dance for the specified scopes and callback URI using appropriate redirections.""" # Settings. provider = request.matchdict["provider"] settings_prefix = "multiauth.policy.%s." % provider issuer = request.registry.settings[settings_prefix + "issuer"] client_id = request.registry.settings[settings_prefix + "client_id"] userid_field = request.registry.settings.get(settings_prefix + "userid_field") state_ttl = int( request.registry.settings.get(settings_prefix + "state_ttl_seconds", DEFAULT_STATE_TTL_SECONDS)) state_length = int( request.registry.settings.get(settings_prefix + "state_length", DEFAULT_STATE_LENGTH)) # Read OpenID configuration (cached by issuer) oid_config = fetch_openid_config(issuer) auth_endpoint = oid_config["authorization_endpoint"] scope = request.GET["scope"] callback = request.GET["callback"] prompt = request.GET.get("prompt") # Check that email scope is requested if userid field is configured as email. if userid_field == "email" and "email" not in scope: error_details = { "name": "scope", "description": "Provider %s requires 'email' scope" % provider, } raise_invalid(request, **error_details) # Generate a random string as state. # And save it until code is traded. state = random_bytes_hex(state_length) request.registry.cache.set("openid:state:" + state, callback, ttl=state_ttl) # Redirect the client to the Identity Provider that will eventually redirect # to the OpenID token endpoint. token_uri = request.route_url("openid_token", provider=provider) + "?" params = dict(client_id=client_id, response_type="code", scope=scope, redirect_uri=token_uri, state=state) if prompt: # The 'prompt' parameter is optional. params["prompt"] = prompt redirect = "{}?{}".format(auth_endpoint, urllib.parse.urlencode(params)) raise httpexceptions.HTTPTemporaryRedirect(redirect)
def resource_create_object(request, resource_cls, uri):
    """Implicitly create a resource (or fail silently).

    In the default bucket, the bucket and collection are implicitly
    created. This helper creates one of those resources using a
    simulated request and context that is appropriate for the resource.
    Also runs create events as though the resource were created in a
    subrequest.

    If the resource already exists, do nothing.

    :returns: the created (or existing) object
    :rtype: dict
    """
    resource_name, matchdict = view_lookup(request, uri)

    # Build a fake request, mainly used to populate the create events that
    # will be triggered by the resource.
    fakerequest = build_request(request, {
        'method': 'PUT',
        'path': uri,
    })
    fakerequest.matchdict = matchdict
    fakerequest.bound_data = request.bound_data
    fakerequest.authn_type = request.authn_type
    fakerequest.selected_userid = request.selected_userid
    fakerequest.errors = request.errors
    fakerequest.current_resource_name = resource_name

    obj_id = matchdict['id']

    # Fake context, required to instantiate a resource.
    context = RouteFactory(fakerequest)
    context.resource_name = resource_name
    resource = resource_cls(fakerequest, context)

    # Check that provided id is valid for this resource.
    if not resource.model.id_generator.match(obj_id):
        error_details = {
            'location': 'path',
            'description': 'Invalid {} id'.format(resource_name)
        }
        raise_invalid(resource.request, **error_details)

    data = {'id': obj_id}
    try:
        obj = resource.model.create_record(data)
    except UnicityError as e:
        # The record already exists; skip running events
        return e.record
    # Since the current request is not a resource (but a straight Service),
    # we simulate a request on a resource.
    # This will be used in the resource event payload.
    resource.postprocess(obj, action=ACTIONS.CREATE)
    return obj
def _raise_412_if_modified(self, record=None):
    """Raise 412 if current timestamp is superior to the one
    specified in headers.

    :param record: optional record whose ``modified_field`` is compared;
        falls back to the collection timestamp when omitted.
    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
    """
    if_match = self.request.headers.get('If-Match')
    if_none_match = self.request.headers.get('If-None-Match')

    if not if_match and not if_none_match:
        return

    # Shared error payload for every malformed-header case below.
    error_details = {
        'location': 'header',
        'description': ("Invalid value for If-Match. The value should "
                        "be integer between double quotes.")}

    try:
        if_match = decode_header(if_match) if if_match else None
        if_none_match = decode_header(if_none_match) if if_none_match else None
    except UnicodeDecodeError:
        raise_invalid(self.request, **error_details)

    if record and if_none_match == '*':
        if record.get(self.model.deleted_field, False):
            # Tombstones should not prevent creation.
            return
        modified_since = -1  # Always raise.
    elif if_match:
        try:
            # Expected format: an integer timestamp between double quotes.
            if not (if_match[0] == if_match[-1] == '"'):
                raise ValueError()
            modified_since = int(if_match[1:-1])
        except (IndexError, ValueError):
            raise_invalid(self.request, **error_details)
    else:
        # In case _raise_304_if_not_modified() did not raise.
        return

    if record:
        current_timestamp = record[self.model.modified_field]
    else:
        current_timestamp = self.model.timestamp()

    if current_timestamp > modified_since:
        error_msg = 'Resource was modified meanwhile'
        details = {'existing': record} if record else {}
        response = http_error(HTTPPreconditionFailed(),
                              errno=ERRORS.MODIFIED_MEANWHILE,
                              message=error_msg,
                              details=details)
        self._add_timestamp_header(response, timestamp=current_timestamp)
        raise response
def _raise_400_if_invalid_id(self, record_id):
    """Raise 400 if the specified record id does not match the format
    expected by storage backends.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    # Coerce to text first: the generator pattern only matches strings.
    valid = self.model.id_generator.match(six.text_type(record_id))
    if not valid:
        raise_invalid(self.request,
                      location='path',
                      description="Invalid record id")
def _extract_posted_body_id(request):
    """Return ``data.id`` from the posted body, or fail appropriately.

    A missing id on POST is a 400; on any other method it means an
    anonymous read, which is a 401.
    """
    try:
        # Anonymous creation with POST.
        return request.json["data"]["id"]
    except (ValueError, KeyError):
        if request.method.lower() == "post":
            # Bad POST data.
            raise_invalid(request,
                          name="data.id",
                          description="data.id in body: Required")
        # Anonymous GET
        raise http_error(httpexceptions.HTTPUnauthorized(),
                         error="Cannot read accounts.")
def _raise_400_if_invalid_id(self, record_id):
    """Raise 400 if the specified record id does not match the format
    expected by storage backends.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    # Non-strings can never match the generator's pattern.
    if isinstance(record_id, str):
        if self.model.id_generator.match(record_id):
            return
    raise_invalid(self.request,
                  location='path',
                  description='Invalid record id')
def _raise_400_if_id_mismatch(self, new_id, record_id):
    """Raise 400 if the `new_id`, within the request body, does not match
    the `record_id`, obtained from request path.

    :raises: :class:`pyramid.httpexceptions.HTTPBadRequest`
    """
    if new_id == record_id:
        return
    raise_invalid(self.request,
                  name=self.model.id_field,
                  description='Record id does not match existing record')
def _raise_412_if_modified(self, record=None):
    """Raise 412 if current timestamp is superior to the one
    specified in headers.

    :param record: optional record whose ``modified_field`` is compared;
        falls back to the collection timestamp when omitted.
    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
    """
    if_match = self.request.headers.get('If-Match')
    if_none_match = self.request.headers.get('If-None-Match')

    if not if_match and not if_none_match:
        return

    if_match = decode_header(if_match) if if_match else None

    if record and if_none_match and decode_header(if_none_match) == '*':
        if record.get(self.model.deleted_field, False):
            # Tombstones should not prevent creation.
            return
        modified_since = -1  # Always raise.
    elif if_match:
        try:
            # Expected format: an integer timestamp between double quotes.
            if not (if_match[0] == if_match[-1] == '"'):
                raise ValueError()
            modified_since = int(if_match[1:-1])
        except (IndexError, ValueError):
            message = ("Invalid value for If-Match. The value should "
                       "be integer between double quotes.")
            error_details = {
                'location': 'headers',
                'description': message
            }
            raise_invalid(self.request, **error_details)
    else:
        # In case _raise_304_if_not_modified() did not raise.
        return

    if record:
        current_timestamp = record[self.model.modified_field]
    else:
        current_timestamp = self.model.timestamp()

    if current_timestamp > modified_since:
        error_msg = 'Resource was modified meanwhile'
        details = {'existing': record} if record else {}
        response = http_error(HTTPPreconditionFailed(),
                              errno=ERRORS.MODIFIED_MEANWHILE,
                              message=error_msg,
                              details=details)
        self._add_timestamp_header(response, timestamp=current_timestamp)
        raise response
def get_login(request): """Initiates to login dance for the specified scopes and callback URI using appropriate redirections.""" # Settings. provider = request.matchdict['provider'] settings_prefix = 'multiauth.policy.%s.' % provider issuer = request.registry.settings[settings_prefix + 'issuer'] client_id = request.registry.settings[settings_prefix + 'client_id'] userid_field = request.registry.settings.get(settings_prefix + 'userid_field') state_ttl = int( request.registry.settings.get(settings_prefix + 'state_ttl_seconds', DEFAULT_STATE_TTL_SECONDS)) state_length = int( request.registry.settings.get(settings_prefix + 'state_length', DEFAULT_STATE_LENGTH)) # Read OpenID configuration (cached by issuer) oid_config = fetch_openid_config(issuer) auth_endpoint = oid_config['authorization_endpoint'] scope = request.GET['scope'] callback = request.GET['callback'] # Check that email scope is requested if userid field is configured as email. if userid_field == 'email' and 'email' not in scope: error_details = { 'name': 'scope', 'description': "Provider %s requires 'email' scope" % provider, } raise_invalid(request, **error_details) # Generate a random string as state. # And save it until code is traded. state = random_bytes_hex(state_length) request.registry.cache.set('openid:state:' + state, callback, ttl=state_ttl) # Redirect the client to the Identity Provider that will eventually redirect # to the OpenID token endpoint. token_uri = request.route_url('openid_token', provider=provider) + '?' params = dict(client_id=client_id, response_type='code', scope=scope, redirect_uri=token_uri, state=state) redirect = '{}?{}'.format(auth_endpoint, urllib.parse.urlencode(params)) raise httpexceptions.HTTPTemporaryRedirect(redirect)
def post_search(request):
    """Run a search from a JSON request body.

    An empty body is treated as an empty search; a non-empty body that
    is not valid JSON is a 400.
    """
    raw_body = request.body
    try:
        payload = json.loads(raw_body.decode("utf-8"))
    except json.decoder.JSONDecodeError:
        if raw_body:
            raise_invalid(
                request,
                name="JSONDecodeError",
                description="Please make sure your request body is a valid JSON payload.",
            )
        payload = {}
    return search_view(request, **payload)
def apply_changes(self, record, changes):
    """Merge `changes` into `record` fields.

    .. note::

        This is used in the context of PATCH only.

    Override this to control field changes at record level, for example:

    .. code-block:: python

        def apply_changes(self, record, changes):
            # Ignore value change if inferior
            if record['position'] > changes.get('position', -1):
                changes.pop('position', None)
            return super(MyResource, self).apply_changes(record, changes)

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
        if result does not comply with resource schema.

    :returns: the new record with `changes` applied.
    :rtype: dict
    """
    # Reject any attempt to modify a read-only field to a new value.
    for field, value in changes.items():
        has_changed = record.get(field, value) != value
        if self.mapping.is_readonly(field) and has_changed:
            error_details = {
                'name': field,
                'description': 'Cannot modify {0}'.format(field)
            }
            raise_invalid(self.request, **error_details)

    updated = record.copy()

    # recursive patch and remove field if null attribute is passed (RFC 7396)
    content_type = str(self.request.headers.get('Content-Type'))
    if content_type == 'application/merge-patch+json':
        recursive_update_dict(updated, changes, ignores=[None])
    else:
        updated.update(**changes)

    try:
        return self.mapping.deserialize(updated)
    except colander.Invalid as e:
        # Transform the errors we got from colander into Cornice errors.
        # We could not rely on Service schema because the record should be
        # validated only once the changes are applied
        for field, error in e.asdict().items():
            raise_invalid(self.request, name=field, description=error)
def _extract_posted_body_id(request):
    """Return ``data.id`` from the posted body, or fail appropriately.

    A missing id on POST is a 400; on any other method it means an
    anonymous read, which is a 401.
    """
    try:
        # Anonymous creation with POST.
        return request.json['data']['id']
    except (ValueError, KeyError):
        if request.method.lower() == 'post':
            # Bad POST data.
            raise_invalid(request,
                          name='data.id',
                          description='data.id in body: Required')
        # Anonymous GET
        raise http_error(httpexceptions.HTTPUnauthorized(),
                         error='Cannot read accounts.')
def get_token(request): """Trades the specified code and state against access and ID tokens. The client is redirected to the original ``callback`` URI with the result in querystring.""" # Settings. provider = request.matchdict["provider"] settings_prefix = "multiauth.policy.%s." % provider issuer = request.registry.settings[settings_prefix + "issuer"] client_id = request.registry.settings[settings_prefix + "client_id"] client_secret = request.registry.settings[settings_prefix + "client_secret"] # Read OpenID configuration (cached by issuer) oid_config = fetch_openid_config(issuer) token_endpoint = oid_config["token_endpoint"] code = request.GET["code"] state = request.GET["state"] # State can be used only once. callback = request.registry.cache.delete("openid:state:" + state) if callback is None: error_details = { "name": "state", "description": "Invalid state", "errno": ERRORS.INVALID_AUTH_TOKEN.value, } raise_invalid(request, **error_details) # Trade the code for tokens on the Identity Provider. # Google Identity requires to specify again redirect_uri. redirect_uri = request.route_url("openid_token", provider=provider) data = { "code": code, "client_id": client_id, "client_secret": client_secret, "redirect_uri": redirect_uri, "grant_type": "authorization_code", } resp = requests.post(token_endpoint, data=data) # The IdP response is forwarded to the client in the querystring/location hash. # (eg. callback=`http://localhost:3000/#tokens=`) token_info = resp.text.encode("utf-8") encoded_token = base64.b64encode(token_info) redirect = callback + urllib.parse.quote(encoded_token.decode("utf-8")) raise httpexceptions.HTTPTemporaryRedirect(redirect)
def facebook_token(request):
    """Return OAuth token from authorization code.

    Trades the Facebook ``code`` (bound to a previously-stored
    ``state``) for an access token, then redirects to the stored
    callback URI with the token appended.
    """
    state = request.validated['querystring']['state']
    code = request.validated['querystring']['code']

    # Require on-going session
    stored_redirect = request.registry.cache.get(state)

    # Make sure we cannot try twice with the same code
    request.registry.cache.delete(state)
    if not stored_redirect:
        error_msg = 'The Facebook Auth session was not found, please re-authenticate.'
        return http_error(httpexceptions.HTTPRequestTimeout(),
                          errno=ERRORS.MISSING_AUTH_TOKEN,
                          message=error_msg)

    url = facebook_conf(request, 'token_endpoint')
    params = {
        'client_id': facebook_conf(request, 'client_id'),
        'client_secret': facebook_conf(request, 'client_secret'),
        'redirect_uri': request.route_url(token.name),
        'code': code,
    }

    resp = requests.get(url, params=params)
    if resp.status_code == 400:
        # Facebook rejected the code: client-side error (400).
        response_body = resp.json()
        logger.error(
            "Facebook Token Validation Failed: {}".format(response_body))
        error_details = {
            'name': 'code',
            'location': 'querystring',
            'description': 'Facebook OAuth code validation failed.'
        }
        raise_invalid(request, **error_details)
    try:
        resp.raise_for_status()
    except requests.exceptions.HTTPError:
        # Any other HTTP failure is treated as a Facebook-side outage.
        logger.exception("Facebook Token Protocol Error")
        raise httpexceptions.HTTPServiceUnavailable()
    else:
        response_body = resp.json()
        access_token = response_body['access_token']
        return httpexceptions.HTTPFound(location='%s%s' % (stored_redirect, access_token))
def _extract_limit(self): """Extract limit value from QueryString parameters.""" paginate_by = self.request.registry.settings["paginate_by"] limit = self.request.GET.get("_limit", paginate_by) if limit: try: limit = int(limit) except ValueError: error_details = {"location": "querystring", "description": "_limit should be an integer"} raise_invalid(self.request, **error_details) # If limit is higher than paginate_by setting, ignore it. if limit and paginate_by: limit = min(limit, paginate_by) return limit
def resource_create_object(request, resource_cls, uri):
    """In the default bucket, the bucket and collection are implicitly
    created. This helper instantiate the resource and simulate a request
    with its RootFactory on the instantiated resource.

    :param request: the original (real) request.
    :param resource_cls: the resource class to instantiate for ``uri``.
    :param uri: the URI of the object to create implicitly.
    :returns: the created object
    :rtype: dict
    """
    resource_name, matchdict = view_lookup(request, uri)

    # Build a fake request, mainly used to populate the create events that
    # will be triggered by the resource.
    fakerequest = build_request(request, {
        'method': 'PUT',
        'path': uri,
    })
    # Carry over the attributes of the real request that resources and
    # event listeners rely on.
    fakerequest.matchdict = matchdict
    fakerequest.bound_data = request.bound_data
    fakerequest.authn_type = request.authn_type
    fakerequest.selected_userid = request.selected_userid
    fakerequest.errors = request.errors
    fakerequest.current_resource_name = resource_name

    obj_id = matchdict['id']

    # Fake context, required to instantiate a resource.
    context = RouteFactory(fakerequest)
    context.resource_name = resource_name
    resource = resource_cls(fakerequest, context)

    # Check that provided id is valid for this resource.
    if not resource.model.id_generator.match(obj_id):
        error_details = {
            'location': 'path',
            'description': "Invalid %s id" % resource_name
        }
        raise_invalid(resource.request, **error_details)

    data = {'id': obj_id}
    try:
        obj = resource.model.create_record(data)
        # Since the current request is not a resource (but a straight Service),
        # we simulate a request on a resource.
        # This will be used in the resource event payload.
        resource.postprocess(data, action=ACTIONS.CREATE)
    except storage_exceptions.UnicityError as e:
        # The object already exists: return the existing one (idempotent).
        obj = e.record
    return obj
def get_token(request): """Trades the specified code and state against access and ID tokens. The client is redirected to the original ``callback`` URI with the result in querystring.""" # Settings. provider = request.matchdict['provider'] settings_prefix = 'multiauth.policy.%s.' % provider issuer = request.registry.settings[settings_prefix + 'issuer'] client_id = request.registry.settings[settings_prefix + 'client_id'] client_secret = request.registry.settings[settings_prefix + 'client_secret'] # Read OpenID configuration (cached by issuer) oid_config = fetch_openid_config(issuer) token_endpoint = oid_config['token_endpoint'] code = request.GET['code'] state = request.GET['state'] # State can be used only once. callback = request.registry.cache.delete('openid:state:' + state) if callback is None: error_details = { 'name': 'state', 'description': 'Invalid state', 'errno': ERRORS.INVALID_AUTH_TOKEN.value, } raise_invalid(request, **error_details) # Trade the code for tokens on the Identity Provider. # Google Identity requires to specify again redirect_uri. redirect_uri = request.route_url('openid_token', provider=provider) + '?' data = { 'code': code, 'client_id': client_id, 'client_secret': client_secret, 'redirect_uri': redirect_uri, 'grant_type': 'authorization_code', } resp = requests.post(token_endpoint, data=data) # The IdP response is forwarded to the client in the querystring/location hash. # (eg. callback=`http://localhost:3000/#tokens=`) redirect = callback + urllib.parse.quote(resp.text) raise httpexceptions.HTTPTemporaryRedirect(redirect)
def get_login(request):
    """Initiates to login dance for the specified scopes and callback URI
    using appropriate redirections."""
    # Settings for this provider's policy.
    provider = request.matchdict['provider']
    prefix = 'multiauth.policy.%s.' % provider
    settings = request.registry.settings
    issuer = settings[prefix + 'issuer']
    client_id = settings[prefix + 'client_id']
    userid_field = settings.get(prefix + 'userid_field')
    state_ttl = int(settings.get(prefix + 'state_ttl_seconds',
                                 DEFAULT_STATE_TTL_SECONDS))
    state_length = int(settings.get(prefix + 'state_length',
                                    DEFAULT_STATE_LENGTH))

    # Read OpenID configuration (cached by issuer)
    auth_endpoint = fetch_openid_config(issuer)['authorization_endpoint']

    scope = request.GET['scope']
    callback = request.GET['callback']
    prompt = request.GET.get('prompt')

    # Check that email scope is requested if userid field is configured as email.
    if userid_field == 'email' and 'email' not in scope:
        raise_invalid(request,
                      name='scope',
                      description="Provider %s requires 'email' scope" % provider)

    # Generate a random string as state, kept server-side until the code is
    # traded on the token endpoint.
    state = random_bytes_hex(state_length)
    request.registry.cache.set('openid:state:' + state, callback, ttl=state_ttl)

    # Redirect the client to the Identity Provider that will eventually
    # redirect to the OpenID token endpoint.
    token_uri = request.route_url('openid_token', provider=provider) + '?'
    params = {
        'client_id': client_id,
        'response_type': 'code',
        'scope': scope,
        'redirect_uri': token_uri,
        'state': state,
    }
    if prompt:
        # The 'prompt' parameter is optional.
        params['prompt'] = prompt
    raise httpexceptions.HTTPTemporaryRedirect(
        '{}?{}'.format(auth_endpoint, urllib.parse.urlencode(params)))
def process_object(self, new, old=None):
    """Validate records against collection or bucket schema, if any.

    :param new: the new version of the object.
    :param old: the previous version, if any.
    :returns: the validated object, with the schema timestamp assigned
        when a collection schema was applied.
    :raises: 400 via ``raise_invalid`` when validation fails.
    """
    new = super().process_object(new, old)

    # Is schema validation enabled?
    settings = self.request.registry.settings
    schema_validation = "experimental_collection_schema_validation"
    if not asbool(settings.get(schema_validation)):
        return new

    # Remove internal and auto-assigned fields from schemas and record.
    ignored_fields = (
        self.model.modified_field,
        self.schema_field,
        self.model.permissions_field,
    )

    # The schema defined on the collection will be validated first.
    if "schema" in self._collection:
        schema = self._collection["schema"]

        try:
            validate_schema(new, schema, ignore_fields=ignored_fields, id_field=self.model.id_field)
        except ValidationError as e:
            raise_invalid(self.request, name=e.field, description=e.message)
        except RefResolutionError as e:
            # Schema references an unresolvable $ref.
            raise_invalid(self.request, name="schema", description=str(e))

        # Assign the schema version to the record.
        schema_timestamp = self._collection[self.model.modified_field]
        new[self.schema_field] = schema_timestamp

    # Validate also from the record:schema field defined on the bucket.
    validate_from_bucket_schema_or_400(
        new,
        resource_name="record",
        request=self.request,
        ignore_fields=ignored_fields,
        id_field=self.model.id_field,
    )

    return new
def validate_from_bucket_schema_or_400(data, resource_name, request, id_field, ignore_fields=()):
    """Lookup in the parent objects if a schema was defined for this resource.

    If the schema validation feature is enabled, if a schema is/are defined,
    and if the data does not validate it/them, then it raises a 400 exception.

    :param data: the object to validate.
    :param resource_name: resource kind (e.g. ``"record"``), used to look up
        the ``<resource_name>:schema`` field on the parent bucket.
    :param request: the current request (settings, storage, errors).
    :param id_field: name of the id field, excluded from validation.
    :param ignore_fields: extra fields excluded from validation.
        Defaults to an empty tuple — a mutable ``[]`` default would be shared
        across calls (Python mutable-default pitfall).
    :raises: 400 via ``raise_invalid`` when the data violates the schema.
    """
    settings = request.registry.settings
    schema_validation = "experimental_collection_schema_validation"
    # If disabled from settings, do nothing.
    if not asbool(settings.get(schema_validation)):
        return

    bucket_id = request.matchdict["bucket_id"]
    bucket_uri = utils.instance_uri(request, "bucket", id=bucket_id)
    buckets = request.bound_data.setdefault("buckets", {})
    if bucket_uri not in buckets:
        # Unknown yet, fetch from storage.
        bucket = object_exists_or_404(
            request, resource_name="bucket", parent_id="", object_id=bucket_id
        )
        buckets[bucket_uri] = bucket

    # Let's see if the bucket defines a schema for this resource.
    metadata_field = f"{resource_name}:schema"
    bucket = buckets[bucket_uri]
    if metadata_field not in bucket:
        return

    # Validate or fail with 400.
    schema = bucket[metadata_field]
    try:
        validate_schema(data, schema, ignore_fields=ignore_fields, id_field=id_field)
    except ValidationError as e:
        raise_invalid(request, name=e.field, description=e.message)
    except RefResolutionError as e:
        # Schema references an unresolvable $ref.
        raise_invalid(request, name="schema", description=str(e))
def _extract_limit(self): """Extract limit value from QueryString parameters.""" paginate_by = self.request.registry.settings['paginate_by'] limit = self.request.GET.get('_limit', paginate_by) if limit: try: limit = int(limit) except ValueError: error_details = { 'location': 'querystring', 'description': "_limit should be an integer" } raise_invalid(self.request, **error_details) # If limit is higher than paginate_by setting, ignore it. if limit and paginate_by: limit = min(limit, paginate_by) return limit
def apply_changes(self, record, changes):
    """Merge `changes` into `record` fields.

    .. note::

        This is used in the context of PATCH only.

    Override this to control field changes at record level, for example:

    .. code-block:: python

        def apply_changes(self, record, changes):
            # Ignore value change if inferior
            if record['position'] > changes.get('position', -1):
                changes.pop('position', None)
            return super(MyResource, self).apply_changes(record, changes)

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
        if result does not comply with resource schema.

    :returns: the new record with `changes` applied.
    :rtype: dict
    """
    # Reject any attempt to modify a read-only field to a different value.
    for field, value in changes.items():
        has_changed = record.get(field, value) != value
        if self.mapping.is_readonly(field) and has_changed:
            error_details = {
                'name': field,
                'description': 'Cannot modify {0}'.format(field)
            }
            raise_invalid(self.request, **error_details)

    updated = record.copy()
    # Fix: plain ``update(changes)`` instead of ``update(**changes)`` —
    # keyword expansion requires keys to be strings and needlessly rebuilds
    # the mapping before merging.
    updated.update(changes)

    try:
        return self.mapping.deserialize(updated)
    except colander.Invalid as e:
        # Transform the errors we got from colander into Cornice errors.
        # We could not rely on Service schema because the record should be
        # validated only once the changes are applied
        for field, error in e.asdict().items():
            raise_invalid(self.request, name=field, description=error)
def process_record(self, new, old=None):
    """Validate records against collection schema, if any.

    :param new: the new version of the record.
    :param old: the previous version, if any.
    :returns: the validated record, stamped with the collection timestamp.
    :raises: 400 via ``raise_invalid`` when validation fails.
    """
    new = super().process_record(new, old)

    schema = self._collection.get('schema')
    settings = self.request.registry.settings
    schema_validation = 'experimental_collection_schema_validation'
    # No-op when the collection has no schema or the feature is disabled.
    if not schema or not asbool(settings.get(schema_validation)):
        return new

    # Remove internal and auto-assigned fields from schema and record.
    internal_fields = (self.model.id_field,
                       self.model.modified_field,
                       self.schema_field,
                       self.model.permissions_field)
    required_fields = [f for f in schema.get('required', []) if f not in internal_fields]
    # jsonschema doesn't accept 'required': [] yet.
    # See https://github.com/Julian/jsonschema/issues/337.
    # In the meantime, strip out 'required' if no other fields are required.
    if required_fields:
        schema = {**schema, 'required': required_fields}
    else:
        schema = {f: v for f, v in schema.items() if f != 'required'}
    data = {f: v for f, v in new.items() if f not in internal_fields}

    # Validate or fail with 400.
    try:
        jsonschema.validate(data, schema)
    except jsonschema_exceptions.ValidationError as e:
        # Best-effort guess of the offending field name: prefer the data
        # path, then the validator value (e.g. the missing name for a
        # 'required' error), then the schema path as a last resort.
        if e.path:
            field = e.path[-1]
        elif e.validator_value:
            field = e.validator_value[-1]
        else:
            field = e.schema_path[-1]
        raise_invalid(self.request, name=field, description=e.message)

    # Assign the schema version (collection object timestamp) to the record.
    collection_timestamp = self._collection[self.model.modified_field]
    new[self.schema_field] = collection_timestamp

    return new
def _extract_pagination_rules_from_token(self, limit, sorting): """Get pagination params.""" queryparams = self.request.GET token = queryparams.get("_token", None) filters = [] offset = 0 if token: try: tokeninfo = json.loads(decode64(token)) if not isinstance(tokeninfo, dict): raise ValueError() last_record = tokeninfo["last_record"] offset = tokeninfo["offset"] except (ValueError, KeyError, TypeError): error_msg = "_token has invalid content" error_details = {"location": "querystring", "description": error_msg} raise_invalid(self.request, **error_details) filters = self._build_pagination_rules(sorting, last_record) return filters, offset
def _extract_partial_fields(self): """Extract the fields to do the projection from QueryString parameters. """ fields = self.request.GET.get("_fields", None) if fields: fields = fields.split(",") root_fields = [f.split(".")[0] for f in fields] known_fields = self._get_known_fields() invalid_fields = set(root_fields) - set(known_fields) preserve_unknown = self.mapping.get_option("preserve_unknown") if not preserve_unknown and invalid_fields: error_msg = "Fields %s do not exist" % ",".join(invalid_fields) error_details = {"name": "Invalid _fields parameter", "description": error_msg} raise_invalid(self.request, **error_details) # Since id and last_modified are part of the synchronisation # API, force their presence in payloads. fields = fields + [self.model.id_field, self.model.modified_field] return fields
def process_record(self, new, old=None):
    """Validate records against collection or bucket schema, if any.

    :param new: the new version of the record.
    :param old: the previous version, if any.
    :returns: the validated record, with the schema timestamp assigned
        when a collection schema was applied.
    :raises: 400 via ``raise_invalid`` when validation fails.
    """
    new = super().process_record(new, old)

    # Is schema validation enabled?
    settings = self.request.registry.settings
    schema_validation = "experimental_collection_schema_validation"
    if not asbool(settings.get(schema_validation)):
        return new

    # Remove internal and auto-assigned fields from schemas and record.
    internal_fields = (
        self.model.id_field,
        self.model.modified_field,
        self.schema_field,
        self.model.permissions_field,
    )

    # The schema defined on the collection will be validated first.
    if "schema" in self._collection:
        schema = self._collection["schema"]

        try:
            validate_schema(new, schema, ignore_fields=internal_fields)
        except ValidationError as e:
            raise_invalid(self.request, name=e.field, description=e.message)
        except RefResolutionError as e:
            # Schema references an unresolvable $ref.
            raise_invalid(self.request, name="schema", description=str(e))

        # Assign the schema version to the record.
        schema_timestamp = self._collection[self.model.modified_field]
        new[self.schema_field] = schema_timestamp

    # Validate also from the record:schema field defined on the bucket.
    validate_from_bucket_schema_or_400(
        new, resource_name="record", request=self.request, ignore_fields=internal_fields
    )

    return new
def _extract_partial_fields(self): """Extract the fields to do the projection from QueryString parameters. """ fields = self.request.validated['querystring'].get('_fields') if fields: root_fields = [f.split('.')[0] for f in fields] known_fields = self._get_known_fields() invalid_fields = set(root_fields) - set(known_fields) preserve_unknown = self.schema.get_option('preserve_unknown') if not preserve_unknown and invalid_fields: error_msg = 'Fields {} do not exist'.format(','.join(invalid_fields)) error_details = { 'name': 'Invalid _fields parameter', 'description': error_msg } raise_invalid(self.request, **error_details) # Since id and last_modified are part of the synchronisation # API, force their presence in payloads. fields = fields + [self.model.id_field, self.model.modified_field] return fields
def validate_from_bucket_schema_or_400(data, resource_name, request, ignore_fields=()):
    """Lookup in the parent objects if a schema was defined for this resource.

    If the schema validation feature is enabled, if a schema is/are defined,
    and if the data does not validate it/them, then it raises a 400 exception.

    :param data: the object to validate.
    :param resource_name: resource kind (e.g. ``"record"``), used to look up
        the ``<resource_name>:schema`` field on the parent bucket.
    :param request: the current request (settings, storage, errors).
    :param ignore_fields: fields excluded from validation.
        Defaults to an empty tuple — a mutable ``[]`` default would be shared
        across calls (Python mutable-default pitfall).
    :raises: 400 via ``raise_invalid`` when the data violates the schema.
    """
    settings = request.registry.settings
    schema_validation = 'experimental_collection_schema_validation'
    # If disabled from settings, do nothing.
    if not asbool(settings.get(schema_validation)):
        return

    bucket_id = request.matchdict["bucket_id"]
    bucket_uri = utils.instance_uri(request, 'bucket', id=bucket_id)
    buckets = request.bound_data.setdefault('buckets', {})
    if bucket_uri not in buckets:
        # Unknown yet, fetch from storage.
        bucket = object_exists_or_404(request,
                                      collection_id='bucket',
                                      parent_id='',
                                      object_id=bucket_id)
        buckets[bucket_uri] = bucket

    # Let's see if the bucket defines a schema for this resource.
    metadata_field = "{}:schema".format(resource_name)
    bucket = buckets[bucket_uri]
    if metadata_field not in bucket:
        return

    # Validate or fail with 400.
    schema = bucket[metadata_field]
    try:
        validate_schema(data, schema, ignore_fields=ignore_fields)
    except ValidationError as e:
        raise_invalid(request, name=e.field, description=e.message)
    except RefResolutionError as e:
        # Schema references an unresolvable $ref.
        raise_invalid(request, name='schema', description=str(e))
def save_file(content, request, randomize=True, gzipped=False):
    """Save the uploaded ``content`` via the request's attachment backend
    and return the attachment metadata.

    :param content: the uploaded file, expected to be a ``cgi.FieldStorage``.
    :param request: the current request (settings, matchdict, storage).
    :param randomize: forwarded to the attachment backend's save options.
    :param gzipped: if True, the payload is gzipped in memory before saving
        and the original file metadata is kept under the ``original`` key.
    :returns: the attachment metadata dict (filename, location, hash,
        mimetype, size, and optionally original).
    :raises: 400 via ``raise_invalid`` when no file was provided or the
        file extension is not allowed.
    """
    # Folder can be templated from URL placeholders (e.g. {bucket_id}).
    folder_pattern = request.registry.settings.get('attachment.folder', '')
    folder = folder_pattern.format(**request.matchdict) or None

    # Read file to compute hash.
    if not isinstance(content, cgi.FieldStorage):
        error_msg = 'Filename is required.'
        raise_invalid(request, location='body', description=error_msg)

    content.file.seek(0)
    filecontent = content.file.read()

    if gzipped:
        # Keep the pre-compression metadata before mutating ``content``.
        original = {
            'filename': content.filename,
            'hash': sha256(filecontent),
            'mimetype': content.type,
            'size': len(filecontent),
        }
        mimetype = 'application/x-gzip'
        filename = content.filename + '.gz'

        # in-memory gzipping
        out = BytesIO()
        with gzip.GzipFile(fileobj=out, mode="w") as f:
            f.write(filecontent)
        filecontent = out.getvalue()
        # Rewind and swap the upload's file object so the backend stores
        # the gzipped bytes under the .gz filename.
        out.seek(0)
        content.file = out
        content.filename = filename
    else:
        original = None
        mimetype = content.type
        filename = content.filename

    save_options = {'folder': folder, 'randomize': randomize}
    if gzipped:
        save_options['extensions'] = ['gz']
    try:
        location = request.attachment.save(content, **save_options)
    except FileNotAllowed:
        error_msg = 'File extension is not allowed.'
        raise_invalid(request, location='body', description=error_msg)

    # File metadata.
    fullurl = request.attachment.url(location)
    size = len(filecontent)
    filehash = sha256(filecontent)
    attachment = {
        'filename': filename,
        'location': fullurl,
        'hash': filehash,
        'mimetype': mimetype,
        'size': size
    }
    if original is not None:
        attachment['original'] = original

    # Store link between record and attachment (for later deletion).
    request.registry.storage.create("", FILE_LINKS, {
        'location': location,  # store relative location.
        'bucket_uri': bucket_uri(request),
        'collection_uri': collection_uri(request),
        'record_uri': record_uri(request)
    })

    return attachment
def patch(self):
    """Record ``PATCH`` endpoint: modify a record and return its
    new version.

    If a request header ``Response-Behavior`` is set to ``light``,
    only the fields whose value was changed are returned. If set to
    ``diff``, only the fields whose value became different than
    the one provided are returned.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
        the record is not found.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed`
        if ``If-Match`` header is provided and record modified
        in the iterim.

    .. seealso::
        Add custom behaviour by overriding
        :meth:`kinto.core.resource.UserResource.apply_changes` or
        :meth:`kinto.core.resource.UserResource.process_record`.
    """
    self._raise_400_if_invalid_id(self.record_id)
    existing = self._get_record_or_404(self.record_id)
    self._raise_412_if_modified(existing)

    try:
        # `data` attribute may not be present if only perms are patched.
        changes = self.request.json.get('data', {})
    except ValueError:
        # If no `data` nor `permissions` is provided in patch, reject!
        # XXX: This should happen in schema instead (c.f. ShareableViewSet)
        error_details = {
            'name': 'data',
            'description': 'Provide at least one of data or permissions',
        }
        raise_invalid(self.request, **error_details)

    updated = self.apply_changes(existing, changes=changes)

    # The id must not change across the patch.
    record_id = updated.setdefault(self.model.id_field,
                                   self.record_id)
    self._raise_400_if_id_mismatch(record_id, self.record_id)

    new_record = self.process_record(updated, old=existing)

    # Fields from the patch whose value actually differs from storage.
    changed_fields = [k for k in changes.keys()
                      if existing.get(k) != new_record.get(k)]

    # Save in storage if necessary.
    if changed_fields or self.force_patch_update:
        try:
            unique_fields = self.mapping.get_option('unique_fields')
            new_record = self.model.update_record(
                new_record,
                unique_fields=unique_fields)
        except storage_exceptions.UnicityError as e:
            self._raise_conflict(e)
    else:
        # Behave as if storage would have added `id` and `last_modified`.
        for extra_field in [self.model.modified_field,
                            self.model.id_field]:
            new_record[extra_field] = existing[extra_field]

    # Adjust response according to ``Response-Behavior`` header
    body_behavior = self.request.headers.get('Response-Behavior', 'full')

    if body_behavior.lower() == 'light':
        # Only fields that were changed.
        data = {k: new_record[k] for k in changed_fields}
    elif body_behavior.lower() == 'diff':
        # Only fields that are different from those provided.
        data = {k: new_record[k] for k in changed_fields
                if changes.get(k) != new_record.get(k)}
    else:
        data = new_record

    timestamp = new_record.get(self.model.modified_field,
                               existing[self.model.modified_field])
    self._add_timestamp_header(self.request.response, timestamp=timestamp)

    return self.postprocess(data, action=ACTIONS.UPDATE, old=existing)
def _extract_filters(self):
    """Extracts filters from QueryString parameters.

    :returns: a list of ``Filter`` tuples built from the querystring.
    :raises: 400 via ``raise_invalid`` on unknown fields or invalid values.
    """
    queryparams = self.request.validated['querystring']

    # Fix: every regex fragment is now a raw string — the previous
    # ``')_([\w\.]+)$'`` fragment was a plain literal relying on invalid
    # escape sequences (a DeprecationWarning since Python 3.6).
    # Also hoisted out of the loop: the pattern is loop-invariant.
    all_keywords = '|'.join([i.name.lower() for i in COMPARISON])
    operator_pattern = re.compile(r'^(' + all_keywords + r')_([\w\.]+)$')

    filters = []
    for param, value in queryparams.items():
        param = param.strip()

        error_details = {
            'name': param,
            'location': 'querystring',
            'description': 'Invalid value for {}'.format(param)
        }

        # Ignore specific fields
        if param.startswith('_') and param not in ('_since', '_to', '_before'):
            continue

        # Handle the _since specific filter.
        if param in ('_since', '_to', '_before'):
            if param == '_since':
                operator = COMPARISON.GT
            else:
                if param == '_to':
                    message = ('_to is now deprecated, '
                               'you should use _before instead')
                    url = ('https://kinto.readthedocs.io/en/2.4.0/api/'
                           'resource.html#list-of-available-url-'
                           'parameters')
                    send_alert(self.request, message, url)
                operator = COMPARISON.LT

            if value == '':
                raise_invalid(self.request, **error_details)

            filters.append(
                Filter(self.model.modified_field, value, operator)
            )
            continue

        # "<operator>_<field>" prefix, e.g. "min_size" or "not_status".
        m = operator_pattern.match(param)
        if m:
            keyword, field = m.groups()
            operator = getattr(COMPARISON, keyword.upper())
        else:
            operator, field = COMPARISON.EQ, param

        if not self.is_known_field(field):
            error_msg = "Unknown filter field '{}'".format(param)
            error_details['description'] = error_msg
            raise_invalid(self.request, **error_details)

        if operator in (COMPARISON.IN, COMPARISON.EXCLUDE):
            # id values must all be strings, timestamps all integers.
            all_integers = all([isinstance(v, int) for v in value])
            all_strings = all([isinstance(v, str) for v in value])
            has_invalid_value = (
                (field == self.model.id_field and not all_strings) or
                (field == self.model.modified_field and not all_integers)
            )
            if has_invalid_value:
                raise_invalid(self.request, **error_details)

        if field == self.model.modified_field and value == '':
            raise_invalid(self.request, **error_details)

        filters.append(Filter(field, value, operator))

    return filters
def _extract_filters(self, queryparams=None): """Extracts filters from QueryString parameters.""" if not queryparams: queryparams = self.request.GET filters = [] for param, paramvalue in queryparams.items(): param = param.strip() error_details = { 'name': param, 'location': 'querystring', 'description': 'Invalid value for %s' % param } # Ignore specific fields if param.startswith('_') and param not in ('_since', '_to', '_before'): continue # Handle the _since specific filter. if param in ('_since', '_to', '_before'): value = native_value(paramvalue.strip('"')) if not isinstance(value, six.integer_types): raise_invalid(self.request, **error_details) if param == '_since': operator = COMPARISON.GT else: if param == '_to': message = ('_to is now deprecated, ' 'you should use _before instead') url = ('https://kinto.readthedocs.io/en/2.4.0/api/' 'resource.html#list-of-available-url-' 'parameters') send_alert(self.request, message, url) operator = COMPARISON.LT filters.append( Filter(self.model.modified_field, value, operator) ) continue m = re.match(r'^(min|max|not|lt|gt|in|exclude)_(\w+)$', param) if m: keyword, field = m.groups() operator = getattr(COMPARISON, keyword.upper()) else: operator, field = COMPARISON.EQ, param if not self.is_known_field(field): error_msg = "Unknown filter field '{0}'".format(param) error_details['description'] = error_msg raise_invalid(self.request, **error_details) value = native_value(paramvalue) if operator in (COMPARISON.IN, COMPARISON.EXCLUDE): value = set([native_value(v) for v in paramvalue.split(',')]) all_integers = all([isinstance(v, six.integer_types) for v in value]) all_strings = all([isinstance(v, six.text_type) for v in value]) has_invalid_value = ( (field == self.model.id_field and not all_strings) or (field == self.model.modified_field and not all_integers) ) if has_invalid_value: raise_invalid(self.request, **error_details) filters.append(Filter(field, value, operator)) return filters