async def tus_patch(self, *args, **kwargs):
    """Handle a TUS PATCH request: append one chunk to an in-progress
    resumable upload.

    Implements the core of the TUS 1.0 protocol: validates the client's
    ``Upload-Offset`` against the stored offset, appends the request
    body to storage, and finalizes the upload once the declared size has
    been fully received.

    Raises:
        HTTPPreconditionFailed: missing ``Upload-Offset`` header, or the
            bytes read do not match ``Content-Length``.
        HTTPConflict: client offset does not match the stored offset.
    """
    await self.dm.load()
    to_upload = None
    if "CONTENT-LENGTH" in self.request.headers:
        # header is optional, we'll be okay with unknown lengths...
        to_upload = int(self.request.headers["CONTENT-LENGTH"])

    if "UPLOAD-LENGTH" in self.request.headers:
        # Only honored when the upload was created with deferred length;
        # otherwise the size was fixed at creation time.
        if self.dm.get("deferred_length"):
            size = int(self.request.headers["UPLOAD-LENGTH"])
            await self.dm.update(size=size)

    if "UPLOAD-OFFSET" in self.request.headers:
        offset = int(self.request.headers["UPLOAD-OFFSET"])
    else:
        raise HTTPPreconditionFailed(
            content={"reason": "No upload-offset header"})

    ob_offset = self.dm.get("offset")
    if offset != ob_offset:
        # client and server disagree where the upload left off
        raise HTTPConflict(
            content={
                "reason": f"Current upload offset({offset}) does not match "
                          f"object offset {ob_offset}"
            })

    read_bytes = await self.file_storage_manager.append(
        self.dm, self._iterate_request_data(), offset)

    if to_upload and read_bytes != to_upload:  # pragma: no cover
        # check length matches if provided
        raise HTTPPreconditionFailed(
            content={
                "reason": "Upload size does not match what was provided"
            })
    await self.dm.update(offset=offset + read_bytes)

    headers = {
        "Upload-Offset": str(self.dm.get_offset()),
        "Tus-Resumable": "1.0.0",
        "Access-Control-Expose-Headers": ",".join(
            ["Upload-Offset", "Tus-Resumable", "Tus-Upload-Finished"]),
    }

    if self.dm.get("size") is not None and self.dm.get_offset() >= self.dm.get("size"):
        # all bytes received -- finalize storage and mark the upload done
        await self.file_storage_manager.finish(self.dm)
        await self.dm.finish()
        headers["Tus-Upload-Finished"] = "1"
    else:
        # partial upload: persist progress for the next PATCH
        await self.dm.save()
    return Response(headers=headers)
async def tus_patch(self, *args, **kwargs):
    """Handle a TUS PATCH request: append one chunk to an in-progress
    resumable upload (TUS 1.0 protocol).

    Raises:
        HTTPPreconditionFailed: missing ``Upload-Offset`` header, or the
            bytes read do not match ``Content-Length``.
        HTTPConflict: client offset does not match the stored offset.
    """
    await self.dm.load()
    to_upload = None
    if 'CONTENT-LENGTH' in self.request.headers:
        # header is optional, we'll be okay with unknown lengths...
        to_upload = int(self.request.headers['CONTENT-LENGTH'])

    if 'UPLOAD-LENGTH' in self.request.headers:
        # Only honored when the upload was created with deferred length;
        # otherwise the size was fixed at creation time.
        if self.dm.get('deferred_length'):
            size = int(self.request.headers['UPLOAD-LENGTH'])
            await self.dm.update(size=size)

    if 'UPLOAD-OFFSET' in self.request.headers:
        offset = int(self.request.headers['UPLOAD-OFFSET'])
    else:
        raise HTTPPreconditionFailed(
            content={'reason': 'No upload-offset header'})

    ob_offset = self.dm.get('offset')
    if offset != ob_offset:
        # client and server disagree where the upload left off
        raise HTTPConflict(
            content={
                'reason': f'Current upload offset({offset}) does not match '
                          f'object offset {ob_offset}'
            })

    read_bytes = await self.file_storage_manager.append(
        self.dm, self._iterate_request_data(), offset)

    if to_upload and read_bytes != to_upload:
        # check length matches if provided
        raise HTTPPreconditionFailed(
            content={
                'reason': 'Upload size does not match what was provided'
            })
    await self.dm.update(offset=offset + read_bytes)

    headers = {
        'Upload-Offset': str(self.dm.get_offset()),
        'Tus-Resumable': '1.0.0',
        'Access-Control-Expose-Headers': ','.join(
            ['Upload-Offset', 'Tus-Resumable', 'Tus-Upload-Finished'])
    }

    if self.dm.get('size') is not None and self.dm.get_offset() >= self.dm.get('size'):
        # all bytes received -- finalize storage and mark the upload done
        await self.file_storage_manager.finish(self.dm)
        await self.dm.finish()
        headers['Tus-Upload-Finished'] = '1'
    else:
        # partial upload: persist progress for the next PATCH
        await self.dm.save()
    return Response(headers=headers)
def _validate_parameters(self):
    """Validate query-string parameters against this service's OpenAPI
    ``parameters`` configuration using jsonschema.

    Only parameters declared with ``in: query`` that also carry a
    ``schema`` and a ``name`` are checked.  Query values arrive as
    strings, so numeric types are cast before validation; array
    parameters use ``getall`` to collect repeated keys.

    Raises:
        HTTPPreconditionFailed: a required parameter is missing, or a
            provided value fails schema validation.
    """
    if "parameters" in self.__config__:
        data = self.request.query
        for parameter in self.__config__["parameters"]:
            if parameter["in"] != "query" or "schema" not in parameter or "name" not in parameter:
                # only query parameters with a schema can be validated here
                continue
            name = parameter["name"]
            if parameter.get("required") and name not in data:
                raise HTTPPreconditionFailed(
                    content={
                        "reason": "Query schema validation error",
                        "message": "{} is required".format(parameter["name"]),
                        "path": [name],
                        "in": "query",
                        "parameter": name,
                        "schema": parameter["schema"],
                    })
            elif name not in data:
                # optional and absent: nothing to validate
                continue
            try:
                if parameter["schema"].get("type") == "array":
                    # repeated query keys make up the array value
                    value = data.getall(name)
                    if parameter["schema"].get("items", {}).get("type") in ("number", "integer"):
                        value = [_safe_int_or_float_cast(v) for v in value]
                else:
                    value = data[name]
                    if parameter["schema"].get("type") in ("number", "integer"):
                        value = _safe_int_or_float_cast(value)
                jsonschema.validate(instance=value, schema=parameter["schema"])
            except jsonschema.exceptions.ValidationError as e:
                raise HTTPPreconditionFailed(
                    content={
                        "reason": "json schema validation error",
                        "message": e.message,
                        "validator": e.validator,
                        "validator_value": e.validator_value,
                        "path": [i for i in e.path],
                        "schema_path": [i for i in e.schema_path],
                        "parameter": name,
                        "in": "query",
                        "schema": parameter["schema"],
                    })
def protect(self):
    """Refuse to touch an upload another client is actively using.

    A previous upload counts as active while its ``last_activity``
    timestamp is newer than the configured timeout; the check can be
    bypassed with the ``TUS-OVERRIDE-UPLOAD: 1`` header.
    """
    if "last_activity" not in self._data:
        return
    idle_for = time.time() - self._data["last_activity"]
    if idle_for >= self._timeout:
        # previous upload has gone stale; safe to take over
        return
    if self.request.headers.get("TUS-OVERRIDE-UPLOAD", "0") != "1":
        raise HTTPPreconditionFailed(content={"reason": "There is already an active tusupload"})
async def __call__(self):
    """PATCH handler: apply requested ``@behaviors`` then deserialize
    the payload onto the context object.

    On success fires before/after modification events and returns an
    empty 204 response.

    Raises:
        ErrorResponse: (412) no deserializer adapter is registered for
            this content type.
    """
    data = await self.get_data()
    behaviors = data.get("@behaviors", None)
    for behavior in behaviors or ():
        try:
            self.context.add_behavior(behavior)
        except (TypeError, ComponentLookupError):
            # NOTE(review): returned, not raised -- presumably the
            # framework renders HTTP exceptions as responses; confirm.
            return HTTPPreconditionFailed(
                content={"message": f"{behavior} is not a valid behavior",
                         "behavior": behavior}
            )
    deserializer = query_multi_adapter((self.context, self.request),
                                       IResourceDeserializeFromJson)
    if deserializer is None:
        raise ErrorResponse(
            "DeserializationError",
            "Cannot deserialize type {}".format(self.context.type_name),
            status=412,
            reason=error_reasons.DESERIALIZATION_FAILED,
        )
    await notify(BeforeObjectModifiedEvent(self.context, payload=data))
    await deserializer(data)
    await notify(ObjectModifiedEvent(self.context, payload=data))
    return Response(status=204)
async def get_data(self) -> Union[TDict, TList]:
    """Parse the request body as JSON, accepting only objects or arrays.

    Raises:
        HTTPPreconditionFailed: the body is valid JSON but not a
            dict/list (e.g. a bare string or number).
    """
    body = await self.request.json()
    if isinstance(body, (list, dict)):
        return body
    # Technically, strings are also valid json payload...
    raise HTTPPreconditionFailed(
        content={"reason": "Invalid json payload"})
async def finish(self, token: str, payload=None):
    """Complete the auth-validation flow encoded in *token*.

    Extracts and verifies the validation token, optionally validates
    *payload* against the task's configured JSON schema, runs the task
    executor and fires a ValidationEvent.

    Raises:
        HTTPUnauthorized: token is invalid or could not be extracted.
        HTTPPreconditionFailed: payload fails the task's JSON schema.
        HTTPNotImplemented: token names an unconfigured task.
    """
    data = await extract_validation_token(token)
    if data is None:
        raise HTTPUnauthorized()
    action = data.get("v_task")
    if action in app_settings["auth_validation_tasks"]:
        if "schema" in app_settings["auth_validation_tasks"][action]:
            schema = app_settings["auth_validation_tasks"][action]["schema"]
            try:
                jsonvalidate(instance=payload, schema=schema)
            except ValidationError as e:
                raise HTTPPreconditionFailed(
                    content={
                        "reason": "json schema validation error",
                        "message": e.message,
                        "validator": e.validator,
                        "validator_value": e.validator_value,
                        "path": [i for i in e.path],
                        "schema_path": [i for i in e.schema_path],
                        "schema": schema,
                    })
        # executor is configured as a dotted name; resolve and run it
        task = resolve_dotted_name(
            app_settings["auth_validation_tasks"][action]["executor"])
        result = await task.run(data, payload)
    else:
        logger.error(f"Invalid task {action}")
        raise HTTPNotImplemented()
    await notify(ValidationEvent(data))
    return result
async def __call__(self):
    """Iterate values bucket by bucket, returning one bucket per call
    plus a cursor pointing at the next one."""
    bucket_list = self.field.get(self.field.context)
    if bucket_list is None:
        # field never initialized: empty result, nothing to page
        return {"values": [], "total": 0, "cursor": None}

    bucket_index = 0
    raw_cursor = self.request.query.get("cursor")
    if raw_cursor is not None:
        try:
            bucket_index = int(raw_cursor)
        except ValueError:
            raise HTTPPreconditionFailed(content={
                "reason": "Invalid bucket type",
                "cursor": raw_cursor
            })
    annotation = await bucket_list.get_annotation(
        self.context, bucket_index, create=False)
    if annotation is None:
        raise HTTPGone(content={
            "reason": "No data found for bucket",
            "bidx": bucket_index
        })
    return {
        "values": annotation["items"],
        "total": len(bucket_list),
        "cursor": bucket_index + 1
    }
async def __call__(self):
    """Create a new container in this database.

    Validates the payload (``@type`` must be a registered container
    type, ``id`` is mandatory), rejects duplicate ids and unknown
    addons, creates the container, installs the requested addons and
    returns its location.

    Raises:
        HTTPNotFound: ``@type`` missing or not a container type.
        HTTPPreconditionFailed: ``id`` missing.
        HTTPConflict: a container with that id already exists.
    """
    data = await self.request.json()
    if "@type" not in data or data["@type"] not in app_settings["container_types"]:
        raise HTTPNotFound(
            content={
                "message": "can not create this type %s" % data["@type"]
            })
    if "id" not in data:
        raise HTTPPreconditionFailed(content={"message": "We need an id"})
    if not data.get("title"):
        # default the title to the id
        data["title"] = data["id"]
    if "description" not in data:
        data["description"] = ""
    value = await self.context.async_contains(data["id"])
    if value:
        # Already exist
        raise HTTPConflict(
            content={"message": "Container with id already exists"})
    install_addons = data.pop("@addons", None) or []
    for addon in install_addons:
        # validate addon list
        if addon not in app_settings["available_addons"]:
            return ErrorResponse(
                "RequiredParam",
                "Property '@addons' must refer to a valid addon",
                status=412,
                reason=error_reasons.INVALID_ID,
            )
    owner_id = get_authenticated_user_id()
    container = await create_container(self.context,
                                       data.pop("id"),
                                       container_type=data.pop("@type"),
                                       owner_id=owner_id,
                                       **data)
    # make the new container the active one for the rest of this task
    task_vars.container.set(container)
    annotations_container = get_adapter(container, IAnnotations)
    task_vars.registry.set(
        await annotations_container.async_get(REGISTRY_DATA_KEY))
    for addon in install_addons:
        await addons.install(container, addon)
    resp = {
        "@type": container.type_name,
        "id": container.id,
        "title": data["title"]
    }
    headers = {"Location": posixpath.join(self.request.path, container.id)}
    return Response(content=resp, headers=headers)
async def __call__(self):
    """Create a new container (request-attribute variant).

    Validates the payload, rejects duplicate ids and unknown addons,
    creates the container, binds it onto the request object and installs
    the requested addons.

    Raises:
        HTTPNotFound: ``@type`` missing or not a container type.
        HTTPPreconditionFailed: ``id`` missing.
        HTTPConflict: a container with that id already exists.
    """
    data = await self.request.json()
    if '@type' not in data or data['@type'] not in app_settings['container_types']:
        raise HTTPNotFound(
            content={
                'message': 'can not create this type %s' % data['@type']
            })
    if 'id' not in data:
        raise HTTPPreconditionFailed(content={'message': 'We need an id'})
    if not data.get('title'):
        # default the title to the id
        data['title'] = data['id']
    if 'description' not in data:
        data['description'] = ''
    value = await self.context.async_contains(data['id'])
    if value:
        # Already exist
        raise HTTPConflict(
            content={'message': 'Container with id already exists'})
    install_addons = data.pop('@addons', None) or []
    for addon in install_addons:
        # validate addon list
        if addon not in app_settings['available_addons']:
            return ErrorResponse(
                'RequiredParam',
                "Property '@addons' must refer to a valid addon",
                status=412,
                reason=error_reasons.INVALID_ID)
    owner_id = get_authenticated_user_id(self.request)
    container = await create_container(self.context,
                                       data.pop('id'),
                                       container_type=data.pop('@type'),
                                       owner_id=owner_id,
                                       **data)
    # expose the new container on the request for downstream code
    self.request._container_id = container.__name__
    self.request.container = container
    annotations_container = get_adapter(container, IAnnotations)
    self.request.container_settings = await annotations_container.async_get(
        REGISTRY_DATA_KEY)
    for addon in install_addons:
        await addons.install(container, addon)
    resp = {
        '@type': container.type_name,
        'id': container.id,
        'title': data['title']
    }
    headers = {'Location': posixpath.join(self.request.path, container.id)}
    return Response(content=resp, headers=headers)
async def append(self, dm, iterable, offset) -> int:
    """Append chunks from *iterable* to the resumable upload and return
    the number of bytes written.

    After each chunk, cross-checks the running offset against the byte
    range Google reports for the resumable upload session.

    Raises:
        HTTPPreconditionFailed: our offset and Google's received range
            disagree (the upload is out of sync).
    """
    count = 0
    async for chunk in iterable:
        resp = await self._append(dm, chunk, offset)
        size = len(chunk)
        count += size
        offset += len(chunk)

        if resp.status == 308:
            # 308: google echoes back the byte range stored so far;
            # verify we're on track with google's resumable api...
            range_header = resp.headers["Range"]
            if offset - 1 != int(range_header.split("-")[-1]):
                # range header is the byte range google has received,
                # which is different from the total size--off by one
                raise HTTPPreconditionFailed(
                    content={
                        "reason": f"Guillotina and google cloud storage "
                                  f"offsets do not match. Google: "
                                  f"{range_header}, TUS(offset): {offset}"
                    })
        elif resp.status in [200, 201]:
            # file manager will double check offsets and sizes match
            break
    return count
def _validate_parameters(self):
    """Validate declared query parameters for this service.

    For each configured parameter with ``in: query``, checks that
    required parameters are present and that values declared as
    ``integer``/``float`` can actually be cast to numbers.

    Raises:
        HTTPPreconditionFailed: a value cannot be cast to its declared
            numeric type, or a required parameter is missing.
    """
    if "parameters" in self.__config__:
        data = self.request.url.query
        for parameter in self.__config__["parameters"]:
            if parameter["in"] == "query":
                # Bug fix: previously an *optional* parameter absent from
                # the query string raised an uncaught KeyError on the
                # data[...] lookup below; absent parameters are now
                # skipped and only the `required` check reports them.
                if "schema" in parameter and "name" in parameter \
                        and parameter["name"] in data:
                    if parameter["schema"]["type"] == "integer":
                        try:
                            int(data[parameter["name"]])
                        except ValueError:
                            raise HTTPPreconditionFailed(
                                content={
                                    "reason": "Schema validation error",
                                    "message": "can not convert {} to Int".format(
                                        data[parameter["name"]]),
                                })
                    elif parameter["schema"]["type"] == "float":
                        # NOTE(review): JSON Schema spells this type
                        # "number"; "float" kept for backward
                        # compatibility with existing configs.
                        try:
                            float(data[parameter["name"]])
                        except ValueError:
                            raise HTTPPreconditionFailed(
                                content={
                                    "reason": "Schema validation error",
                                    "message": "can not convert {} to Float".format(
                                        data[parameter["name"]]),
                                })
                    else:
                        # other types (string, boolean, ...) accepted as-is
                        pass
                try:
                    if parameter.get("required", False) and parameter["name"] not in data:
                        raise HTTPPreconditionFailed(
                            content={
                                "reason": "Schema validation error",
                                "message": "{} is required".format(
                                    parameter["name"]),
                            })
                except KeyError:
                    # a required parameter definition without a `name`
                    logger.warning("`required` is a mandatory field",
                                   exc_info=True)
async def upload(self):
    """Handle a one-shot (non-TUS) file upload.

    Upload metadata comes from ``X-UPLOAD-*`` headers; size falls back
    to ``Content-Length`` and the filename to a random hex id.

    Raises:
        HTTPPreconditionFailed: no size information supplied, or the
            bytes read do not match the declared size.
    """
    await self.dm.load()
    md5 = extension = size = None
    if "X-UPLOAD-MD5HASH" in self.request.headers:
        md5 = self.request.headers["X-UPLOAD-MD5HASH"]
    if "X-UPLOAD-EXTENSION" in self.request.headers:
        extension = self.request.headers["X-UPLOAD-EXTENSION"]
    if "X-UPLOAD-SIZE" in self.request.headers:
        size = int(self.request.headers["X-UPLOAD-SIZE"])
    else:
        if "Content-Length" in self.request.headers:
            size = int(self.request.headers["Content-Length"])
        else:  # pragma: no cover
            raise HTTPPreconditionFailed(content={
                "reason": "x-upload-size or content-length header needed"
            })
    if "X-UPLOAD-FILENAME" in self.request.headers:
        filename = self.request.headers["X-UPLOAD-FILENAME"]
    elif "X-UPLOAD-FILENAME-B64" in self.request.headers:
        # base64-encoded variant supports non-ASCII names in headers
        filename = base64.b64decode(
            self.request.headers["X-UPLOAD-FILENAME-B64"]).decode("utf-8")
    else:
        filename = uuid.uuid4().hex
    await self.dm.start()
    await self.dm.update(content_type=self.request.content_type,
                         md5=md5,
                         filename=filename,
                         extension=extension,
                         size=size)
    await self.file_storage_manager.start(self.dm)
    read_bytes = await self.file_storage_manager.append(
        self.dm, self._iterate_request_data(), 0)
    if read_bytes != size:
        raise HTTPPreconditionFailed(
            content={
                "reason": "Upload size does not match what was provided"
            })
    await self.file_storage_manager.finish(self.dm)
    await self.dm.finish()
async def __call__(self):
    """Create a new container (legacy variant using ``create_content``
    directly and assigning the Owner role by hand).

    Raises:
        HTTPNotFound: ``@type`` missing or not a container type.
        HTTPPreconditionFailed: ``id`` missing.
        HTTPConflict: a container with that id already exists.
    """
    data = await self.request.json()
    if '@type' not in data or data['@type'] not in app_settings['container_types']:
        raise HTTPNotFound(
            content={
                'message': 'can not create this type %s' % data['@type']
            })
    if 'id' not in data:
        raise HTTPPreconditionFailed(content={'message': 'We need an id'})
    if not data.get('title'):
        # default the title to the id
        data['title'] = data['id']
    if 'description' not in data:
        data['description'] = ''
    value = await self.context.async_contains(data['id'])
    if value:
        # Already exist
        raise HTTPConflict(
            content={'message': 'Container with id already exists'})
    container = await create_content(data['@type'],
                                     id=data['id'],
                                     title=data['title'],
                                     description=data['description'])
    # Special case we don't want the parent pointer
    container.__name__ = data['id']
    await self.context.async_set(data['id'], container)
    await container.install()
    # expose the new container on the request for downstream code
    self.request._container_id = container.__name__
    self.request.container = container
    user = get_authenticated_user_id(self.request)
    # Local Roles assign owner as the creator user
    roleperm = IPrincipalRoleManager(container)
    roleperm.assign_role_to_principal('guillotina.Owner', user)
    await notify(
        ObjectAddedEvent(container, self.context, container.__name__,
                         payload=data))
    resp = {
        '@type': data['@type'],
        'id': data['id'],
        'title': data['title']
    }
    headers = {'Location': posixpath.join(self.request.path, data['id'])}
    return Response(content=resp, headers=headers)
async def __call__(self):
    """Iterate values bucket by bucket"""
    val = self.field.get(self.field.context)
    if val is None:
        # field never initialized: empty result, nothing to page
        return {"values": {}, "total": 0, "cursor": None}
    bidx = 0
    if "cursor" in self.request.query:
        cursor = self.request.query["cursor"]
        try:
            bidx = int(cursor)
        except ValueError:
            raise HTTPPreconditionFailed(content={
                "reason": "Invalid bucket type",
                "cursor": cursor
            })
    try:
        bucket = val.buckets[bidx]
    except IndexError:
        raise HTTPPreconditionFailed(content={
            "reason": "Invalid bucket, not found",
            "bidx": bidx
        })
    annotation = await val.get_annotation(self.context,
                                          anno_id=bucket["id"],
                                          create=False)
    if annotation is None:
        raise HTTPGone(content={
            "reason": "No data found for bucket",
            "bidx": bidx
        })
    cursor = bidx + 1
    try:
        # probe the next bucket; cursor becomes None on the last page
        val.buckets[cursor]
    except IndexError:
        cursor = None
    return {
        "values": dict(zip(annotation["keys"], annotation["values"])),
        "total": len(val),
        "cursor": cursor,
    }
async def __call__(self):
    """Set or reset a user's password.

    Two flows:
    - the authenticated user changes their own password (old + new
      password in the payload);
    - a reset for another user, gated by recaptcha, which emails a
      validation token instead of changing anything directly.

    Raises:
        HTTPNotAcceptable: user object does not support set_password,
            or no validation utility is configured.
        HTTPUnauthorized: recaptcha failed, or the target user is
            unknown.
        HTTPPreconditionFailed: target user has no email to send the
            reset token to.
    """
    user_id: str = self.request.matchdict["user"]
    actual_user = get_authenticated_user()
    if actual_user.id == user_id:
        # Self setting password
        # Payload : {
        #   'old_password': '******',
        #   'new_password': '******',
        # }
        data = await self.request.json()
        try:
            await actual_user.set_password(
                data.get("new_password", None),
                old_password=data.get("old_password", None)
            )
        except AttributeError:
            raise HTTPNotAcceptable()
    else:
        # We validate with recaptcha
        validator = RecaptchaValidator()
        status = await validator.validate()
        if status is False:
            raise HTTPUnauthorized(content={"text": "Invalid validation"})
        # We need to validate is a valid user
        user = await find_user({"id": user_id})
        if user is None:
            raise HTTPUnauthorized(content={"text": "Invalid operation"})
        # We need to validate is a valid user
        try:
            email = user.properties.get("email", user.email)
        except AttributeError:
            email = None
        if email is None and "@" in user_id:
            # the user id itself looks like an email address
            email = user_id
        if email is None:
            raise HTTPPreconditionFailed(content={"reason": "User without mail configured"})
        # We need to generate a token and send to user email
        validation_utility = get_utility(IAuthValidationUtility)
        if validation_utility is not None:
            redirect_url = self.request.query.get("redirect_url", None)
            await validation_utility.start(
                as_user=user_id,
                from_user=actual_user.id,
                email=email,
                task_description="Reset password",
                task_id="reset_password",
                context_description=self.context.title,
                redirect_url=redirect_url,
            )
        else:
            raise HTTPNotAcceptable()
def protect(self):
    """Guard against clobbering an upload that is still in progress.

    Raises HTTPPreconditionFailed unless the previous upload has been
    idle for at least ``self._timeout`` seconds or the client sends the
    ``TUS-OVERRIDE-UPLOAD: 1`` header.
    """
    if 'last_activity' not in self._data:
        return
    still_active = (time.time() - self._data['last_activity']) < self._timeout
    overridden = self.request.headers.get('TUS-OVERRIDE-UPLOAD', '0') == '1'
    if still_active and not overridden:
        raise HTTPPreconditionFailed(
            content={
                'reason': 'There is already an active tusupload'
            })
async def __call__(self):
    """Drop the authenticated user's server-side session (logout).

    Raises:
        HTTPNotAcceptable: no session manager utility is configured.
        HTTPPreconditionFailed: the JWT carried no session id.
    """
    session_manager = query_utility(ISessionManagerUtility)
    if session_manager is None:
        raise HTTPNotAcceptable()
    user = get_authenticated_user()
    try:
        await session_manager.drop_session(user.id, user._v_session)
    except AttributeError:
        # no _v_session on the user: the token never carried a session
        raise HTTPPreconditionFailed("Session manager configured but no session on jwt")
async def _iter_copyable_content(context, request):
    """Async-generate the objects named in the request's ``source``,
    checking the DuplicateContent permission on each.

    ``source`` entries may be absolute URLs inside the container,
    container-relative paths, or content UIDs.

    Raises:
        HTTPPreconditionFailed: missing source, unresolvable item, or
            insufficient permission.
    """
    policy = get_security_policy()
    data = await request.json()
    if 'source' not in data:
        raise HTTPPreconditionFailed(content={'reason': 'No source'})
    source = data['source']
    if not isinstance(source, list):
        # allow a single item as a convenience
        source = [source]
    container = find_container(context)
    container_url = get_object_url(container)
    for item in source:
        if item.startswith(container_url):
            # absolute URL within this container -> strip to a path
            path = item[len(container_url):]
            ob = await navigate_to(container, path.strip('/'))
            if ob is None:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        elif '/' in item:
            # container-relative path
            ob = await navigate_to(container, item.strip('/'))
            if ob is None:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        else:
            # bare id -> treat as a content UID
            try:
                ob = await get_object_by_uid(item)
            except KeyError:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        if not policy.check_permission('guillotina.DuplicateContent', ob):
            raise HTTPPreconditionFailed(content={
                'reason': 'Invalid permission',
                'source': item
            })
        yield ob
async def _update_users(group_id, users_added, users_removed):
    """Sync membership changes on a group into each affected user's
    ``user_groups`` field.

    Raises:
        HTTPPreconditionFailed: a referenced user does not exist.
    """
    container = get_current_container()

    async def _fetch(user_id):
        # resolve the user object or translate the miss into a 412
        try:
            return await navigate_to(container, f"users/{user_id}")
        except KeyError:
            raise HTTPPreconditionFailed(
                content={"reason": f"inexistent user: {user_id}"})

    for user_id in users_added:
        member = await _fetch(user_id)
        # NOTE(review): membership is tested on `groups` but mutated on
        # `user_groups`; presumably `groups` is derived from it -- confirm.
        if group_id not in member.groups:
            member.user_groups.append(group_id)
            member.register()

    for user_id in users_removed:
        member = await _fetch(user_id)
        if group_id in member.groups:
            member.user_groups.remove(group_id)
            member.register()
async def _update_groups(user_id, groups_added, groups_removed):
    """Sync membership changes on a user into each affected group's
    ``users`` list.

    Raises:
        HTTPPreconditionFailed: a referenced group does not exist.
    """
    container = get_current_container()

    async def _fetch(group_id):
        # resolve the group object or translate the miss into a 412
        try:
            return await navigate_to(container, f"groups/{group_id}")
        except KeyError:
            raise HTTPPreconditionFailed(
                content={"reason": f"inexistent group: {group_id}"})

    for group_id in groups_added:
        grp = await _fetch(group_id)
        if user_id not in grp.users:
            grp.users.append(user_id)
            grp.register()

    for group_id in groups_removed:
        grp = await _fetch(group_id)
        if user_id in grp.users:
            grp.users.remove(user_id)
            grp.register()
async def on_update_groups(group: Group, event: ObjectModifiedEvent) -> None:
    # Keep group.users and user.user_groups in sync
    container = get_current_container()
    for user_id in group.users or []:
        try:
            # Get the user
            member = await navigate_to(container, f"users/{user_id}")
        except KeyError:
            raise HTTPPreconditionFailed(content={"reason": f"inexistent user: {user_id}"})
        # Add group to user groups field
        if group.id not in member.user_groups:
            member.user_groups.append(group.id)
            member.register()
async def get_user_info(context, request):
    """Return the stored profile of the authenticated user, with the
    password hash stripped out."""
    userid = get_authenticated_user_id(request)
    if userid == 'root':
        # root is a configuration-level account, not a stored user
        raise HTTPBadRequest(content={
            'reason': 'not a valid user'
        })
    user = await utils.find_user(id=userid)
    if user is None:
        raise HTTPPreconditionFailed(content={
            'reason': 'Not a valid user'
        })
    # never expose the password hash
    user.pop('password', None)
    return user
async def __call__(self):
    """Execute a batch of sub-requests and return their results.

    The request body is a JSON list of messages; an absent or invalid
    body is treated as an empty batch.

    Returns:
        A list of per-message results, or an HTTPPreconditionFailed
        response when the batch size reaches the configured
        ``max_batch_size``.
    """
    results = []
    try:
        messages = await self.request.json()
    except json.JSONDecodeError:
        # no request body present
        messages = []
    if len(messages) >= app_settings["max_batch_size"]:
        # NOTE: ``>=`` makes the limit exclusive (a batch of exactly
        # max_batch_size is rejected) -- preserved as-is.
        return HTTPPreconditionFailed(
            content={
                # bug fix: message previously read "max match size limit"
                "reason": "Exceeded max batch size limit",
                "limit": app_settings["max_batch_size"],
            })
    for message in messages:
        results.append(await self.handle(message))
    return results
async def upload(self):
    """Handle a one-shot (non-TUS) file upload.

    Reads metadata from ``X-UPLOAD-*`` headers, streams the request
    body into storage, and verifies the number of bytes received
    matches the declared size.

    Raises:
        HTTPPreconditionFailed: no size information supplied, or the
            bytes read do not match the declared size.
    """
    await self.dm.load()
    md5 = extension = size = None
    if 'X-UPLOAD-MD5HASH' in self.request.headers:
        md5 = self.request.headers['X-UPLOAD-MD5HASH']
    if 'X-UPLOAD-EXTENSION' in self.request.headers:
        extension = self.request.headers['X-UPLOAD-EXTENSION']
    if 'X-UPLOAD-SIZE' in self.request.headers:
        size = int(self.request.headers['X-UPLOAD-SIZE'])
    else:
        if 'Content-Length' in self.request.headers:
            size = int(self.request.headers['Content-Length'])
        else:
            # bug fix: this was a bare AttributeError, which surfaced as
            # a 500; a missing size header is a client error (412),
            # matching the equivalent check in the sibling upload
            # implementation.
            raise HTTPPreconditionFailed(content={
                'reason': 'x-upload-size or content-length header needed'
            })
    if 'X-UPLOAD-FILENAME' in self.request.headers:
        filename = self.request.headers['X-UPLOAD-FILENAME']
    elif 'X-UPLOAD-FILENAME-B64' in self.request.headers:
        # base64-encoded variant supports non-ASCII names in headers
        filename = base64.b64decode(
            self.request.headers['X-UPLOAD-FILENAME-B64']).decode("utf-8")
    else:
        filename = uuid.uuid4().hex
    await self.dm.start()
    await self.dm.update(content_type=self.request.content_type,
                         md5=md5,
                         filename=filename,
                         extension=extension,
                         size=size)
    await self.file_storage_manager.start(self.dm)
    read_bytes = await self.file_storage_manager.append(
        self.dm, self._iterate_request_data(), 0)
    if read_bytes != size:
        raise HTTPPreconditionFailed(
            content={
                'reason': 'Upload size does not match what was provided'
            })
    await self.file_storage_manager.finish(self.dm)
    await self.dm.finish()
async def _call_validate(self):
    """Validate query parameters and the JSON body against the service's
    configured schema, then dispatch to the original handler.

    Raises:
        HTTPPreconditionFailed: the body fails schema validation.
    """
    self._validate_parameters()
    schema, validator = self.__class__._get_validator()
    if not validator or validator == _sentinal:
        # no body schema configured for this service
        return await self._call_original()
    try:
        payload = await self.request.json()
        validator.validate(payload)
    except jsonschema.exceptions.ValidationError as e:
        raise HTTPPreconditionFailed(
            content={
                "reason": "json schema validation error",
                "message": e.message,
                "validator": e.validator,
                "validator_value": e.validator_value,
                "path": list(e.path),
                "schema_path": list(e.schema_path),
                "schema": schema,
            })
    return await self._call_original()
async def do_action(self, action, comments):
    """Execute a workflow transition on the context.

    Checks the action is available from the current state and that the
    user holds the transition's permission, applies any sharing changes
    configured for the new state, records a history entry and fires a
    WorkflowChangedEvent.

    Returns:
        The history entry that was appended.

    Raises:
        HTTPPreconditionFailed: the action is not available.
        HTTPUnauthorized: the user lacks the transition permission.
    """
    available_actions = self.actions
    if action not in available_actions:
        raise HTTPPreconditionFailed(
            content={"reason": "Unavailable action"})
    action_def = available_actions[action]
    policy = get_security_policy()
    if "check_permission" in action_def and not policy.check_permission(
            action_def["check_permission"], self.context):
        raise HTTPUnauthorized()
    # Change permission
    new_state = action_def["to"]
    if "set_permission" in self.states[new_state]:
        await apply_sharing(self.context,
                            self.states[new_state]["set_permission"])
    # Write history
    user = get_authenticated_user_id()
    history = {
        "actor": user,
        "comments": comments,
        "time": datetime.datetime.now(),
        "title": action_def["title"],
        "type": "workflow",
        "data": {
            "action": action,
            "review_state": new_state
        },
    }
    workflow_behavior = IWorkflowBehavior(self.context)
    workflow_behavior.review_state = new_state
    workflow_behavior.history.append(history)
    workflow_behavior.register()
    await notify(
        WorkflowChangedEvent(self.context, self, action, comments))
    return history
async def post_user(context, request):
    """Create a new user from a JSON payload validated against the
    HydraUser schema.

    Raises:
        HTTPPreconditionFailed: the payload fails schema validation.
        HTTPConflict: a user with that id already exists.
    """
    payload = await request.json()
    try:
        jsonschema.validate(
            payload, app_settings['json_schema_definitions']['HydraUser'])
    except (jsonschema.ValidationError, jsonschema.SchemaError) as e:
        raise HTTPPreconditionFailed(content={
            'message': e.message
        })
    try:
        result = await utils.create_user(**payload)
    except asyncpg.exceptions.UniqueViolationError:
        raise HTTPConflict(content={
            'reason': 'user already exists'
        })
    # never echo the password hash back to the client
    del result['password']
    result['@id'] = str(request.url.with_path(f'/@users/{result["id"]}'))
    return result
async def edit_user(context, request):
    """Update the authenticated user's own profile.

    The user id is taken from authentication, never from the payload;
    ``@id`` and ``username`` are stripped so they cannot be changed.
    """
    data = await request.json()
    userid = get_authenticated_user_id(request)
    if userid == 'root':
        # root is a configuration-level account, not a stored user
        raise HTTPBadRequest(content={
            'reason': 'not a valid user'
        })
    if await utils.find_user(id=userid) is None:
        raise HTTPPreconditionFailed(content={
            'reason': 'Not a valid user'
        })
    data['id'] = userid
    data.pop('@id', None)
    data.pop('username', None)
    await utils.update_user(**data)
async def __call__(self):
    """Issue a refreshed JWT for the authenticated user.

    When a session manager utility is configured, the server-side
    session is refreshed as well and its id is embedded in the token.

    Raises:
        HTTPPreconditionFailed: a session manager is configured but the
            current JWT carries no session.
    """
    user = get_authenticated_user()
    data = {
        "iat": datetime.utcnow(),
        "exp": datetime.utcnow() + timedelta(
            seconds=app_settings["jwt"]["token_expiration"]),
        "id": user.id,
    }
    session_manager = query_utility(ISessionManagerUtility)
    if session_manager is not None:
        try:
            session = await session_manager.refresh_session(
                user.id, user._v_session)
            data["session"] = session
        except AttributeError:
            raise HTTPPreconditionFailed("Session manager configured but no session on jwt")
    # NOTE(review): .decode() assumes PyJWT < 2.0 where encode() returns
    # bytes -- confirm the pinned version.
    jwt_token = jwt.encode(
        data, app_settings["jwt"]["secret"],
        algorithm=app_settings["jwt"]["algorithm"]
    ).decode("utf-8")
    await notify(UserRefreshToken(user, jwt_token))
    return {"exp": data["exp"], "token": jwt_token}