def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: if hasattr(answer, "encode"): logger.debug("Response content:\n%s", answer) headers["Content-Type"] += "; charset=%s" % self.encoding answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in environ.get("HTTP_ACCEPT_ENCODING", "").split(",") if encoding.strip()] if "gzip" in accept_encoding: zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS) answer = zcomp.compress(answer) + zcomp.flush() headers["Content-Encoding"] = "gzip" headers["Content-Length"] = str(len(answer)) # Add extra headers set in configuration if self.configuration.has_section("headers"): for key in self.configuration.options("headers"): headers[key] = self.configuration.get("headers", key) # Start response time_end = datetime.datetime.now() status = "%d %s" % ( status, client.responses.get(status, "Unknown")) logger.info( "%s response status for %r%s in %.3f seconds: %s", environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo, (time_end - time_begin).total_seconds(), status) # Return response content return status, list(headers.items()), [answer] if answer else []
def do_PROPFIND(self, environ, base_prefix, path, user):
    """Manage PROPFIND request."""
    if not self.access(user, path, "r"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        # Body present but not valid XML
        logger.warning(
            "Bad PROPFIND request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    with self.Collection.acquire_lock("r", user):
        items = self.Collection.discover(
            path, environ.get("HTTP_DEPTH", "0"))
        # take root item for rights checking
        item = next(items, None)
        if not item:
            return httputils.NOT_FOUND
        if not self.access(user, path, "r", item):
            return httputils.NOT_ALLOWED
        # put item back
        items = itertools.chain([item], items)
        # Filter out everything the user may not read or write
        allowed_items = self._collect_allowed_items(items, user)
        headers = {"DAV": httputils.DAV_HEADERS,
                   "Content-Type": "text/xml; charset=%s" % self.encoding}
        status, xml_answer = xml_propfind(
            base_prefix, path, xml_content, allowed_items, user)
        if status == client.FORBIDDEN:
            return httputils.NOT_ALLOWED
        return status, headers, self.write_xml_content(xml_answer)
def do_REPORT(self, environ, base_prefix, path, user):
    """Manage REPORT request."""
    if not self.access(user, path, "r"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad REPORT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # ExitStack so xml_report can release the storage lock early via
    # the lock_stack.close callback passed below
    with contextlib.ExitStack() as lock_stack:
        lock_stack.enter_context(self.Collection.acquire_lock("r", user))
        item = next(self.Collection.discover(path), None)
        if not item:
            return httputils.NOT_FOUND
        if not self.access(user, path, "r", item):
            return httputils.NOT_ALLOWED
        # REPORT always operates on a collection; resolve the item's
        # parent collection when the path names a single item
        if isinstance(item, storage.BaseCollection):
            collection = item
        else:
            collection = item.collection
        headers = {"Content-Type": "text/xml; charset=%s" % self.encoding}
        try:
            status, xml_answer = xml_report(
                base_prefix, path, xml_content, collection,
                lock_stack.close)
        except ValueError as e:
            logger.warning(
                "Bad REPORT request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return (status, headers, self.write_xml_content(xml_answer))
def _collect_allowed_items(self, items, user):
    """Get items from request that user is allowed to access."""
    for entry in items:
        # Work out the rights path and a human-readable target name
        if isinstance(entry, storage.BaseCollection):
            rights_path = pathutils.unstrip_path(entry.path, True)
            if entry.get_meta("tag"):
                granted = self.Rights.authorized(user, rights_path, "rw")
                target = "collection with tag %r" % entry.path
            else:
                granted = self.Rights.authorized(user, rights_path, "RW")
                target = "collection %r" % entry.path
        else:
            rights_path = pathutils.unstrip_path(entry.collection.path, True)
            granted = self.Rights.authorized(user, rights_path, "rw")
            target = "item %r from %r" % (entry.href, entry.collection.path)
        # Collapse granted rights to a single access level
        if rights.intersect_permissions(granted, "Ww"):
            permission, status = "w", "write"
        elif rights.intersect_permissions(granted, "Rr"):
            permission, status = "r", "read"
        else:
            permission, status = "", "NO"
        logger.debug(
            "%s has %s access to %s",
            repr(user) if user else "anonymous user", status, target)
        if permission:
            yield entry, permission
def do_PROPPATCH(self, environ, base_prefix, path, user):
    """Manage PROPPATCH request."""
    if not self.access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if not item:
            return httputils.NOT_FOUND
        if not self.access(user, path, "w", item):
            return httputils.NOT_ALLOWED
        # Properties can only be patched on collections, not items
        if not isinstance(item, storage.BaseCollection):
            return httputils.FORBIDDEN
        headers = {"DAV": httputils.DAV_HEADERS,
                   "Content-Type": "text/xml; charset=%s" % self.encoding}
        try:
            xml_answer = xml_proppatch(base_prefix, path, xml_content,
                                       item)
        except ValueError as e:
            logger.warning(
                "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return (client.MULTI_STATUS, headers,
                self.write_xml_content(xml_answer))
def get(self, environ, base_prefix, path, user):
    """Serve a static file from the web folder for a GET on /.web/...

    Returns a (status, headers, answer) triple.
    """
    assert path == "/.web" or path.startswith("/.web/")
    assert pathutils.sanitize_path(path) == path
    try:
        filesystem_path = pathutils.path_to_filesystem(
            self.folder, path[len("/.web"):].strip("/"))
    except ValueError as e:
        # Unsafe path (e.g. traversal attempt) maps to 404, not 403
        logger.debug("Web content with unsafe path %r requested: %s",
                     path, e, exc_info=True)
        return httputils.NOT_FOUND
    if os.path.isdir(filesystem_path) and not path.endswith("/"):
        # Redirect directory requests to the trailing-slash form
        location = posixpath.basename(path) + "/"
        return (client.FOUND,
                {"Location": location, "Content-Type": "text/plain"},
                "Redirected to %s" % location)
    if os.path.isdir(filesystem_path):
        filesystem_path = os.path.join(filesystem_path, "index.html")
    if not os.path.isfile(filesystem_path):
        return httputils.NOT_FOUND
    content_type = MIMETYPES.get(
        os.path.splitext(filesystem_path)[1].lower(), FALLBACK_MIMETYPE)
    with open(filesystem_path, "rb") as f:
        answer = f.read()
        # fstat on the open fd avoids a race with file replacement
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.fstat(f.fileno()).st_mtime))
    headers = {
        "Content-Type": content_type,
        "Last-Modified": last_modified}
    return client.OK, headers, answer
def write_xml_content(self, xml_content):
    """Serialize an XML element tree into response body bytes."""
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Response content:\n%s",
                     xmlutils.pretty_xml(xml_content))
    buffer = io.BytesIO()
    tree = ET.ElementTree(xml_content)
    tree.write(buffer, encoding=self.encoding, xml_declaration=True)
    return buffer.getvalue()
def _list(self):
    """Yield the hrefs of all items stored in this collection."""
    for dir_entry in os.scandir(self._filesystem_path):
        if not dir_entry.is_file():
            continue
        href = dir_entry.name
        if pathutils.is_safe_filesystem_path_component(href):
            yield href
        elif not href.startswith(".Radicale"):
            # Only report unsafe names that are not internal files
            logger.debug("Skipping item %r in %r", href, self.path)
def discover(cls, path, depth="0", child_context_manager=(
        lambda path, href=None: contextlib.ExitStack())):
    """Discover the collection or item at ``path``.

    Yields the target first; with depth != "0" also yields the
    collection's items and child collections.  Each child is yielded
    inside ``child_context_manager`` so callers can wrap per-child
    error handling around the generator's resumption points.
    """
    # Path should already be sanitized
    sane_path = pathutils.strip_path(path)
    attributes = sane_path.split("/") if sane_path else []
    folder = cls._get_collection_root_folder()
    # Create the root collection
    cls._makedirs_synced(folder)
    try:
        filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
    except ValueError as e:
        # Path is unsafe
        logger.debug("Unsafe path %r requested from storage: %s",
                     sane_path, e, exc_info=True)
        return
    # Check if the path exists and if it leads to a collection or an item
    if not os.path.isdir(filesystem_path):
        if attributes and os.path.isfile(filesystem_path):
            # Last path component names an item inside its parent
            href = attributes.pop()
        else:
            return
    else:
        href = None
    sane_path = "/".join(attributes)
    collection = cls(pathutils.unstrip_path(sane_path, True))
    if href:
        yield collection._get(href)
        return
    yield collection
    if depth == "0":
        return
    # Depth > 0: yield items, then child collections
    for href in collection._list():
        with child_context_manager(sane_path, href):
            yield collection._get(href)
    for entry in os.scandir(filesystem_path):
        if not entry.is_dir():
            continue
        href = entry.name
        if not pathutils.is_safe_filesystem_path_component(href):
            if not href.startswith(".Radicale"):
                logger.debug("Skipping collection %r in %r",
                             href, sane_path)
            continue
        sane_child_path = posixpath.join(sane_path, href)
        child_path = pathutils.unstrip_path(sane_child_path, True)
        with child_context_manager(sane_child_path):
            yield cls(child_path)
def read_xml_content(self, environ):
    """Decode the request body and parse it as XML (None when empty).

    Raises RuntimeError when the body is not valid XML.
    """
    body = self.decode(self.read_raw_content(environ), environ)
    if not body:
        return None
    try:
        parsed = ET.fromstring(body)
    except ET.ParseError as e:
        logger.debug("Request content (Invalid XML):\n%s", body)
        raise RuntimeError("Failed to parse XML: %s" % e) from e
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Request content:\n%s",
                     xmlutils.pretty_xml(parsed))
    return parsed
def do_MKCOL(self, environ, base_prefix, path, user):
    """Manage MKCOL request."""
    permissions = self.rights.authorized(user, path, "Ww")
    if not permissions:
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning("Bad MKCOL request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    try:
        radicale_item.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning("Bad MKCOL request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    # "w" covers tagged (calendar/address book) collections,
    # "W" plain WebDAV collections
    if (props.get("tag") and "w" not in permissions or
            not props.get("tag") and "W" not in permissions):
        return httputils.NOT_ALLOWED
    with self.storage.acquire_lock("w", user):
        item = next(self.storage.discover(path), None)
        if item:
            # Target already exists
            return httputils.METHOD_NOT_ALLOWED
        parent_path = pathutils.unstrip_path(
            posixpath.dirname(pathutils.strip_path(path)), True)
        parent_item = next(self.storage.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        # New collections may not be nested inside tagged collections
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return httputils.FORBIDDEN
        try:
            self.storage.create_collection(path, props=props)
        except ValueError as e:
            logger.warning("Bad MKCOL request on %r: %s",
                           path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return client.CREATED, {}, None
def verify(cls):
    """Walk the whole storage and check consistency.

    Returns True when no item or collection errors were found.
    """
    item_errors = collection_errors = 0

    @contextlib.contextmanager
    def exception_cm(sane_path, href=None):
        # Count and log errors instead of aborting the walk
        nonlocal item_errors, collection_errors
        try:
            yield
        except Exception as e:
            if href:
                item_errors += 1
                name = "item %r in %r" % (href, sane_path)
            else:
                collection_errors += 1
                name = "collection %r" % sane_path
            logger.error("Invalid %s: %s", name, e, exc_info=True)

    remaining_sane_paths = [""]
    while remaining_sane_paths:
        sane_path = remaining_sane_paths.pop(0)
        path = pathutils.unstrip_path(sane_path, True)
        logger.debug("Verifying collection %r", sane_path)
        with exception_cm(sane_path):
            saved_item_errors = item_errors
            collection = None
            uids = set()
            has_child_collections = False
            for item in cls.discover(path, "1", exception_cm):
                if not collection:
                    # First yielded value is the collection itself
                    collection = item
                    collection.get_meta()
                    continue
                if isinstance(item, storage.BaseCollection):
                    has_child_collections = True
                    remaining_sane_paths.append(item.path)
                elif item.uid in uids:
                    logger.error("Invalid item %r in %r: UID conflict %r",
                                 item.href, sane_path, item.uid)
                else:
                    uids.add(item.uid)
                    logger.debug("Verified item %r in %r",
                                 item.href, sane_path)
            # Only sync when all items of this collection were valid
            if item_errors == saved_item_errors:
                collection.sync()
            if has_child_collections and collection.get_meta("tag"):
                logger.error(
                    "Invalid collection %r: %r must not have "
                    "child collections", sane_path,
                    collection.get_meta("tag"))
    return item_errors == 0 and collection_errors == 0
def verify(cls):
    """Walk the whole storage and check consistency.

    Returns True when no item or collection errors were found.
    """
    item_errors = collection_errors = 0

    @contextlib.contextmanager
    def exception_cm(sane_path, href=None):
        # Count and log errors instead of aborting the walk
        nonlocal item_errors, collection_errors
        try:
            yield
        except Exception as e:
            if href:
                item_errors += 1
                name = "item %r in %r" % (href, sane_path)
            else:
                collection_errors += 1
                name = "collection %r" % sane_path
            logger.error("Invalid %s: %s", name, e, exc_info=True)

    remaining_sane_paths = [""]
    while remaining_sane_paths:
        sane_path = remaining_sane_paths.pop(0)
        path = pathutils.unstrip_path(sane_path, True)
        logger.debug("Verifying collection %r", sane_path)
        with exception_cm(sane_path):
            saved_item_errors = item_errors
            collection = None
            uids = set()
            has_child_collections = False
            for item in cls.discover(path, "1", exception_cm):
                if not collection:
                    # First yielded value is the collection itself
                    collection = item
                    collection.get_meta()
                    continue
                if isinstance(item, storage.BaseCollection):
                    has_child_collections = True
                    remaining_sane_paths.append(item.path)
                elif item.uid in uids:
                    logger.error("Invalid item %r in %r: UID conflict %r",
                                 item.href, sane_path, item.uid)
                else:
                    uids.add(item.uid)
                    logger.debug("Verified item %r in %r",
                                 item.href, sane_path)
            # Only sync when all items of this collection were valid
            if item_errors == saved_item_errors:
                collection.sync()
            if has_child_collections and collection.get_meta("tag"):
                logger.error("Invalid collection %r: %r must not have "
                             "child collections", sane_path,
                             collection.get_meta("tag"))
    return item_errors == 0 and collection_errors == 0
def do_MKCALENDAR(self, environ, base_prefix, path, user):
    """Manage MKCALENDAR request."""
    if "w" not in self._rights.authorization(user, path):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self._read_xml_request_body(environ)
    except RuntimeError as e:
        logger.warning("Bad MKCALENDAR request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    props["tag"] = "VCALENDAR"
    # TODO: use this?
    # timezone = props.get("C:calendar-timezone")
    try:
        radicale_item.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning("Bad MKCALENDAR request on %r: %s",
                       path, e, exc_info=True)
        # BUGFIX: previously missing, so the handler fell through and
        # created a calendar with invalid properties
        return httputils.BAD_REQUEST
    with self._storage.acquire_lock("w", user):
        item = next(self._storage.discover(path), None)
        if item:
            # RFC 4791: target must not exist
            return self._webdav_error_response(client.CONFLICT,
                                               "D:resource-must-be-null")
        parent_path = pathutils.unstrip_path(
            posixpath.dirname(pathutils.strip_path(path)), True)
        parent_item = next(self._storage.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        # Calendars may not be created inside tagged collections
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return httputils.FORBIDDEN
        try:
            self._storage.create_collection(path, props=props)
        except ValueError as e:
            logger.warning("Bad MKCALENDAR request on %r: %s",
                           path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return client.CREATED, {}, None
def _read_xml_request_body(self, environ):
    """Decode the request body and parse it as XML (None when empty).

    Raises RuntimeError when the body is not valid XML.
    """
    raw = httputils.read_raw_request_body(self.configuration, environ)
    body = httputils.decode_request(self.configuration, environ, raw)
    if not body:
        return None
    try:
        parsed = DefusedET.fromstring(body)
    except ET.ParseError as e:
        logger.debug("Request content (Invalid XML):\n%s", body)
        raise RuntimeError("Failed to parse XML: %s" % e) from e
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Request content:\n%s",
                     xmlutils.pretty_xml(parsed))
    return parsed
def get_multi(self, hrefs):
    """Yield (href, item) pairs; item is None for unsafe/unknown names."""
    # It's faster to check for file name collisions here, because
    # we only need to call os.listdir once.
    files = None
    for href in hrefs:
        if files is None:
            # List dir after hrefs returned one item, the iterator may be
            # empty and the for-loop is never executed.
            files = os.listdir(self._filesystem_path)
        path = os.path.join(self._filesystem_path, href)
        # Reject names that are unsafe, or that exist on disk under a
        # different spelling than listed (case-collision on the FS)
        if (not pathutils.is_safe_filesystem_path_component(href) or
                href not in files and os.path.lexists(path)):
            logger.debug(
                "Can't translate name safely to filesystem: %r", href)
            yield (href, None)
        else:
            yield (href, self._get(href, verify_href=False))
def get_multi(self, hrefs):
    """Yield (href, item) pairs; item is None for unsafe/unknown names."""
    # It's faster to check for file name collisions here, because
    # we only need to call os.listdir once.
    files = None
    for href in hrefs:
        if files is None:
            # List dir after hrefs returned one item, the iterator may be
            # empty and the for-loop is never executed.
            files = os.listdir(self._filesystem_path)
        path = os.path.join(self._filesystem_path, href)
        # Reject names that are unsafe, or that exist on disk under a
        # different spelling than listed (case-collision on the FS)
        if (not pathutils.is_safe_filesystem_path_component(href) or
                href not in files and os.path.lexists(path)):
            logger.debug("Can't translate name safely to filesystem: %r",
                         href)
            yield (href, None)
        else:
            yield (href, self._get(href, verify_href=False))
def get(self, environ, base_prefix, path, user):
    """Serve InfCloud web content; delegate other paths to the base
    web module (injecting an InfCloud link into the index page)."""
    if not path.startswith("/.web/infcloud/") and path != "/.web/infcloud":
        status, headers, answer = super().get(environ, base_prefix, path,
                                              user)
        # Add a menu entry for InfCloud to the builtin index page
        # NOTE(review): exact whitespace inside these byte literals must
        # match the served HTML for the replacement to trigger — verify
        if status == client.OK and path in ("/.web/", "/.web/index.html"):
            answer = answer.replace(
                b"""\
<nav>
<ul>""",
                b"""\
<nav>
<ul>
<li><a href="infcloud">InfCloud</a></li>""")
        return status, headers, answer
    assert pathutils.sanitize_path(path) == path
    try:
        filesystem_path = pathutils.path_to_filesystem(
            self.infcloud_folder, path[len("/.web/infcloud"):].strip("/"))
    except ValueError as e:
        # Unsafe path (e.g. traversal attempt) maps to 404
        logger.debug("Web content with unsafe path %r requested: %s",
                     path, e, exc_info=True)
        return httputils.NOT_FOUND
    if os.path.isdir(filesystem_path) and not path.endswith("/"):
        # Redirect directory requests to the trailing-slash form
        location = posixpath.basename(path) + "/"
        return (client.FOUND,
                {"Location": location, "Content-Type": "text/plain"},
                "Redirected to %s" % location)
    if os.path.isdir(filesystem_path):
        filesystem_path = os.path.join(filesystem_path, "index.html")
    if not os.path.isfile(filesystem_path):
        return httputils.NOT_FOUND
    content_type = MIMETYPES.get(
        os.path.splitext(filesystem_path)[1].lower(), FALLBACK_MIMETYPE)
    with open(filesystem_path, "rb") as f:
        answer = f.read()
        # fstat on the open fd avoids a race with file replacement
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.fstat(f.fileno()).st_mtime))
    headers = {
        "Content-Type": content_type,
        "Last-Modified": last_modified}
    return client.OK, headers, answer
def do_MKCOL(self, environ, base_prefix, path, user):
    """Manage MKCOL request."""
    permissions = self.Rights.authorized(user, path, "Ww")
    if not permissions:
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad MKCOL request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    try:
        radicale_item.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning(
            "Bad MKCOL request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    # "w" covers tagged (calendar/address book) collections,
    # "W" plain WebDAV collections
    if (props.get("tag") and "w" not in permissions or
            not props.get("tag") and "W" not in permissions):
        return httputils.NOT_ALLOWED
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            # Target already exists
            return httputils.METHOD_NOT_ALLOWED
        parent_path = pathutils.unstrip_path(
            posixpath.dirname(pathutils.strip_path(path)), True)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        # New collections may not be nested inside tagged collections
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return httputils.FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning(
                "Bad MKCOL request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return client.CREATED, {}, None
def do_MKCALENDAR(self, environ, base_prefix, path, user):
    """Manage MKCALENDAR request."""
    if not self.Rights.authorized(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    props["tag"] = "VCALENDAR"
    # TODO: use this?
    # timezone = props.get("C:calendar-timezone")
    try:
        radicale_item.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        # BUGFIX: previously missing, so the handler fell through and
        # created a calendar with invalid properties
        return httputils.BAD_REQUEST
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            # RFC 4791: target must not exist
            return self.webdav_error_response(
                "D", "resource-must-be-null")
        parent_path = pathutils.unstrip_path(
            posixpath.dirname(pathutils.strip_path(path)), True)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        # Calendars may not be created inside tagged collections
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return httputils.FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning(
                "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return client.CREATED, {}, None
def do_PROPPATCH(self, environ, base_prefix, path, user, context=None):
    """Manage PROPPATCH request."""
    access = app.Access(self._rights, user, path)
    if not access.check("w"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self._read_xml_request_body(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("Client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    with self._storage.acquire_lock("w", user):
        item = next(self._storage.discover(path), None)
        if not item:
            return httputils.NOT_FOUND
        if not access.check("w", item):
            return httputils.NOT_ALLOWED
        # Properties can only be patched on collections, not items
        if not isinstance(item, storage.BaseCollection):
            return httputils.FORBIDDEN
        headers = {"DAV": httputils.DAV_HEADERS,
                   "Content-Type": "text/xml; charset=%s" % self._encoding}
        try:
            xml_answer = xml_proppatch(base_prefix, path, xml_content,
                                       item)
            # Notify the hook with the raw request XML after a
            # successful patch
            if xml_content is not None:
                hook_notification_item = HookNotificationItem(
                    HookNotificationItemTypes.CPATCH,
                    access.path,
                    DefusedET.tostring(
                        xml_content,
                        encoding=self._encoding
                    ).decode(encoding=self._encoding),
                    context
                )
                self._hook.notify(hook_notification_item)
        except ValueError as e:
            logger.warning(
                "Bad PROPPATCH request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
def acquire_lock(cls, mode, user=None):
    """Context manager for the global storage lock.

    After the caller's block finishes (the yield resumes) and the mode
    was "w", the configured storage hook command is executed while the
    lock is still held.
    """
    with cls._lock.acquire(mode):
        yield
        # execute hook
        hook = cls.configuration.get("storage", "hook")
        if mode == "w" and hook:
            folder = os.path.expanduser(
                cls.configuration.get("storage", "filesystem_folder"))
            logger.debug("Running hook")
            debug = logger.isEnabledFor(logging.DEBUG)
            # Capture output only when debug logging is enabled
            p = subprocess.Popen(
                hook % {"user": shlex.quote(user or "Anonymous")},
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                shell=True, universal_newlines=True, cwd=folder)
            stdout_data, stderr_data = p.communicate()
            if stdout_data:
                logger.debug("Captured stdout hook:\n%s", stdout_data)
            if stderr_data:
                logger.debug("Captured stderr hook:\n%s", stderr_data)
            if p.returncode != 0:
                raise subprocess.CalledProcessError(p.returncode, p.args)
def authorized(self, user, path, permissions):
    """Intersect ``permissions`` with the rights granted to ``user``
    for ``path`` by the first matching section of the rights file."""
    user = user or ""
    sane_path = pathutils.strip_path(path)
    # Prevent "regex injection"
    user_escaped = re.escape(user)
    sane_path_escaped = re.escape(sane_path)
    # Escaped values are exposed to the config as %(login)s / %(path)s
    rights_config = configparser.ConfigParser(
        {"login": user_escaped, "path": sane_path_escaped})
    try:
        if not rights_config.read(self.filename):
            raise RuntimeError("No such file: %r" % self.filename)
    except Exception as e:
        raise RuntimeError("Failed to load rights file %r: %s" %
                           (self.filename, e)) from e
    for section in rights_config.sections():
        try:
            user_pattern = rights_config.get(section, "user")
            collection_pattern = rights_config.get(section, "collection")
            # Groups captured from the user pattern can be referenced
            # (re-escaped) in the collection pattern via {0}, {1}, ...
            user_match = re.fullmatch(user_pattern, user)
            collection_match = user_match and re.fullmatch(
                collection_pattern.format(
                    *map(re.escape, user_match.groups())), sane_path)
        except Exception as e:
            raise RuntimeError("Error in section %r of rights file %r: "
                               "%s" % (section, self.filename, e)) from e
        if user_match and collection_match:
            logger.debug("Rule %r:%r matches %r:%r from section %r",
                         user, sane_path, user_pattern,
                         collection_pattern, section)
            return rights.intersect_permissions(
                permissions, rights_config.get(section, "permissions"))
        else:
            logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
                         user, sane_path, user_pattern,
                         collection_pattern, section)
    logger.info("Rights: %r:%r doesn't match any section", user, sane_path)
    return ""
def do_REPORT(self, environ, base_prefix, path, user):
    """Manage REPORT request."""
    access = app.Access(self._rights, user, path)
    if not access.check("r"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self._read_xml_request_body(environ)
    except RuntimeError as e:
        logger.warning("Bad REPORT request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("Client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # ExitStack so xml_report can release the storage lock early via
    # the lock_stack.close callback passed below
    with contextlib.ExitStack() as lock_stack:
        lock_stack.enter_context(self._storage.acquire_lock("r", user))
        item = next(self._storage.discover(path), None)
        if not item:
            return httputils.NOT_FOUND
        if not access.check("r", item):
            return httputils.NOT_ALLOWED
        # REPORT always operates on a collection; resolve the item's
        # parent collection when the path names a single item
        if isinstance(item, storage.BaseCollection):
            collection = item
        else:
            collection = item.collection
        headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
        try:
            status, xml_answer = xml_report(base_prefix, path, xml_content,
                                            collection, self._encoding,
                                            lock_stack.close)
        except ValueError as e:
            logger.warning("Bad REPORT request on %r: %s",
                           path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return status, headers, self._xml_response(xml_answer)
def do_PROPFIND(self, environ, base_prefix, path, user):
    """Manage PROPFIND request."""
    access = app.Access(self._rights, user, path)
    if not access.check("r"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self._read_xml_request_body(environ)
    except RuntimeError as e:
        logger.warning("Bad PROPFIND request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("Client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    with self._storage.acquire_lock("r", user):
        items = self._storage.discover(path,
                                       environ.get("HTTP_DEPTH", "0"))
        # take root item for rights checking
        item = next(items, None)
        if not item:
            return httputils.NOT_FOUND
        if not access.check("r", item):
            return httputils.NOT_ALLOWED
        # put item back
        items = itertools.chain([item], items)
        # Filter out everything the user may not read or write
        allowed_items = self._collect_allowed_items(items, user)
        headers = {
            "DAV": httputils.DAV_HEADERS,
            "Content-Type": "text/xml; charset=%s" % self._encoding}
        status, xml_answer = xml_propfind(base_prefix, path, xml_content,
                                          allowed_items, user,
                                          self._encoding)
        if status == client.FORBIDDEN and xml_answer is None:
            return httputils.NOT_ALLOWED
        return status, headers, self._xml_response(xml_answer)
def do_PROPPATCH(self, environ, base_prefix, path, user):
    """Manage PROPPATCH request."""
    # NOTE(review): mixes self.access(...) with self._storage — looks
    # transitional between API generations; confirm both exist on self
    if not self.access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PROPPATCH request on %r: %s",
                       path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    with self._storage.acquire_lock("w", user):
        item = next(self._storage.discover(path), None)
        if not item:
            return httputils.NOT_FOUND
        if not self.access(user, path, "w", item):
            return httputils.NOT_ALLOWED
        # Properties can only be patched on collections, not items
        if not isinstance(item, storage.BaseCollection):
            return httputils.FORBIDDEN
        headers = {
            "DAV": httputils.DAV_HEADERS,
            "Content-Type": "text/xml; charset=%s" % self._encoding}
        try:
            xml_answer = xml_proppatch(base_prefix, path, xml_content,
                                       item)
        except ValueError as e:
            logger.warning("Bad PROPPATCH request on %r: %s",
                           path, e, exc_info=True)
            return httputils.BAD_REQUEST
        return (client.MULTI_STATUS, headers,
                self.write_xml_content(xml_answer))
def authorization(self, user, path):
    """Return the permissions string granted to ``user`` for ``path``
    by the first matching section of the rights file ("" if none)."""
    user = user or ""
    sane_path = pathutils.strip_path(path)
    # Prevent "regex injection"
    escaped_user = re.escape(user)
    rights_config = configparser.ConfigParser()
    try:
        if not rights_config.read(self._filename):
            raise RuntimeError("No such file: %r" % self._filename)
    except Exception as e:
        raise RuntimeError("Failed to load rights file %r: %s" %
                           (self._filename, e)) from e
    for section in rights_config.sections():
        try:
            user_pattern = rights_config.get(section, "user")
            collection_pattern = rights_config.get(section, "collection")
            # Use empty format() for harmonized handling of curly braces
            user_match = re.fullmatch(user_pattern.format(), user)
            # Groups from the user pattern are available (re-escaped) as
            # {0}, {1}, ... and the escaped login as {user}
            collection_match = user_match and re.fullmatch(
                collection_pattern.format(
                    *map(re.escape, user_match.groups()),
                    user=escaped_user), sane_path)
        except Exception as e:
            raise RuntimeError("Error in section %r of rights file %r: "
                               "%s" % (section, self._filename, e)) from e
        if user_match and collection_match:
            logger.debug("Rule %r:%r matches %r:%r from section %r",
                         user, sane_path, user_pattern,
                         collection_pattern, section)
            return rights_config.get(section, "permissions")
        logger.debug("Rule %r:%r doesn't match %r:%r from section %r",
                     user, sane_path, user_pattern,
                     collection_pattern, section)
    logger.info("Rights: %r:%r doesn't match any section", user, sane_path)
    return ""
def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: if hasattr(answer, "encode"): logger.debug("Response content:\n%s", answer) headers["Content-Type"] += "; charset=%s" % self.encoding answer = answer.encode(self.encoding) accept_encoding = [ encoding.strip() for encoding in environ.get( "HTTP_ACCEPT_ENCODING", "").split(",") if encoding.strip() ] if "gzip" in accept_encoding: zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS) answer = zcomp.compress(answer) + zcomp.flush() headers["Content-Encoding"] = "gzip" headers["Content-Length"] = str(len(answer)) # Add extra headers set in configuration if self.configuration.has_section("headers"): for key in self.configuration.options("headers"): headers[key] = self.configuration.get("headers", key) # Start response time_end = datetime.datetime.now() status = "%d %s" % (status, client.responses.get( status, "Unknown")) logger.info("%s response status for %r%s in %.3f seconds: %s", environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo, (time_end - time_begin).total_seconds(), status) # Return response content return status, list(headers.items()), [answer] if answer else []
def acquire_lock(self, mode, user=None):
    """Context manager for the global storage lock.

    After the caller's block finishes (the yield resumes) and the mode
    was "w", the configured storage hook command is executed while the
    lock is still held.
    """
    with self._lock.acquire(mode) as lock_file:
        yield
        # execute hook
        hook = self.configuration.get("storage", "hook")
        if mode == "w" and hook:
            folder = self.configuration.get("storage", "filesystem_folder")
            debug = logger.isEnabledFor(logging.DEBUG)
            # Capture output only when debug logging is enabled
            popen_kwargs = dict(
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                shell=True, universal_newlines=True, cwd=folder)
            if os.name == "posix":
                # Pass the lock_file to the subprocess to ensure the lock
                # doesn't get released if this process is killed but the
                # child process lives on
                popen_kwargs["pass_fds"] = [lock_file.fileno()]
            # Use new process group for child to prevent terminals
            # from sending SIGINT etc. to it.
            if os.name == "posix":
                # Process group is also used to identify child processes
                popen_kwargs["preexec_fn"] = os.setpgrp
            elif os.name == "nt":
                popen_kwargs["creationflags"] = (
                    subprocess.CREATE_NEW_PROCESS_GROUP)
            command = hook % {"user": shlex.quote(user or "Anonymous")}
            logger.debug("Running hook")
            p = subprocess.Popen(command, **popen_kwargs)
            try:
                stdout_data, stderr_data = p.communicate()
            except BaseException:
                # Terminate the process on error (e.g. KeyboardInterrupt)
                p.terminate()
                p.wait()
                raise
            finally:
                if os.name == "posix":
                    # Try to kill remaining children of process,
                    # identified by process group
                    try:
                        os.killpg(p.pid, signal.SIGKILL)
                    except ProcessLookupError:
                        pass  # No remaining processes found
                    else:
                        logger.warning(
                            "Killed remaining child processes of hook")
            if stdout_data:
                logger.debug("Captured stdout hook:\n%s", stdout_data)
            if stderr_data:
                logger.debug("Captured stderr hook:\n%s", stderr_data)
            if p.returncode != 0:
                raise subprocess.CalledProcessError(p.returncode, p.args)
def acquire_lock(self, mode, user=None):
    """Hold the storage lock while the caller's ``with`` body runs.

    After a write ("w") operation completes, run the configured storage
    hook command (if any) in the collection folder.
    """
    with self._lock.acquire(mode):
        yield
        # execute hook
        hook = self.configuration.get("storage", "hook")
        if mode == "w" and hook:
            folder = self.configuration.get("storage", "filesystem_folder")
            debug = logger.isEnabledFor(logging.DEBUG)
            # Capture the hook's output only when debug logging is on.
            popen_kwargs = dict(
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                shell=True, universal_newlines=True, cwd=folder)
            # Use new process group for child to prevent terminals
            # from sending SIGINT etc.
            if os.name == "posix":
                # Process group is also used to identify child processes
                popen_kwargs["preexec_fn"] = os.setpgrp
            elif os.name == "nt":
                popen_kwargs["creationflags"] = (
                    subprocess.CREATE_NEW_PROCESS_GROUP)
            # Shell-quote ``user`` because the command runs via shell=True.
            command = hook % {"user": shlex.quote(user or "Anonymous")}
            logger.debug("Running storage hook")
            p = subprocess.Popen(command, **popen_kwargs)
            try:
                stdout_data, stderr_data = p.communicate()
            except BaseException:
                # e.g. KeyboardInterrupt or SystemExit
                p.kill()
                p.wait()
                raise
            finally:
                if os.name == "posix":
                    # Kill remaining children identified by process group
                    with contextlib.suppress(OSError):
                        os.killpg(p.pid, signal.SIGKILL)
            if stdout_data:
                logger.debug("Captured stdout from hook:\n%s", stdout_data)
            if stderr_data:
                logger.debug("Captured stderr from hook:\n%s", stderr_data)
            if p.returncode != 0:
                raise subprocess.CalledProcessError(p.returncode, p.args)
def acquire_lock(cls, mode, user=None):
    """Hold the storage lock around the caller's ``with`` body.

    When a write ("w") operation finishes and a storage hook is
    configured, the hook command is run inside the storage folder.
    """
    with cls._lock.acquire(mode):
        yield
        # execute hook
        hook = cls.configuration.get("storage", "hook")
        if mode != "w" or not hook:
            return
        folder = os.path.expanduser(cls.configuration.get(
            "storage", "filesystem_folder"))
        logger.debug("Running hook")
        verbose = logger.isEnabledFor(logging.DEBUG)
        # Capture output only when debug logging is enabled.
        capture = subprocess.PIPE if verbose else subprocess.DEVNULL
        # ``user`` is shell-quoted because the command runs via the shell.
        proc = subprocess.Popen(
            hook % {"user": shlex.quote(user or "Anonymous")},
            stdin=subprocess.DEVNULL,
            stdout=capture,
            stderr=capture,
            shell=True, universal_newlines=True, cwd=folder)
        out, err = proc.communicate()
        if out:
            logger.debug("Captured stdout hook:\n%s", out)
        if err:
            logger.debug("Captured stderr hook:\n%s", err)
        if proc.returncode != 0:
            raise subprocess.CalledProcessError(proc.returncode, proc.args)
def xml_report(base_prefix, path, xml_request, collection, encoding,
               unlock_storage_fn):
    """Read and answer REPORT requests.

    Read rfc3253-3.6 for info.

    Returns a ``(status, xml_answer)`` tuple.  ``unlock_storage_fn`` is
    called once all required data has been read, so the storage lock is
    released before the (potentially slow) filtering work.
    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        # An empty request body is answered with an empty multistatus.
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
                    xmlutils.make_clark("D:principal-property-search"),
                    xmlutils.make_clark("D:expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that don't match the collection's tag.
    if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
            collection.get_meta("tag") != "VCALENDAR" or
            root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
            collection.get_meta("tag") != "VADDRESSBOOK" or
            root.tag == xmlutils.make_clark("D:sync-collection") and
            collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return (client.FORBIDDEN,
                xmlutils.webdav_error("D:supported-report"))
    prop_element = root.find(xmlutils.make_clark("D:prop"))
    props = ([prop.tag for prop in prop_element]
             if prop_element is not None else [])
    if root.tag in (xmlutils.make_clark("C:calendar-multiget"),
                    xmlutils.make_clark("CR:addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_clark("D:href")):
            href_path = pathutils.sanitize_path(
                unquote(urlparse(href_element.text).path))
            if (href_path + "/").startswith(base_prefix + "/"):
                # Strip the script-name prefix to get the internal path.
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning(
                    "Skipping invalid path %r in REPORT request on "
                    "%r", href_path, path)
    elif root.tag == xmlutils.make_clark("D:sync-collection"):
        old_sync_token_element = root.find(xmlutils.make_clark("D:sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            # client.CONFLICT doesn't work with some clients (e.g. InfCloud)
            return (client.FORBIDDEN,
                    xmlutils.webdav_error("D:valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        # All other report types operate on the requested path itself.
        hreferences = (path, )
    filters = (root.findall(xmlutils.make_clark("C:filter")) +
               root.findall(xmlutils.make_clark("CR:filter")))

    def retrieve_items(collection, hreferences, multistatus):
        """Retrieves all items that are referenced in ``hreferences``
        from ``collection`` and adds 404 responses for missing and invalid
        items to ``multistatus``."""
        collection_requested = False

        def get_names():
            """Extracts all names from references in ``hreferences`` and
            adds 404 responses for invalid references to ``multistatus``.
            If the whole collections is referenced ``collection_requested``
            gets set to ``True``."""
            nonlocal collection_requested
            for hreference in hreferences:
                try:
                    name = pathutils.name_from_path(hreference, collection)
                except ValueError as e:
                    logger.warning(
                        "Skipping invalid path %r in REPORT request"
                        " on %r: %s", hreference, path, e)
                    response = xml_item_response(base_prefix, hreference,
                                                 found_item=False)
                    multistatus.append(response)
                    continue
                if name:
                    # Reference is an item
                    yield name
                else:
                    # Reference is a collection
                    collection_requested = True

        for name, item in collection.get_multi(get_names()):
            if not item:
                uri = pathutils.unstrip_path(
                    posixpath.join(collection.path, name))
                response = xml_item_response(base_prefix, uri,
                                             found_item=False)
                multistatus.append(response)
            else:
                # ``False``: filters have not been applied to this item yet.
                yield item, False
        if collection_requested:
            yield from collection.get_filtered(filters)

    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(collection, hreferences,
                                          multistatus))
    collection_tag = collection.get_meta("tag")
    # Don't access storage after this!
    unlock_storage_fn()

    def match(item, filter_):
        """Return True if ``item`` satisfies ``filter_``.

        NOTE(review): ``make_clark("C:%s" % filter_)`` formats the
        Element object itself into the tag string -- presumably the
        intent is comparing against a fixed filter tag; verify against
        the xmlutils API before touching this.
        """
        tag = collection_tag
        if (tag == "VCALENDAR" and
                filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
            if len(filter_) == 0:
                return True
            if len(filter_) > 1:
                raise ValueError("Filter with %d children" % len(filter_))
            if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
                raise ValueError("Unexpected %r in filter" % filter_[0].tag)
            return radicale_filter.comp_match(item, filter_[0])
        if (tag == "VADDRESSBOOK" and
                filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
            for child in filter_:
                if child.tag != xmlutils.make_clark("CR:prop-filter"):
                    raise ValueError("Unexpected %r in filter" % child.tag)
            # rfc6352-10.5.1: the "test" attribute defaults to "anyof".
            test = filter_.get("test", "anyof")
            if test == "anyof":
                return any(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            if test == "allof":
                return all(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            raise ValueError("Unsupported filter test: %r" % test)
        raise ValueError("Unsupported filter %r for %r" % (filter_.tag, tag))

    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(match(item, filter_) for filter_ in filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e
        found_props = []
        not_found_props = []
        for tag in props:
            element = ET.Element(tag)
            if tag == xmlutils.make_clark("D:getetag"):
                element.text = item.etag
                found_props.append(element)
            elif tag == xmlutils.make_clark("D:getcontenttype"):
                element.text = xmlutils.get_content_type(item, encoding)
                found_props.append(element)
            elif tag in (xmlutils.make_clark("C:calendar-data"),
                         xmlutils.make_clark("CR:address-data")):
                element.text = item.serialize()
                found_props.append(element)
            else:
                not_found_props.append(element)
        uri = pathutils.unstrip_path(posixpath.join(collection.path,
                                                    item.href))
        multistatus.append(
            xml_item_response(base_prefix, uri, found_props=found_props,
                              not_found_props=not_found_props,
                              found_item=True))
    return client.MULTI_STATUS, multistatus
def _handle_request(self, environ):
    """Manage a request.

    Performs request logging, base-prefix/path sanitizing,
    authentication, principal-collection creation and dispatch to the
    matching ``do_METHOD`` handler, then wraps the handler's result in
    a WSGI-compatible ``(status, headers, body)`` triple.
    """
    def response(status, headers=(), answer=None):
        # Build the final WSGI triple; also logs timing via the
        # closed-over ``time_begin`` / ``depthinfo``.
        headers = dict(headers)
        # Set content length
        if answer:
            if hasattr(answer, "encode"):
                logger.debug("Response content:\n%s", answer)
                headers["Content-Type"] += "; charset=%s" % self.encoding
                answer = answer.encode(self.encoding)
            accept_encoding = [
                encoding.strip() for encoding in
                environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
                if encoding.strip()]
            if "gzip" in accept_encoding:
                # wbits=16+MAX_WBITS selects the gzip container format.
                zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
                answer = zcomp.compress(answer) + zcomp.flush()
                headers["Content-Encoding"] = "gzip"
            headers["Content-Length"] = str(len(answer))
        # Add extra headers set in configuration
        if self.configuration.has_section("headers"):
            for key in self.configuration.options("headers"):
                headers[key] = self.configuration.get("headers", key)
        # Start response
        time_end = datetime.datetime.now()
        status = "%d %s" % (
            status, client.responses.get(status, "Unknown"))
        logger.info(
            "%s response status for %r%s in %.3f seconds: %s",
            environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
            depthinfo, (time_end - time_begin).total_seconds(), status)
        # Return response content
        return status, list(headers.items()), [answer] if answer else []

    # Identify the remote peer for logging (honouring X-Forwarded-For).
    remote_host = "unknown"
    if environ.get("REMOTE_HOST"):
        remote_host = repr(environ["REMOTE_HOST"])
    elif environ.get("REMOTE_ADDR"):
        remote_host = environ["REMOTE_ADDR"]
    if environ.get("HTTP_X_FORWARDED_FOR"):
        remote_host = "%r (forwarded by %s)" % (
            environ["HTTP_X_FORWARDED_FOR"], remote_host)
    remote_useragent = ""
    if environ.get("HTTP_USER_AGENT"):
        remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
    depthinfo = ""
    if environ.get("HTTP_DEPTH"):
        depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
    time_begin = datetime.datetime.now()
    logger.info(
        "%s request for %r%s received from %s%s",
        environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
        remote_host, remote_useragent)
    headers = pprint.pformat(self._headers_log(environ))
    logger.debug("Request headers:\n%s", headers)
    # Let reverse proxies overwrite SCRIPT_NAME
    if "HTTP_X_SCRIPT_NAME" in environ:
        # script_name must be removed from PATH_INFO by the client.
        unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
        logger.debug("Script name overwritten by client: %r",
                     unsafe_base_prefix)
    else:
        # SCRIPT_NAME is already removed from PATH_INFO, according to the
        # WSGI specification.
        unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
    # Sanitize base prefix
    base_prefix = pathutils.sanitize_path(unsafe_base_prefix).rstrip("/")
    logger.debug("Sanitized script name: %r", base_prefix)
    # Sanitize request URI (a WSGI server indicates with an empty path,
    # that the URL targets the application root without a trailing slash)
    path = pathutils.sanitize_path(environ.get("PATH_INFO", ""))
    logger.debug("Sanitized path: %r", path)
    # Get function corresponding to method
    # NOTE(review): no default is passed to getattr, so an unknown HTTP
    # method raises AttributeError here -- confirm a caller handles it.
    function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())
    # If "/.well-known" is not available, clients query "/"
    if path == "/.well-known" or path.startswith("/.well-known/"):
        return response(*httputils.NOT_FOUND)
    # Ask authentication backend to check rights
    login = password = ""
    external_login = self.Auth.get_external_login(environ)
    authorization = environ.get("HTTP_AUTHORIZATION", "")
    if external_login:
        login, password = external_login
        login, password = login or "", password or ""
    elif authorization.startswith("Basic"):
        authorization = authorization[len("Basic"):].strip()
        login, password = self.decode(base64.b64decode(
            authorization.encode("ascii")), environ).split(":", 1)
    user = self.Auth.login(login, password) or "" if login else ""
    if user and login == user:
        logger.info("Successful login: %r", user)
    elif user:
        logger.info("Successful login: %r -> %r", login, user)
    elif login:
        logger.info("Failed login attempt: %r", login)
        # Random delay to avoid timing oracles and bruteforce attacks
        delay = self.configuration.getfloat("auth", "delay")
        if delay > 0:
            random_delay = delay * (0.5 + random.random())
            logger.debug("Sleeping %.3f seconds", random_delay)
            time.sleep(random_delay)
    if user and not pathutils.is_safe_path_component(user):
        # Prevent usernames like "user/calendar.ics"
        logger.info("Refused unsafe username: %r", user)
        user = ""
    # Create principal collection
    if user:
        principal_path = "/%s/" % user
        if self.Rights.authorized(user, principal_path, "W"):
            # Check for an existing principal under a read lock first.
            with self.Collection.acquire_lock("r", user):
                principal = next(
                    self.Collection.discover(principal_path, depth="1"),
                    None)
            if not principal:
                with self.Collection.acquire_lock("w", user):
                    try:
                        self.Collection.create_collection(principal_path)
                    except ValueError as e:
                        logger.warning("Failed to create principal "
                                       "collection %r: %s", user, e)
                        user = ""
        else:
            logger.warning("Access to principal path %r denied by "
                           "rights backend", principal_path)
    if self.configuration.getboolean("internal", "internal_server"):
        # Verify content length
        content_length = int(environ.get("CONTENT_LENGTH") or 0)
        if content_length:
            max_content_length = self.configuration.getint(
                "server", "max_content_length")
            if max_content_length and content_length > max_content_length:
                logger.info("Request body too large: %d", content_length)
                return response(*httputils.REQUEST_ENTITY_TOO_LARGE)
    # Dispatch when anonymous access is allowed or login succeeded.
    if not login or user:
        status, headers, answer = function(
            environ, base_prefix, path, user)
        if (status, headers, answer) == httputils.NOT_ALLOWED:
            logger.info("Access to %r denied for %s", path,
                        repr(user) if user else "anonymous user")
    else:
        status, headers, answer = httputils.NOT_ALLOWED
    if ((status, headers, answer) == httputils.NOT_ALLOWED and not user and
            not external_login):
        # Unknown or unauthorized user
        logger.debug("Asking client for authentication")
        status = client.UNAUTHORIZED
        realm = self.configuration.get("auth", "realm")
        headers = dict(headers)
        headers.update({
            "WWW-Authenticate": "Basic realm=\"%s\"" % realm})
    return response(status, headers, answer)
def check_and_sanitize_items(vobject_items, is_collection=False, tag=None):
    """Check vobject items for common errors and add missing UIDs.

    ``is_collection`` indicates that vobject_item contains unrelated
    components.

    The ``tag`` of the collection.

    Raises ``ValueError`` on malformed items; mutates components in
    place to add generated UIDs when ``is_collection`` allows it.
    """
    if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
        raise ValueError("Unsupported collection tag: %r" % tag)
    if not is_collection and len(vobject_items) != 1:
        raise ValueError("Item contains %d components" % len(vobject_items))
    if tag == "VCALENDAR":
        if len(vobject_items) > 1:
            raise RuntimeError("VCALENDAR collection contains %d "
                               "components" % len(vobject_items))
        vobject_item = vobject_items[0]
        if vobject_item.name != "VCALENDAR":
            raise ValueError("Item type %r not supported in %r "
                             "collection" % (vobject_item.name, tag))
        # Collect the UIDs that are already in use, so generated UIDs
        # can be checked for collisions.
        component_uids = set()
        for component in vobject_item.components():
            if component.name in ("VTODO", "VEVENT", "VJOURNAL"):
                component_uid = get_uid(component)
                if component_uid:
                    component_uids.add(component_uid)
        component_name = None
        object_uid = None
        object_uid_set = False
        for component in vobject_item.components():
            # https://tools.ietf.org/html/rfc4791#section-4.1
            if component.name == "VTIMEZONE":
                continue
            if component_name is None or is_collection:
                component_name = component.name
            elif component_name != component.name:
                raise ValueError("Multiple component types in object: %r, %r"
                                 % (component_name, component.name))
            if component_name not in ("VTODO", "VEVENT", "VJOURNAL"):
                continue
            component_uid = get_uid(component)
            if not object_uid_set or is_collection:
                object_uid_set = True
                object_uid = component_uid
                if not component_uid:
                    if not is_collection:
                        raise ValueError("%s component without UID in object" %
                                         component_name)
                    # Generate a fresh UID that doesn't collide with any
                    # existing one and attach it to the component.
                    component_uid = find_available_uid(
                        component_uids.__contains__)
                    component_uids.add(component_uid)
                    if hasattr(component, "uid"):
                        component.uid.value = component_uid
                    else:
                        component.add("UID").value = component_uid
            elif not object_uid or not component_uid:
                raise ValueError("Multiple %s components without UID in "
                                 "object" % component_name)
            elif object_uid != component_uid:
                raise ValueError(
                    "Multiple %s components with different UIDs in object: "
                    "%r, %r" % (component_name, object_uid, component_uid))
            # Workaround for bug in Lightning (Thunderbird)
            # Rescheduling a single occurrence from a repeating event creates
            # an event with DTEND and DURATION:PT0S
            if (hasattr(component, "dtend") and
                    hasattr(component, "duration") and
                    component.duration.value == timedelta(0)):
                logger.debug(
                    "Quirks: Removing zero duration from %s in "
                    "object %r", component_name, component_uid)
                del component.duration
            # vobject interprets recurrence rules on demand
            try:
                component.rruleset
            except Exception as e:
                raise ValueError("invalid recurrence rules in %s" %
                                 component.name) from e
    elif tag == "VADDRESSBOOK":
        # https://tools.ietf.org/html/rfc6352#section-5.1
        object_uids = set()
        for vobject_item in vobject_items:
            if vobject_item.name == "VCARD":
                object_uid = get_uid(vobject_item)
                if object_uid:
                    object_uids.add(object_uid)
        for vobject_item in vobject_items:
            if vobject_item.name == "VLIST":
                # Custom format used by SOGo Connector to store lists of
                # contacts
                continue
            if vobject_item.name != "VCARD":
                raise ValueError("Item type %r not supported in %r "
                                 "collection" % (vobject_item.name, tag))
            object_uid = get_uid(vobject_item)
            if not object_uid:
                if not is_collection:
                    raise ValueError("%s object without UID" %
                                     vobject_item.name)
                # Generate and attach a non-colliding UID.
                object_uid = find_available_uid(object_uids.__contains__)
                object_uids.add(object_uid)
                if hasattr(vobject_item, "uid"):
                    vobject_item.uid.value = object_uid
                else:
                    vobject_item.add("UID").value = object_uid
    else:
        # No tag: only an empty item list is acceptable.
        for i in vobject_items:
            raise ValueError("Item type %r not supported in %s collection" %
                             (i.name, repr(tag) if tag else "generic"))
def xml_report(base_prefix, path, xml_request, collection,
               unlock_storage_fn):
    """Read and answer REPORT requests.

    Read rfc3253-3.6 for info.

    Returns a ``(status, xml_answer)`` tuple.  ``unlock_storage_fn`` is
    called once all required data has been read, so the storage lock is
    released before the (potentially slow) filtering work.

    Fix: removed an unreachable ``return all(...)`` statement that
    followed the ``raise ValueError("Unsupported filter test: ...")``
    inside ``match`` -- dead code left over from a refactoring.
    """
    multistatus = ET.Element(xmlutils.make_tag("D", "multistatus"))
    if xml_request is None:
        # An empty request body is answered with an empty multistatus.
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (
            xmlutils.make_tag("D", "principal-search-property-set"),
            xmlutils.make_tag("D", "principal-property-search"),
            xmlutils.make_tag("D", "expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.tag_from_clark(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that don't match the collection's tag.
    if (root.tag == xmlutils.make_tag("C", "calendar-multiget") and
            collection.get_meta("tag") != "VCALENDAR" or
            root.tag == xmlutils.make_tag("CR", "addressbook-multiget") and
            collection.get_meta("tag") != "VADDRESSBOOK" or
            root.tag == xmlutils.make_tag("D", "sync-collection") and
            collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.tag_from_clark(root.tag), path)
        return (client.CONFLICT,
                xmlutils.webdav_error("D", "supported-report"))
    prop_element = root.find(xmlutils.make_tag("D", "prop"))
    props = (
        [prop.tag for prop in prop_element]
        if prop_element is not None else [])
    if root.tag in (
            xmlutils.make_tag("C", "calendar-multiget"),
            xmlutils.make_tag("CR", "addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_tag("D", "href")):
            href_path = pathutils.sanitize_path(
                unquote(urlparse(href_element.text).path))
            if (href_path + "/").startswith(base_prefix + "/"):
                # Strip the script-name prefix to get the internal path.
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == xmlutils.make_tag("D", "sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_tag("D", "sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            return (client.CONFLICT,
                    xmlutils.webdav_error("D", "valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_tag("D", "sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        # All other report types operate on the requested path itself.
        hreferences = (path,)
    filters = (
        root.findall("./%s" % xmlutils.make_tag("C", "filter")) +
        root.findall("./%s" % xmlutils.make_tag("CR", "filter")))

    def retrieve_items(collection, hreferences, multistatus):
        """Retrieves all items that are referenced in ``hreferences``
        from ``collection`` and adds 404 responses for missing and invalid
        items to ``multistatus``."""
        collection_requested = False

        def get_names():
            """Extracts all names from references in ``hreferences`` and
            adds 404 responses for invalid references to ``multistatus``.
            If the whole collections is referenced ``collection_requested``
            gets set to ``True``."""
            nonlocal collection_requested
            for hreference in hreferences:
                try:
                    name = pathutils.name_from_path(hreference, collection)
                except ValueError as e:
                    logger.warning("Skipping invalid path %r in REPORT request"
                                   " on %r: %s", hreference, path, e)
                    response = xml_item_response(base_prefix, hreference,
                                                 found_item=False)
                    multistatus.append(response)
                    continue
                if name:
                    # Reference is an item
                    yield name
                else:
                    # Reference is a collection
                    collection_requested = True

        for name, item in collection.get_multi(get_names()):
            if not item:
                uri = pathutils.unstrip_path(
                    posixpath.join(collection.path, name))
                response = xml_item_response(base_prefix, uri,
                                             found_item=False)
                multistatus.append(response)
            else:
                # ``False``: filters have not been applied to this item yet.
                yield item, False
        if collection_requested:
            yield from collection.get_filtered(filters)

    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(collection, hreferences,
                                          multistatus))
    collection_tag = collection.get_meta("tag")
    # Don't access storage after this!
    unlock_storage_fn()

    def match(item, filter_):
        """Return True if ``item`` satisfies ``filter_``.

        NOTE(review): ``make_tag("C", filter_)`` formats the Element
        object itself into the tag -- presumably a fixed filter tag was
        intended; verify against the xmlutils API before changing it.
        """
        tag = collection_tag
        if (tag == "VCALENDAR" and
                filter_.tag != xmlutils.make_tag("C", filter_)):
            if len(filter_) == 0:
                return True
            if len(filter_) > 1:
                raise ValueError("Filter with %d children" % len(filter_))
            if filter_[0].tag != xmlutils.make_tag("C", "comp-filter"):
                raise ValueError("Unexpected %r in filter" % filter_[0].tag)
            return radicale_filter.comp_match(item, filter_[0])
        if (tag == "VADDRESSBOOK" and
                filter_.tag != xmlutils.make_tag("CR", filter_)):
            for child in filter_:
                if child.tag != xmlutils.make_tag("CR", "prop-filter"):
                    raise ValueError("Unexpected %r in filter" % child.tag)
            # rfc6352-10.5.1: the "test" attribute defaults to "anyof".
            test = filter_.get("test", "anyof")
            if test == "anyof":
                return any(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            if test == "allof":
                return all(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            raise ValueError("Unsupported filter test: %r" % test)
        raise ValueError("unsupported filter %r for %r" % (filter_.tag, tag))

    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(match(item, filter_) for filter_ in filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e
        found_props = []
        not_found_props = []
        for tag in props:
            element = ET.Element(tag)
            if tag == xmlutils.make_tag("D", "getetag"):
                element.text = item.etag
                found_props.append(element)
            elif tag == xmlutils.make_tag("D", "getcontenttype"):
                element.text = xmlutils.get_content_type(item)
                found_props.append(element)
            elif tag in (
                    xmlutils.make_tag("C", "calendar-data"),
                    xmlutils.make_tag("CR", "address-data")):
                element.text = item.serialize()
                found_props.append(element)
            else:
                not_found_props.append(element)
        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        multistatus.append(xml_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))
    return client.MULTI_STATUS, multistatus
def discover(cls, path, depth="0", child_context_manager=(
        lambda path, href=None: contextlib.ExitStack())):
    """Discover the collection or item at ``path``.

    Generator: yields the item (if ``path`` names one), otherwise the
    collection itself followed -- for depth != "0" -- by its items and
    sub-collections.  Each child access is wrapped in
    ``child_context_manager`` (a no-op ``ExitStack`` by default).
    """
    # Path should already be sanitized
    sane_path = pathutils.strip_path(path)
    attributes = sane_path.split("/") if sane_path else []
    folder = cls._get_collection_root_folder()
    # Create the root collection
    cls._makedirs_synced(folder)
    try:
        filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
    except ValueError as e:
        # Path is unsafe
        logger.debug("Unsafe path %r requested from storage: %s",
                     sane_path, e, exc_info=True)
        return
    # Check if the path exists and if it leads to a collection or an item
    if not os.path.isdir(filesystem_path):
        if attributes and os.path.isfile(filesystem_path):
            # A regular file: the last path component is the item href.
            href = attributes.pop()
        else:
            # Neither a collection nor an item exists at this path.
            return
    else:
        href = None
    sane_path = "/".join(attributes)
    collection = cls(pathutils.unstrip_path(sane_path, True))
    if href:
        yield collection.get(href)
        return
    yield collection
    if depth == "0":
        return
    # Yield the collection's items ...
    for href in collection.list():
        with child_context_manager(pathutils.unstrip_path(sane_path, True),
                                   href):
            yield collection.get(href)
    # ... and its sub-collections (directories on disk).
    for entry in os.scandir(filesystem_path):
        if not entry.is_dir():
            continue
        href = entry.name
        if not pathutils.is_safe_filesystem_path_component(href):
            # ".Radicale*" names are internal bookkeeping; only log
            # unexpected unsafe names.
            if not href.startswith(".Radicale"):
                logger.debug("Skipping collection %r in %r", href, sane_path)
            continue
        child_path = pathutils.unstrip_path(
            posixpath.join(sane_path, href), True)
        with child_context_manager(child_path):
            yield cls(child_path)
def _handle_request(self, environ):
    """Manage a request.

    Logs the request, sanitizes the base prefix and path, performs
    authentication, creates the principal collection when needed and
    dispatches to the matching ``do_METHOD`` handler, wrapping its
    result into a WSGI-compatible ``(status, headers, body)`` triple.
    """
    def response(status, headers=(), answer=None):
        # Build the final WSGI triple; logs timing via the closed-over
        # ``time_begin`` / ``depthinfo``.
        headers = dict(headers)
        # Set content length
        if answer:
            if hasattr(answer, "encode"):
                logger.debug("Response content:\n%s", answer)
                headers["Content-Type"] += "; charset=%s" % self._encoding
                answer = answer.encode(self._encoding)
            accept_encoding = [
                encoding.strip() for encoding in
                environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
                if encoding.strip()]
            if "gzip" in accept_encoding:
                # wbits=16+MAX_WBITS selects the gzip container format.
                zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
                answer = zcomp.compress(answer) + zcomp.flush()
                headers["Content-Encoding"] = "gzip"
            headers["Content-Length"] = str(len(answer))
        # Add extra headers set in configuration
        for key in self.configuration.options("headers"):
            headers[key] = self.configuration.get("headers", key)
        # Start response
        time_end = datetime.datetime.now()
        status = "%d %s" % (status, client.responses.get(
            status, "Unknown"))
        logger.info("%s response status for %r%s in %.3f seconds: %s",
                    environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
                    depthinfo, (time_end - time_begin).total_seconds(),
                    status)
        # Return response content
        return status, list(headers.items()), [answer] if answer else []

    # Identify the remote peer for logging (honouring X-Forwarded-For).
    remote_host = "unknown"
    if environ.get("REMOTE_HOST"):
        remote_host = repr(environ["REMOTE_HOST"])
    elif environ.get("REMOTE_ADDR"):
        remote_host = environ["REMOTE_ADDR"]
    if environ.get("HTTP_X_FORWARDED_FOR"):
        remote_host = "%s (forwarded for %r)" % (
            remote_host, environ["HTTP_X_FORWARDED_FOR"])
    remote_useragent = ""
    if environ.get("HTTP_USER_AGENT"):
        remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
    depthinfo = ""
    if environ.get("HTTP_DEPTH"):
        depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
    time_begin = datetime.datetime.now()
    logger.info("%s request for %r%s received from %s%s",
                environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
                depthinfo, remote_host, remote_useragent)
    headers = pprint.pformat(self._headers_log(environ))
    logger.debug("Request headers:\n%s", headers)
    # Let reverse proxies overwrite SCRIPT_NAME
    if "HTTP_X_SCRIPT_NAME" in environ:
        # script_name must be removed from PATH_INFO by the client.
        unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
        logger.debug("Script name overwritten by client: %r",
                     unsafe_base_prefix)
    else:
        # SCRIPT_NAME is already removed from PATH_INFO, according to the
        # WSGI specification.
        unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
    # Sanitize base prefix
    base_prefix = pathutils.sanitize_path(unsafe_base_prefix).rstrip("/")
    logger.debug("Sanitized script name: %r", base_prefix)
    # Sanitize request URI (a WSGI server indicates with an empty path,
    # that the URL targets the application root without a trailing slash)
    path = pathutils.sanitize_path(environ.get("PATH_INFO", ""))
    logger.debug("Sanitized path: %r", path)
    # Get function corresponding to method
    function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper(),
                       None)
    if not function:
        # Unknown HTTP method.
        return response(*httputils.METHOD_NOT_ALLOWED)
    # If "/.well-known" is not available, clients query "/"
    if path == "/.well-known" or path.startswith("/.well-known/"):
        return response(*httputils.NOT_FOUND)
    # Ask authentication backend to check rights
    login = password = ""
    external_login = self._auth.get_external_login(environ)
    authorization = environ.get("HTTP_AUTHORIZATION", "")
    if external_login:
        login, password = external_login
        login, password = login or "", password or ""
    elif authorization.startswith("Basic"):
        authorization = authorization[len("Basic"):].strip()
        login, password = httputils.decode_request(
            self.configuration, environ, base64.b64decode(
                authorization.encode("ascii"))).split(":", 1)
    user = self._auth.login(login, password) or "" if login else ""
    if user and login == user:
        logger.info("Successful login: %r", user)
    elif user:
        logger.info("Successful login: %r -> %r", login, user)
    elif login:
        logger.warning("Failed login attempt from %s: %r",
                       remote_host, login)
        # Random delay to avoid timing oracles and bruteforce attacks
        delay = self.configuration.get("auth", "delay")
        if delay > 0:
            random_delay = delay * (0.5 + random.random())
            logger.debug("Sleeping %.3f seconds", random_delay)
            time.sleep(random_delay)
    if user and not pathutils.is_safe_path_component(user):
        # Prevent usernames like "user/calendar.ics"
        logger.info("Refused unsafe username: %r", user)
        user = ""
    # Create principal collection
    if user:
        principal_path = "/%s/" % user
        # Check for an existing principal under a read lock first.
        with self._storage.acquire_lock("r", user):
            principal = next(
                self._storage.discover(principal_path, depth="1"),
                None)
        if not principal:
            if "W" in self._rights.authorization(user, principal_path):
                with self._storage.acquire_lock("w", user):
                    try:
                        self._storage.create_collection(principal_path)
                    except ValueError as e:
                        logger.warning(
                            "Failed to create principal "
                            "collection %r: %s", user, e)
                        user = ""
            else:
                logger.warning(
                    "Access to principal path %r denied by "
                    "rights backend", principal_path)
    if self.configuration.get("server", "_internal_server"):
        # Verify content length
        content_length = int(environ.get("CONTENT_LENGTH") or 0)
        if content_length:
            max_content_length = self.configuration.get(
                "server", "max_content_length")
            if max_content_length and content_length > max_content_length:
                logger.info("Request body too large: %d", content_length)
                return response(*httputils.REQUEST_ENTITY_TOO_LARGE)
    # Dispatch when anonymous access is allowed or login succeeded.
    if not login or user:
        status, headers, answer = function(environ, base_prefix, path,
                                           user)
        if (status, headers, answer) == httputils.NOT_ALLOWED:
            logger.info("Access to %r denied for %s", path,
                        repr(user) if user else "anonymous user")
    else:
        status, headers, answer = httputils.NOT_ALLOWED
    if ((status, headers, answer) == httputils.NOT_ALLOWED and not user and
            not external_login):
        # Unknown or unauthorized user
        logger.debug("Asking client for authentication")
        status = client.UNAUTHORIZED
        realm = self.configuration.get("auth", "realm")
        headers = dict(headers)
        headers.update({"WWW-Authenticate": "Basic realm=\"%s\"" % realm})
    return response(status, headers, answer)
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request.

    Uploads a single item or a whole collection at ``path``.  Returns a
    ``(status, headers, answer)`` tuple understood by the response helper.
    Parsing and permission look-ups happen before the storage lock is
    taken; only the minimal decision work runs while holding it.
    """
    if not self.access(user, path, "r"):
        return httputils.NOT_ALLOWED
    try:
        content = self.read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    parent_path = pathutils.unstrip_path(
        posixpath.dirname(pathutils.strip_path(path)), True)
    # "Ww" on the target itself vs. "w" on the parent decides whether this
    # PUT targets a whole collection or a single member item.
    permissions = self.Rights.authorized(user, path, "Ww")
    parent_permissions = self.Rights.authorized(user, parent_path, "w")

    def prepare(vobject_items, tag=None, write_whole_collection=None):
        # Parse/sanitize the uploaded vobject components into storage
        # items.  Exceptions are captured into ``stored_exc_info`` instead
        # of raised, so the caller can surface them only after the
        # preconditions (etag, rights) have been checked under the lock.
        if (write_whole_collection or
                permissions and not parent_permissions):
            write_whole_collection = True
            content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
            # Map MIME type back to a collection tag (VCALENDAR/VADDRESSBOOK)
            tags = {value: key
                    for key, value in xmlutils.MIMETYPES.items()}
            tag = radicale_item.predict_tag_of_whole_collection(
                vobject_items, tags.get(content_type))
            if not tag:
                raise ValueError("Can't determine collection tag")
            collection_path = pathutils.strip_path(path)
        elif (write_whole_collection is not None and
                not write_whole_collection or
                not permissions and parent_permissions):
            write_whole_collection = False
            if tag is None:
                tag = radicale_item.predict_tag_of_parent_collection(
                    vobject_items)
            collection_path = posixpath.dirname(
                pathutils.strip_path(path))
        props = None
        stored_exc_info = None
        items = []
        try:
            if tag:
                radicale_item.check_and_sanitize_items(
                    vobject_items, is_collection=write_whole_collection,
                    tag=tag)
                if write_whole_collection and tag == "VCALENDAR":
                    # A calendar upload is one VCALENDAR containing many
                    # components; split them into one item per UID.
                    vobject_components = []
                    vobject_item, = vobject_items
                    for content in ("vevent", "vtodo", "vjournal"):
                        vobject_components.extend(
                            getattr(vobject_item, "%s_list" % content, []))
                    vobject_components_by_uid = itertools.groupby(
                        sorted(vobject_components,
                               key=radicale_item.get_uid),
                        radicale_item.get_uid)
                    for uid, components in vobject_components_by_uid:
                        vobject_collection = vobject.iCalendar()
                        for component in components:
                            vobject_collection.add(component)
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_collection)
                        item.prepare()
                        items.append(item)
                elif write_whole_collection and tag == "VADDRESSBOOK":
                    # An address book upload is already one vCard per item.
                    for vobject_item in vobject_items:
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_item)
                        item.prepare()
                        items.append(item)
                elif not write_whole_collection:
                    # Single-item upload: exactly one component expected.
                    vobject_item, = vobject_items
                    item = radicale_item.Item(
                        collection_path=collection_path,
                        vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)
            if write_whole_collection:
                props = {}
                if tag:
                    props["tag"] = tag
                # X-WR-CALNAME / X-WR-CALDESC become WebDAV properties.
                if tag == "VCALENDAR" and vobject_items:
                    if hasattr(vobject_items[0], "x_wr_calname"):
                        calname = vobject_items[0].x_wr_calname.value
                        if calname:
                            props["D:displayname"] = calname
                    if hasattr(vobject_items[0], "x_wr_caldesc"):
                        caldesc = vobject_items[0].x_wr_caldesc.value
                        if caldesc:
                            props["C:calendar-description"] = caldesc
                radicale_item.check_and_sanitize_props(props)
        except Exception:
            stored_exc_info = sys.exc_info()

        # Use generator for items and delete references to free memory
        # early
        def items_generator():
            while items:
                yield items.pop(0)
        return (items_generator(), tag, write_whole_collection, props,
                stored_exc_info)

    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning(
            "Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    # Optimistic first pass outside the lock; may be redone below if the
    # actual collection state disagrees with the guess.
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(vobject_items)

    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))
        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")
        if write_whole_collection:
            # Creating an untagged collection requires "W", a tagged one "w".
            if not self.Rights.authorized(user, path, "w" if tag else "W"):
                return httputils.NOT_ALLOWED
        elif not self.Rights.authorized(user, parent_path, "w"):
            return httputils.NOT_ALLOWED
        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return httputils.PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return httputils.PRECONDITION_FAILED
        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return httputils.PRECONDITION_FAILED
        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            # First pass guessed wrong: re-run with the real tag/mode.
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                 vobject_items, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            logger.warning(
                "Bad PUT request on %r: %s", path, prepared_exc_info[1],
                exc_info=prepared_exc_info)
            return httputils.BAD_REQUEST
        if write_whole_collection:
            try:
                etag = self.Collection.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
        else:
            prepared_item, = prepared_items
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                # UID collision with another href in the same collection.
                return self.webdav_error_response(
                    "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict")
            href = posixpath.basename(pathutils.strip_path(path))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
        headers = {"ETag": etag}
        return client.CREATED, headers, None
def read_content(self, environ): content = self.decode(self.read_raw_content(environ), environ) logger.debug("Request content:\n%s", content) return content
def _get(self, href, verify_href=True):
    """Load a single item ``href`` from the filesystem collection.

    Returns a ``radicale_item.Item`` or ``None`` when the name is unsafe
    or no file exists.  Parsed metadata is served from the item cache
    when the cached hash matches the file content; otherwise the file is
    re-parsed under the cache lock and the cache refreshed.
    """
    if verify_href:
        try:
            # Reject names that could escape the collection directory.
            if not pathutils.is_safe_filesystem_path_component(href):
                raise pathutils.UnsafePathError(href)
            path = pathutils.path_to_filesystem(
                self._filesystem_path, href)
        except ValueError as e:
            logger.debug(
                "Can't translate name %r safely to filesystem in %r: %s",
                href, self.path, e, exc_info=True)
            return None
    else:
        path = os.path.join(self._filesystem_path, href)
    try:
        with open(path, "rb") as f:
            raw_text = f.read()
    except (FileNotFoundError, IsADirectoryError):
        return None
    except PermissionError:
        # Windows raises ``PermissionError`` when ``path`` is a directory
        if (os.name == "nt" and
                os.path.isdir(path) and os.access(path, os.R_OK)):
            return None
        raise
    # The hash of the component in the file system. This is used to check,
    # if the entry in the cache is still valid.
    input_hash = self._item_cache_hash(raw_text)
    cache_hash, uid, etag, text, name, tag, start, end = \
        self._load_item_cache(href, input_hash)
    if input_hash != cache_hash:
        with self._acquire_cache_lock("item"):
            # Lock the item cache to prevent multiple processes from
            # generating the same data in parallel.
            # This improves the performance for multiple requests.
            if self._lock.locked == "r":
                # Check if another process created the file in the meantime
                cache_hash, uid, etag, text, name, tag, start, end = \
                    self._load_item_cache(href, input_hash)
            if input_hash != cache_hash:
                try:
                    vobject_items = tuple(vobject.readComponents(
                        raw_text.decode(self._encoding)))
                    radicale_item.check_and_sanitize_items(
                        vobject_items, tag=self.get_meta("tag"))
                    # Exactly one component per stored file is expected.
                    vobject_item, = vobject_items
                    temp_item = radicale_item.Item(
                        collection=self, vobject_item=vobject_item)
                    cache_hash, uid, etag, text, name, tag, start, end = \
                        self._store_item_cache(
                            href, temp_item, input_hash)
                except Exception as e:
                    raise RuntimeError("Failed to load item %r in %r: %s" %
                                       (href, self.path, e)) from e
                # Clean cache entries once after the data in the file
                # system was edited externally.
                if not self._item_cache_cleaned:
                    self._item_cache_cleaned = True
                    self._clean_item_cache()
    last_modified = time.strftime(
        "%a, %d %b %Y %H:%M:%S GMT",
        time.gmtime(os.path.getmtime(path)))
    # Don't keep reference to ``vobject_item``, because it requires a lot
    # of memory.
    return radicale_item.Item(
        collection=self, href=href, last_modified=last_modified,
        etag=etag, text=text, uid=uid, name=name, component_name=tag,
        time_range=(start, end))
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request.

    Uploads a single item or a whole collection at ``path``.  The body is
    parsed and prepared (via the module-level ``prepare`` helper — defined
    elsewhere in this file) before the storage lock is taken; the prepare
    step is redone under the lock only if the guessed tag/mode turns out
    wrong.
    """
    if not self._access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        content = self._read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
    parent_path = pathutils.unstrip_path(
        posixpath.dirname(pathutils.strip_path(path)), True)
    # "Ww" on the target vs. "w" on the parent decides whole-collection
    # versus single-item upload.
    permissions = rights.intersect(
        self._rights.authorization(user, path), "Ww")
    parent_permissions = rights.intersect(
        self._rights.authorization(user, parent_path), "w")
    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning(
            "Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(
         vobject_items, path, content_type, permissions,
         parent_permissions)
    with self._storage.acquire_lock("w", user):
        item = next(self._storage.discover(path), None)
        parent_item = next(self._storage.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))
        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")
        if write_whole_collection:
            # Creating an untagged collection requires "W", a tagged one "w".
            if ("w" if tag else "W") not in self._rights.authorization(
                    user, path):
                return httputils.NOT_ALLOWED
        elif "w" not in self._rights.authorization(user, parent_path):
            return httputils.NOT_ALLOWED
        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return httputils.PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return httputils.PRECONDITION_FAILED
        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return httputils.PRECONDITION_FAILED
        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            # First pass guessed wrong: re-run prepare with the real
            # tag/mode observed under the lock.
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                 vobject_items, path, content_type, permissions,
                 parent_permissions, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            logger.warning(
                "Bad PUT request on %r: %s", path, prepared_exc_info[1],
                exc_info=prepared_exc_info)
            return httputils.BAD_REQUEST
        if write_whole_collection:
            try:
                etag = self._storage.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
        else:
            prepared_item, = prepared_items
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                # UID collision with another href in the same collection.
                return self._webdav_error_response("%s:no-uid-conflict" % (
                    "C" if tag == "VCALENDAR" else "CR"))
            href = posixpath.basename(pathutils.strip_path(path))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
        headers = {"ETag": etag}
        return client.CREATED, headers, None
def _get(self, href, verify_href=True):
    """Load a single item ``href`` from the filesystem collection.

    Returns a ``radicale_item.Item`` or ``None`` when the name is unsafe
    or no file exists.  Parsed metadata is served from the item cache
    when the cached hash matches the file content; otherwise the file is
    re-parsed under the cache lock and the cache refreshed.
    """
    if verify_href:
        try:
            # Reject names that could escape the collection directory.
            if not pathutils.is_safe_filesystem_path_component(href):
                raise pathutils.UnsafePathError(href)
            path = pathutils.path_to_filesystem(self._filesystem_path,
                                                href)
        except ValueError as e:
            logger.debug(
                "Can't translate name %r safely to filesystem in %r: %s",
                href, self.path, e, exc_info=True)
            return None
    else:
        path = os.path.join(self._filesystem_path, href)
    try:
        with open(path, "rb") as f:
            raw_text = f.read()
    except (FileNotFoundError, IsADirectoryError):
        return None
    except PermissionError:
        # Windows raises ``PermissionError`` when ``path`` is a directory
        if (os.name == "nt" and
                os.path.isdir(path) and os.access(path, os.R_OK)):
            return None
        raise
    # The hash of the component in the file system. This is used to check,
    # if the entry in the cache is still valid.
    input_hash = self._item_cache_hash(raw_text)
    cache_hash, uid, etag, text, name, tag, start, end = \
        self._load_item_cache(href, input_hash)
    if input_hash != cache_hash:
        with self._acquire_cache_lock("item"):
            # Lock the item cache to prevent multiple processes from
            # generating the same data in parallel.
            # This improves the performance for multiple requests.
            if self._lock.locked == "r":
                # Check if another process created the file in the meantime
                cache_hash, uid, etag, text, name, tag, start, end = \
                    self._load_item_cache(href, input_hash)
            if input_hash != cache_hash:
                try:
                    vobject_items = tuple(
                        vobject.readComponents(
                            raw_text.decode(self._encoding)))
                    radicale_item.check_and_sanitize_items(
                        vobject_items, tag=self.get_meta("tag"))
                    # Exactly one component per stored file is expected.
                    vobject_item, = vobject_items
                    temp_item = radicale_item.Item(
                        collection=self, vobject_item=vobject_item)
                    cache_hash, uid, etag, text, name, tag, start, end = \
                        self._store_item_cache(
                            href, temp_item, input_hash)
                except Exception as e:
                    raise RuntimeError("Failed to load item %r in %r: %s" %
                                       (href, self.path, e)) from e
                # Clean cache entries once after the data in the file
                # system was edited externally.
                if not self._item_cache_cleaned:
                    self._item_cache_cleaned = True
                    self._clean_item_cache()
    last_modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                  time.gmtime(os.path.getmtime(path)))
    # Don't keep reference to ``vobject_item``, because it requires a lot
    # of memory.
    return radicale_item.Item(collection=self, href=href,
                              last_modified=last_modified, etag=etag,
                              text=text, uid=uid, name=name,
                              component_name=tag, time_range=(start, end))
def login(self, user, password):
    """Check if ``user``/``password`` couple is valid.

    Looks the user up via a (possibly authenticated) search connection,
    then re-binds with the found DN and the supplied password.  Returns
    the user's attribute value on success, ``""`` on failure.
    """
    # Reject empty passwords outright: per RFC 4513 many LDAP servers
    # treat a bind with a DN and an empty password as an *unauthenticated*
    # bind and report success, which would let anyone log in as any
    # existing user.
    if not password:
        logger.debug("LDAP login rejected: empty password for %r", user)
        return ""
    SERVER = ldap3.Server(self.configuration.get("auth", "ldap_url"))
    BASE = self.configuration.get("auth", "ldap_base")
    ATTRIBUTE = self.configuration.get("auth", "ldap_attribute")
    FILTER = self.configuration.get("auth", "ldap_filter")
    BINDDN = self.configuration.get("auth", "ldap_binddn")
    PASSWORD = self.configuration.get("auth", "ldap_password")
    SCOPE = self.configuration.get("auth", "ldap_scope")
    SUPPORT_EXTENDED = self.configuration.get(
        "auth", "ldap_support_extended")
    # Search connection: service account when configured, else anonymous.
    if BINDDN and PASSWORD:
        conn = ldap3.Connection(SERVER, BINDDN, PASSWORD)
    else:
        conn = ldap3.Connection(SERVER)
    conn.bind()
    try:
        # Lazy %-style logger arguments: formatting only happens when the
        # DEBUG level is actually enabled.
        logger.debug("LDAP whoami: %s", conn.extend.standard.who_am_i())
    except Exception as err:
        logger.debug("LDAP error: %s", err)
    # Escape the username so it cannot inject filter syntax.
    distinguished_name = "%s=%s" % (
        ATTRIBUTE, ldap3imports.escape_attribute_value(user))
    logger.debug("LDAP bind for %s in base %s", distinguished_name, BASE)
    if FILTER:
        filter_string = "(&(%s)%s)" % (distinguished_name, FILTER)
    else:
        filter_string = distinguished_name
    logger.debug("LDAP filter: %s", filter_string)
    conn.search(search_base=BASE, search_scope=SCOPE,
                search_filter=filter_string, attributes=[ATTRIBUTE])
    users = conn.response
    if not users:
        logger.debug("LDAP user %s not found", user)
        return ""
    user_dn = users[0]['dn']
    uid = users[0]['attributes'][ATTRIBUTE]
    logger.debug("LDAP user %s (%s) found", uid, user_dn)
    try:
        # Re-bind as the found DN with the supplied password to verify it.
        conn = ldap3.Connection(SERVER, user_dn, password)
        conn.bind()
        logger.debug(conn.result)
        if SUPPORT_EXTENDED:
            whoami = conn.extend.standard.who_am_i()
            logger.debug("LDAP whoami: %s", whoami)
        else:
            logger.debug("LDAP skip extended: call whoami")
            whoami = conn.result['result'] == 0
        if whoami:
            logger.debug("LDAP bind OK")
            return uid[0]
        logger.debug("LDAP bind failed")
        return ""
    except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
        logger.debug("LDAP invalid credentials")
    except Exception as err:
        logger.debug("LDAP error %s", err)
    return ""
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request.

    Uploads a single item or a whole collection at ``path``.  Returns a
    ``(status, headers, answer)`` tuple.  Parsing and rights look-ups run
    before the storage lock is taken; the inner ``prepare`` closure is
    re-run under the lock only if the first guess at tag/mode was wrong.
    """
    if not self.access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        content = self.read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    parent_path = pathutils.unstrip_path(
        posixpath.dirname(pathutils.strip_path(path)), True)
    # "Ww" on the target itself vs. "w" on the parent decides whether this
    # PUT targets a whole collection or a single member item.
    permissions = self.Rights.authorized(user, path, "Ww")
    parent_permissions = self.Rights.authorized(user, parent_path, "w")

    def prepare(vobject_items, tag=None, write_whole_collection=None):
        # Parse/sanitize the uploaded components into storage items.
        # Exceptions are captured into ``stored_exc_info`` rather than
        # raised, so they surface only after preconditions are checked.
        if (write_whole_collection or
                permissions and not parent_permissions):
            write_whole_collection = True
            content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
            # Map MIME type back to a collection tag.
            tags = {
                value: key
                for key, value in xmlutils.MIMETYPES.items()
            }
            tag = radicale_item.predict_tag_of_whole_collection(
                vobject_items, tags.get(content_type))
            if not tag:
                raise ValueError("Can't determine collection tag")
            collection_path = pathutils.strip_path(path)
        elif (write_whole_collection is not None and
                not write_whole_collection or
                not permissions and parent_permissions):
            write_whole_collection = False
            if tag is None:
                tag = radicale_item.predict_tag_of_parent_collection(
                    vobject_items)
            collection_path = posixpath.dirname(pathutils.strip_path(path))
        props = None
        stored_exc_info = None
        items = []
        try:
            if tag:
                radicale_item.check_and_sanitize_items(
                    vobject_items, is_collection=write_whole_collection,
                    tag=tag)
                if write_whole_collection and tag == "VCALENDAR":
                    # One VCALENDAR with many components: split into one
                    # item per UID.
                    vobject_components = []
                    vobject_item, = vobject_items
                    for content in ("vevent", "vtodo", "vjournal"):
                        vobject_components.extend(
                            getattr(vobject_item, "%s_list" % content, []))
                    vobject_components_by_uid = itertools.groupby(
                        sorted(vobject_components,
                               key=radicale_item.get_uid),
                        radicale_item.get_uid)
                    for uid, components in vobject_components_by_uid:
                        vobject_collection = vobject.iCalendar()
                        for component in components:
                            vobject_collection.add(component)
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_collection)
                        item.prepare()
                        items.append(item)
                elif write_whole_collection and tag == "VADDRESSBOOK":
                    # Address books arrive as one vCard per component.
                    for vobject_item in vobject_items:
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_item)
                        item.prepare()
                        items.append(item)
                elif not write_whole_collection:
                    # Single-item upload: exactly one component expected.
                    vobject_item, = vobject_items
                    item = radicale_item.Item(
                        collection_path=collection_path,
                        vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)
            if write_whole_collection:
                props = {}
                if tag:
                    props["tag"] = tag
                # X-WR-CALNAME / X-WR-CALDESC become WebDAV properties.
                if tag == "VCALENDAR" and vobject_items:
                    if hasattr(vobject_items[0], "x_wr_calname"):
                        calname = vobject_items[0].x_wr_calname.value
                        if calname:
                            props["D:displayname"] = calname
                    if hasattr(vobject_items[0], "x_wr_caldesc"):
                        caldesc = vobject_items[0].x_wr_caldesc.value
                        if caldesc:
                            props["C:calendar-description"] = caldesc
                radicale_item.check_and_sanitize_props(props)
        except Exception:
            stored_exc_info = sys.exc_info()

        # Use generator for items and delete references to free memory
        # early
        def items_generator():
            while items:
                yield items.pop(0)
        return (items_generator(), tag, write_whole_collection, props,
                stored_exc_info)

    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    # Optimistic first pass outside the lock.
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(vobject_items)

    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))
        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")
        if write_whole_collection:
            # Creating an untagged collection requires "W", a tagged one "w".
            if not self.Rights.authorized(user, path, "w" if tag else "W"):
                return httputils.NOT_ALLOWED
        elif not self.Rights.authorized(user, parent_path, "w"):
            return httputils.NOT_ALLOWED
        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return httputils.PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return httputils.PRECONDITION_FAILED
        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return httputils.PRECONDITION_FAILED
        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            # First pass guessed wrong: re-run with the real tag/mode.
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                 vobject_items, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            logger.warning("Bad PUT request on %r: %s", path,
                           prepared_exc_info[1],
                           exc_info=prepared_exc_info)
            return httputils.BAD_REQUEST
        if write_whole_collection:
            try:
                etag = self.Collection.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning("Bad PUT request on %r: %s", path, e,
                               exc_info=True)
                return httputils.BAD_REQUEST
        else:
            prepared_item, = prepared_items
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                # UID collision with another href in the same collection.
                return self.webdav_error_response(
                    "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict")
            href = posixpath.basename(pathutils.strip_path(path))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning("Bad PUT request on %r: %s", path, e,
                               exc_info=True)
                return httputils.BAD_REQUEST
        headers = {"ETag": etag}
        return client.CREATED, headers, None
def read_request_body(configuration, environ): content = decode_request(configuration, environ, read_raw_request_body(configuration, environ)) logger.debug("Request content:\n%s", content) return content
def authorized(self, user, path, permissions): logger.debug( "User %r is trying to access path %r. Permissions: %r", user, path, permissions, ) # everybody can access the root collection if path == "/": logger.debug("Accessing root path. Access granted.") return True user = user or "" sane_path = strip_path(path) full_access = "rw" if ("/" in sane_path) else "RW" pathowner, _ = sane_path.split("/", maxsplit=1) # pathowner can be a user... if user == pathowner: logger.debug("User %r is pathowner. Read & Write Access granted.", user) return full_access # ...or a group maybe_groupname = self.group_prefix + pathowner try: group = grp.getgrnam(maybe_groupname) if user in group.gr_mem: logger.debug( "User %r is in pathowner group %r. Read & Write Access granted.", user, pathowner, ) return full_access except KeyError: logger.debug( "Pathowner %r is neither the user nor a valid group.", pathowner, ) logger.debug("Access to path %r is not granted to user %r.", pathowner, user) return ""