def do_PROPFIND(self, environ, base_prefix, path, user): """Manage PROPFIND request.""" if not self.access(user, path, "r"): return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad PROPFIND request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with self.storage.acquire_lock("r", user): items = self.storage.discover(path, environ.get("HTTP_DEPTH", "0")) # take root item for rights checking item = next(items, None) if not item: return httputils.NOT_FOUND if not self.access(user, path, "r", item): return httputils.NOT_ALLOWED # put item back items = itertools.chain([item], items) allowed_items = self._collect_allowed_items(items, user) headers = {"DAV": httputils.DAV_HEADERS, "Content-Type": "text/xml; charset=%s" % self.encoding} status, xml_answer = xml_propfind( base_prefix, path, xml_content, allowed_items, user) if status == client.FORBIDDEN: return httputils.NOT_ALLOWED return status, headers, self.write_xml_content(xml_answer)
def _update_history_etag(self, href, item):
    """Updates and retrieves the history etag from the history cache.

    The history cache contains a file for each current and deleted item
    of the collection. These files contain the etag of the item (empty
    string for deleted items) and a history etag, which is a hash over
    the previous history etag and the etag separated by "/".
    """
    history_folder = os.path.join(self._filesystem_path,
                                  ".Radicale.cache", "history")
    try:
        with open(os.path.join(history_folder, href), "rb") as f:
            cache_etag, history_etag = pickle.load(f)
    except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
        if isinstance(e, (pickle.UnpicklingError, ValueError)):
            logger.warning(
                "Failed to load history cache entry %r in %r: %s",
                href, self.path, e, exc_info=True)
        cache_etag = ""
        # Initialize with random data to prevent collisions with cleaned
        # expired items.
        history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
    etag = item.etag if item else ""
    if etag != cache_etag:
        self._makedirs_synced(history_folder)
        history_etag = radicale_item.get_etag(
            history_etag + "/" + etag).strip("\"")
        try:
            # Race: Other processes might have created and locked the file.
            with self._atomic_write(os.path.join(history_folder, href),
                                    "wb") as f:
                pickle.dump([etag, history_etag], f)
        except PermissionError:
            pass
    return history_etag
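# A minimal standalone sketch of the history-etag chaining described above.
# It assumes radicale_item.get_etag returns a quoted hash of its input (the
# stand-in get_etag below uses sha256); the real implementation may differ.
# Each update hashes the previous history etag together with the current item
# etag, so the chain changes whenever the item changes or is deleted.
import binascii
import os
from hashlib import sha256


def get_etag(text):
    # Stand-in for radicale_item.get_etag: quoted hex digest of the text.
    return '"%s"' % sha256(text.encode()).hexdigest()


# The first entry starts from random data, like the cache-miss branch above.
history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
for item_etag in ('"etag-1"', '"etag-2"', ""):  # "" marks a deleted item
    history_etag = get_etag(history_etag + "/" + item_etag).strip('"')
    print(item_etag or "<deleted>", "->", history_etag)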
def do_PROPFIND(self, environ, base_prefix, path, user): """Manage PROPFIND request.""" if not self.access(user, path, "r"): return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad PROPFIND request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with self.Collection.acquire_lock("r", user): items = self.Collection.discover( path, environ.get("HTTP_DEPTH", "0")) # take root item for rights checking item = next(items, None) if not item: return httputils.NOT_FOUND if not self.access(user, path, "r", item): return httputils.NOT_ALLOWED # put item back items = itertools.chain([item], items) allowed_items = self._collect_allowed_items(items, user) headers = {"DAV": httputils.DAV_HEADERS, "Content-Type": "text/xml; charset=%s" % self.encoding} status, xml_answer = xml_propfind( base_prefix, path, xml_content, allowed_items, user) if status == client.FORBIDDEN: return httputils.NOT_ALLOWED return status, headers, self.write_xml_content(xml_answer)
def do_PROPPATCH(self, environ, base_prefix, path, user): """Manage PROPPATCH request.""" if not self.access(user, path, "w"): return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) if not item: return httputils.NOT_FOUND if not self.access(user, path, "w", item): return httputils.NOT_ALLOWED if not isinstance(item, storage.BaseCollection): return httputils.FORBIDDEN headers = {"DAV": httputils.DAV_HEADERS, "Content-Type": "text/xml; charset=%s" % self.encoding} try: xml_answer = xml_proppatch(base_prefix, path, xml_content, item) except ValueError as e: logger.warning( "Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return (client.MULTI_STATUS, headers, self.write_xml_content(xml_answer))
def do_MOVE(self, environ, base_prefix, path, user): """Manage MOVE request.""" raw_dest = environ.get("HTTP_DESTINATION", "") to_url = urlparse(raw_dest) if to_url.netloc != environ["HTTP_HOST"]: logger.info("Unsupported destination address: %r", raw_dest) # Remote destination server, not supported return httputils.REMOTE_DESTINATION if not self.access(user, path, "w"): return httputils.NOT_ALLOWED to_path = pathutils.sanitize_path(to_url.path) if not (to_path + "/").startswith(base_prefix + "/"): logger.warning( "Destination %r from MOVE request on %r doesn't " "start with base prefix", to_path, path) return httputils.NOT_ALLOWED to_path = to_path[len(base_prefix):] if not self.access(user, to_path, "w"): return httputils.NOT_ALLOWED with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) if not item: return httputils.NOT_FOUND if (not self.access(user, path, "w", item) or not self.access(user, to_path, "w", item)): return httputils.NOT_ALLOWED if isinstance(item, storage.BaseCollection): # TODO: support moving collections return httputils.METHOD_NOT_ALLOWED to_item = next(self.Collection.discover(to_path), None) if isinstance(to_item, storage.BaseCollection): return httputils.FORBIDDEN to_parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(to_path)), True) to_collection = next(self.Collection.discover(to_parent_path), None) if not to_collection: return httputils.CONFLICT tag = item.collection.get_meta("tag") if not tag or tag != to_collection.get_meta("tag"): return httputils.FORBIDDEN if to_item and environ.get("HTTP_OVERWRITE", "F") != "T": return httputils.PRECONDITION_FAILED if (to_item and item.uid != to_item.uid or not to_item and to_collection.path != item.collection.path and to_collection.has_uid(item.uid)): return self.webdav_error_response( "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict") to_href = posixpath.basename(pathutils.strip_path(to_path)) try: self.Collection.move(item, to_collection, to_href) except ValueError as e: logger.warning("Bad MOVE request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return client.NO_CONTENT if to_item else client.CREATED, {}, None
def do_PROPPATCH(self, environ, base_prefix, path, user): """Manage PROPPATCH request.""" access = app.Access(self._rights, user, path) if not access.check("w"): return httputils.NOT_ALLOWED try: xml_content = self._read_xml_request_body(environ) except RuntimeError as e: logger.warning( "Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("Client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with self._storage.acquire_lock("w", user): item = next(self._storage.discover(path), None) if not item: return httputils.NOT_FOUND if not access.check("w", item): return httputils.NOT_ALLOWED if not isinstance(item, storage.BaseCollection): return httputils.FORBIDDEN headers = {"DAV": httputils.DAV_HEADERS, "Content-Type": "text/xml; charset=%s" % self._encoding} try: xml_answer = xml_proppatch(base_prefix, path, xml_content, item) except ValueError as e: logger.warning( "Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return client.MULTI_STATUS, headers, self._xml_response(xml_answer)
def do_REPORT(self, environ, base_prefix, path, user): """Manage REPORT request.""" if not self.access(user, path, "r"): return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad REPORT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with contextlib.ExitStack() as lock_stack: lock_stack.enter_context(self.Collection.acquire_lock("r", user)) item = next(self.Collection.discover(path), None) if not item: return httputils.NOT_FOUND if not self.access(user, path, "r", item): return httputils.NOT_ALLOWED if isinstance(item, storage.BaseCollection): collection = item else: collection = item.collection headers = {"Content-Type": "text/xml; charset=%s" % self.encoding} try: status, xml_answer = xml_report( base_prefix, path, xml_content, collection, lock_stack.close) except ValueError as e: logger.warning( "Bad REPORT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return (status, headers, self.write_xml_content(xml_answer))
def do_REPORT(self, environ, base_prefix, path, user): """Manage REPORT request.""" if not self.access(user, path, "r"): return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad REPORT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT with contextlib.ExitStack() as lock_stack: lock_stack.enter_context(self.storage.acquire_lock("r", user)) item = next(self.storage.discover(path), None) if not item: return httputils.NOT_FOUND if not self.access(user, path, "r", item): return httputils.NOT_ALLOWED if isinstance(item, storage.BaseCollection): collection = item else: collection = item.collection headers = {"Content-Type": "text/xml; charset=%s" % self.encoding} try: status, xml_answer = xml_report( base_prefix, path, xml_content, collection, lock_stack.close) except ValueError as e: logger.warning( "Bad REPORT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return (status, headers, self.write_xml_content(xml_answer))
def _list(self):
    """List collection items."""
    if not self.adapter:
        logger.warning(
            "No adapter for collection: %r, please provide a full path",
            self.path)
        return
    for uid in self.adapter.get_uids(self.filename):
        yield uid
def comp_match(item, filter_, level=0):
    """Check whether the ``item`` matches the comp ``filter_``.

    If ``level`` is ``0``, the filter is applied on the item's collection.
    Otherwise, it's applied on the item.

    See rfc4791-9.7.1.
    """
    # TODO: Filtering VALARM and VFREEBUSY is not implemented
    # HACK: the filters are tested separately against all components
    if level == 0:
        tag = item.name
    elif level == 1:
        tag = item.component_name
    else:
        logger.warning(
            "Filters with three levels of comp-filter are not supported")
        return True
    if not tag:
        return False
    name = filter_.get("name").upper()
    if len(filter_) == 0:
        # Point #1 of rfc4791-9.7.1
        return name == tag
    if len(filter_) == 1:
        if filter_[0].tag == xmlutils.make_tag("C", "is-not-defined"):
            # Point #2 of rfc4791-9.7.1
            return name != tag
    if name != tag:
        return False
    if (level == 0 and name != "VCALENDAR" or
            level == 1 and name not in ("VTODO", "VEVENT", "VJOURNAL")):
        logger.warning("Filtering %s is not supported", name)
        return True
    # Point #3 and #4 of rfc4791-9.7.1
    components = ([item.vobject_item] if level == 0
                  else list(getattr(item.vobject_item,
                                    "%s_list" % tag.lower())))
    for child in filter_:
        if child.tag == xmlutils.make_tag("C", "prop-filter"):
            if not any(prop_match(comp, child, "C")
                       for comp in components):
                return False
        elif child.tag == xmlutils.make_tag("C", "time-range"):
            if not time_range_match(item.vobject_item, filter_[0], tag):
                return False
        elif child.tag == xmlutils.make_tag("C", "comp-filter"):
            if not comp_match(item, child, level=level + 1):
                return False
        else:
            raise ValueError("Unexpected %r in comp-filter" % child.tag)
    return True
def acquire_lock(self, mode, user=None):
    with self._lock.acquire(mode) as lock_file:
        yield
        # execute hook
        hook = self.configuration.get("storage", "hook")
        if mode == "w" and hook:
            folder = self.configuration.get("storage", "filesystem_folder")
            debug = logger.isEnabledFor(logging.DEBUG)
            popen_kwargs = dict(
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                shell=True, universal_newlines=True, cwd=folder)
            if os.name == "posix":
                # Pass the lock_file to the subprocess to ensure the lock
                # doesn't get released if this process is killed but the
                # child process lives on
                popen_kwargs["pass_fds"] = [lock_file.fileno()]
            # Use new process group for child to prevent terminals
            # from sending SIGINT etc. to it.
            if os.name == "posix":
                # Process group is also used to identify child processes
                popen_kwargs["preexec_fn"] = os.setpgrp
            elif os.name == "nt":
                popen_kwargs["creationflags"] = (
                    subprocess.CREATE_NEW_PROCESS_GROUP)
            command = hook % {"user": shlex.quote(user or "Anonymous")}
            logger.debug("Running hook")
            p = subprocess.Popen(command, **popen_kwargs)
            try:
                stdout_data, stderr_data = p.communicate()
            except BaseException:
                # Terminate the process on error (e.g. KeyboardInterrupt)
                p.terminate()
                p.wait()
                raise
            finally:
                if os.name == "posix":
                    # Try to kill remaining children of process, identified
                    # by process group
                    try:
                        os.killpg(p.pid, signal.SIGKILL)
                    except ProcessLookupError:
                        pass  # No remaining processes found
                    else:
                        logger.warning(
                            "Killed remaining child processes of hook")
            if stdout_data:
                logger.debug("Captured stdout hook:\n%s", stdout_data)
            if stderr_data:
                logger.debug("Captured stderr hook:\n%s", stderr_data)
            if p.returncode != 0:
                raise subprocess.CalledProcessError(p.returncode, p.args)
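# A short sketch of how the hook above expands its command before running it.
# The hook string lives in the "storage" section of the configuration; the git
# command below is the example from the Radicale documentation, and the user
# name is illustrative. The expanded command is then executed with subprocess
# (shell=True, cwd=<filesystem_folder>), as in the method above.
import shlex

hook = ('git add -A && (git diff --cached --quiet || '
        'git commit -m "Changes by "%(user)s)')
command = hook % {"user": shlex.quote("alice smith")}
print(command)
# git add -A && (git diff --cached --quiet || git commit -m "Changes by "'alice smith')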
def comp_match(item, filter_, level=0): """Check whether the ``item`` matches the comp ``filter_``. If ``level`` is ``0``, the filter is applied on the item's collection. Otherwise, it's applied on the item. See rfc4791-9.7.1. """ # TODO: Filtering VALARM and VFREEBUSY is not implemented # HACK: the filters are tested separately against all components if level == 0: tag = item.name elif level == 1: tag = item.component_name else: logger.warning( "Filters with three levels of comp-filter are not supported") return True if not tag: return False name = filter_.get("name").upper() if len(filter_) == 0: # Point #1 of rfc4791-9.7.1 return name == tag if len(filter_) == 1: if filter_[0].tag == xmlutils.make_clark("C:is-not-defined"): # Point #2 of rfc4791-9.7.1 return name != tag if name != tag: return False if (level == 0 and name != "VCALENDAR" or level == 1 and name not in ("VTODO", "VEVENT", "VJOURNAL")): logger.warning("Filtering %s is not supported", name) return True # Point #3 and #4 of rfc4791-9.7.1 components = ([item.vobject_item] if level == 0 else list(getattr(item.vobject_item, "%s_list" % tag.lower()))) for child in filter_: if child.tag == xmlutils.make_clark("C:prop-filter"): if not any(prop_match(comp, child, "C") for comp in components): return False elif child.tag == xmlutils.make_clark("C:time-range"): if not time_range_match(item.vobject_item, filter_[0], tag): return False elif child.tag == xmlutils.make_clark("C:comp-filter"): if not comp_match(item, child, level=level + 1): return False else: raise ValueError("Unexpected %r in comp-filter" % child.tag) return True
def _content_disposition_attachment(self, filename):
    value = "attachment"
    try:
        encoded_filename = quote(filename, encoding=self.encoding)
    except UnicodeEncodeError:
        logger.warning("Failed to encode filename: %r", filename,
                       exc_info=True)
        encoded_filename = ""
    if encoded_filename:
        value += "; filename*=%s''%s" % (self.encoding, encoded_filename)
    return value
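# A small standalone demonstration of the header value built above, using the
# RFC 6266 / RFC 5987 "filename*" extended parameter. The encoding name and
# file name are examples only.
from urllib.parse import quote

encoding = "utf-8"
filename = "calendrier d'été.ics"

value = "attachment"
encoded_filename = quote(filename, encoding=encoding)
if encoded_filename:
    value += "; filename*=%s''%s" % (encoding, encoded_filename)
print("Content-Disposition:", value)
# Content-Disposition: attachment; filename*=utf-8''calendrier%20d%27%C3%A9t%C3%A9.ics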
def upload(self, href, item):
    """Upload a new or replace an existing item."""
    if href in self.adapter.get_uids(self.filename):
        uid = self.adapter.replace_vobject(href, item.vobject_item,
                                           self.filename)
    else:
        uid = self.adapter.append_vobject(item.vobject_item, self.filename)
    try:
        return self._get(uid)
    except KeyError:
        # Not found after writing (e.g. outside the lookahead window);
        # None is returned in that case.
        logger.warning("Unable to find uploaded event, "
                       "maybe increase remind_lookahead_month")
def _load_item_cache(self, href, input_hash):
    cache_folder = os.path.join(self._filesystem_path,
                                ".Radicale.cache", "item")
    cache_hash = uid = etag = text = name = tag = start = end = None
    try:
        with open(os.path.join(cache_folder, href), "rb") as f:
            cache_hash, *content = pickle.load(f)
        if cache_hash == input_hash:
            uid, etag, text, name, tag, start, end = content
    except FileNotFoundError:
        pass
    except (pickle.UnpicklingError, ValueError) as e:
        logger.warning("Failed to load item cache entry %r in %r: %s",
                       href, self.path, e, exc_info=True)
    return cache_hash, uid, etag, text, name, tag, start, end
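# A minimal round-trip sketch of the cache entry layout that the loader above
# expects: a pickled list whose first element is the hash of the raw file
# content, followed by the seven cached fields. All values are placeholders;
# the real entries are written elsewhere in the storage backend.
import pickle
import tempfile

entry = ["<hash-of-raw-file>",  # cache_hash, compared against input_hash
         "<uid>", "<etag>", "<serialized item>", "<name>", "<tag>",
         None, None]            # start, end
with tempfile.TemporaryFile() as f:
    pickle.dump(entry, f)
    f.seek(0)
    cache_hash, *content = pickle.load(f)
uid, etag, text, name, tag, start, end = content
print(cache_hash, uid, tag)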
def get_names():
    """Extracts all names from references in ``hreferences`` and adds
    404 responses for invalid references to ``multistatus``.

    If the whole collection is referenced ``collection_requested``
    gets set to ``True``.
    """
    nonlocal collection_requested
    for hreference in hreferences:
        try:
            name = pathutils.name_from_path(hreference, collection)
        except ValueError as e:
            logger.warning("Skipping invalid path %r in REPORT request"
                           " on %r: %s", hreference, path, e)
            response = xml_item_response(base_prefix, hreference,
                                         found_item=False)
            multistatus.append(response)
            continue
        if name:
            # Reference is an item
            yield name
        else:
            # Reference is a collection
            collection_requested = True
def do_PROPPATCH(self, environ, base_prefix, path, user): """Manage PROPPATCH request.""" if not self._access(user, path, "w"): return NOT_ALLOWED try: xml_content = self._read_xml_content(environ) except RuntimeError as e: logger.warning("Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return BAD_REQUEST except socket.timeout as e: logger.debug("client timed out", exc_info=True) return REQUEST_TIMEOUT with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) if not item: return NOT_FOUND if not self._access(user, path, "w", item): return NOT_ALLOWED if not isinstance(item, storage.BaseCollection): return FORBIDDEN headers = { "DAV": DAV_HEADERS, "Content-Type": "text/xml; charset=%s" % self.encoding } try: xml_answer = xmlutils.proppatch(base_prefix, path, xml_content, item) except ValueError as e: logger.warning("Bad PROPPATCH request on %r: %s", path, e, exc_info=True) return BAD_REQUEST return (client.MULTI_STATUS, headers, self._write_xml_content(xml_answer))
def do_MKCALENDAR(self, environ, base_prefix, path, user): """Manage MKCALENDAR request.""" if "w" not in self._rights.authorization(user, path): return httputils.NOT_ALLOWED try: xml_content = self._read_xml_request_body(environ) except RuntimeError as e: logger.warning("Bad MKCALENDAR request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("Client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT # Prepare before locking props = xmlutils.props_from_request(xml_content) props = {k: v for k, v in props.items() if v is not None} props["tag"] = "VCALENDAR" # TODO: use this? # timezone = props.get("C:calendar-timezone") try: radicale_item.check_and_sanitize_props(props) except ValueError as e: logger.warning("Bad MKCALENDAR request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST with self._storage.acquire_lock("w", user): item = next(self._storage.discover(path), None) if item: return self._webdav_error_response(client.CONFLICT, "D:resource-must-be-null") parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(path)), True) parent_item = next(self._storage.discover(parent_path), None) if not parent_item: return httputils.CONFLICT if (not isinstance(parent_item, storage.BaseCollection) or parent_item.get_meta("tag")): return httputils.FORBIDDEN try: self._storage.create_collection(path, props=props) except ValueError as e: logger.warning("Bad MKCALENDAR request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return client.CREATED, {}, None
def do_MKCOL(self, environ, base_prefix, path, user, context=None): """Manage MKCOL request.""" permissions = self._rights.authorization(user, path) if not rights.intersect(permissions, "Ww"): return httputils.NOT_ALLOWED try: xml_content = self._read_xml_request_body(environ) except RuntimeError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("Client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT # Prepare before locking props = xmlutils.props_from_request(xml_content) props = {k: v for k, v in props.items() if v is not None} try: radicale_item.check_and_sanitize_props(props) except ValueError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST if (props.get("tag") and "w" not in permissions or not props.get("tag") and "W" not in permissions): return httputils.NOT_ALLOWED with self._storage.acquire_lock("w", user): item = next(self._storage.discover(path), None) if item: return httputils.METHOD_NOT_ALLOWED parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(path)), True) parent_item = next(self._storage.discover(parent_path), None) if not parent_item: return httputils.CONFLICT if (not isinstance(parent_item, storage.BaseCollection) or parent_item.get_meta("tag")): return httputils.FORBIDDEN try: self._storage.create_collection(path, props=props) except ValueError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return client.CREATED, {}, None
def do_MKCOL(self, environ, base_prefix, path, user): """Manage MKCOL request.""" permissions = self.Rights.authorized(user, path, "Ww") if not permissions: return NOT_ALLOWED try: xml_content = self._read_xml_content(environ) except RuntimeError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return BAD_REQUEST except socket.timeout as e: logger.debug("client timed out", exc_info=True) return REQUEST_TIMEOUT # Prepare before locking props = xmlutils.props_from_request(xml_content) try: storage.check_and_sanitize_props(props) except ValueError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return BAD_REQUEST if (props.get("tag") and "w" not in permissions or not props.get("tag") and "W" not in permissions): return NOT_ALLOWED with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) if item: return METHOD_NOT_ALLOWED parent_path = storage.sanitize_path( "/%s/" % posixpath.dirname(path.strip("/"))) parent_item = next(self.Collection.discover(parent_path), None) if not parent_item: return CONFLICT if (not isinstance(parent_item, storage.BaseCollection) or parent_item.get_meta("tag")): return FORBIDDEN try: self.Collection.create_collection(path, props=props) except ValueError as e: logger.warning("Bad MKCOL request on %r: %s", path, e, exc_info=True) return BAD_REQUEST return client.CREATED, {}, None
def do_MKCALENDAR(self, environ, base_prefix, path, user):
    """Manage MKCALENDAR request."""
    if not self.Rights.authorized(user, path, "w"):
        return NOT_ALLOWED
    try:
        xml_content = self._read_xml_content(environ)
    except RuntimeError as e:
        logger.warning("Bad MKCALENDAR request on %r: %s", path, e,
                       exc_info=True)
        return BAD_REQUEST
    except socket.timeout as e:
        logger.debug("client timed out", exc_info=True)
        return REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    props["tag"] = "VCALENDAR"
    # TODO: use this?
    # timezone = props.get("C:calendar-timezone")
    try:
        storage.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning("Bad MKCALENDAR request on %r: %s", path, e,
                       exc_info=True)
        return BAD_REQUEST
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            return self._webdav_error_response("D", "resource-must-be-null")
        parent_path = storage.sanitize_path(
            "/%s/" % posixpath.dirname(path.strip("/")))
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return CONFLICT
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning("Bad MKCALENDAR request on %r: %s", path, e,
                           exc_info=True)
            return BAD_REQUEST
    return client.CREATED, {}, None
def do_MKCOL(self, environ, base_prefix, path, user): """Manage MKCOL request.""" permissions = self.Rights.authorized(user, path, "Ww") if not permissions: return httputils.NOT_ALLOWED try: xml_content = self.read_xml_content(environ) except RuntimeError as e: logger.warning( "Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT # Prepare before locking props = xmlutils.props_from_request(xml_content) try: radicale_item.check_and_sanitize_props(props) except ValueError as e: logger.warning( "Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST if (props.get("tag") and "w" not in permissions or not props.get("tag") and "W" not in permissions): return httputils.NOT_ALLOWED with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) if item: return httputils.METHOD_NOT_ALLOWED parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(path)), True) parent_item = next(self.Collection.discover(parent_path), None) if not parent_item: return httputils.CONFLICT if (not isinstance(parent_item, storage.BaseCollection) or parent_item.get_meta("tag")): return httputils.FORBIDDEN try: self.Collection.create_collection(path, props=props) except ValueError as e: logger.warning( "Bad MKCOL request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST return client.CREATED, {}, None
def do_MKCALENDAR(self, environ, base_prefix, path, user):
    """Manage MKCALENDAR request."""
    if not self.Rights.authorized(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        xml_content = self.read_xml_content(environ)
    except RuntimeError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    props = xmlutils.props_from_request(xml_content)
    props["tag"] = "VCALENDAR"
    # TODO: use this?
    # timezone = props.get("C:calendar-timezone")
    try:
        radicale_item.check_and_sanitize_props(props)
    except ValueError as e:
        logger.warning(
            "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        if item:
            return self.webdav_error_response(
                "D", "resource-must-be-null")
        parent_path = pathutils.unstrip_path(
            posixpath.dirname(pathutils.strip_path(path)), True)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        if (not isinstance(parent_item, storage.BaseCollection) or
                parent_item.get_meta("tag")):
            return httputils.FORBIDDEN
        try:
            self.Collection.create_collection(path, props=props)
        except ValueError as e:
            logger.warning(
                "Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
            return httputils.BAD_REQUEST
    return client.CREATED, {}, None
def xml_report(base_prefix, path, xml_request, collection, encoding, unlock_storage_fn): """Read and answer REPORT requests. Read rfc3253-3.6 for info. """ multistatus = ET.Element(xmlutils.make_clark("D:multistatus")) if xml_request is None: return client.MULTI_STATUS, multistatus root = xml_request if root.tag in (xmlutils.make_clark("D:principal-search-property-set"), xmlutils.make_clark("D:principal-property-search"), xmlutils.make_clark("D:expand-property")): # We don't support searching for principals or indirect retrieving of # properties, just return an empty result. # InfCloud asks for expand-property reports (even if we don't announce # support for them) and stops working if an error code is returned. logger.warning("Unsupported REPORT method %r on %r requested", xmlutils.make_human_tag(root.tag), path) return client.MULTI_STATUS, multistatus if (root.tag == xmlutils.make_clark("C:calendar-multiget") and collection.get_meta("tag") != "VCALENDAR" or root.tag == xmlutils.make_clark("CR:addressbook-multiget") and collection.get_meta("tag") != "VADDRESSBOOK" or root.tag == xmlutils.make_clark("D:sync-collection") and collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")): logger.warning("Invalid REPORT method %r on %r requested", xmlutils.make_human_tag(root.tag), path) return (client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")) prop_element = root.find(xmlutils.make_clark("D:prop")) props = ([prop.tag for prop in prop_element] if prop_element is not None else []) if root.tag in (xmlutils.make_clark("C:calendar-multiget"), xmlutils.make_clark("CR:addressbook-multiget")): # Read rfc4791-7.9 for info hreferences = set() for href_element in root.findall(xmlutils.make_clark("D:href")): href_path = pathutils.sanitize_path( unquote(urlparse(href_element.text).path)) if (href_path + "/").startswith(base_prefix + "/"): hreferences.add(href_path[len(base_prefix):]) else: logger.warning( "Skipping invalid path %r in REPORT request on " "%r", href_path, path) elif root.tag == xmlutils.make_clark("D:sync-collection"): old_sync_token_element = root.find(xmlutils.make_clark("D:sync-token")) old_sync_token = "" if old_sync_token_element is not None and old_sync_token_element.text: old_sync_token = old_sync_token_element.text.strip() logger.debug("Client provided sync token: %r", old_sync_token) try: sync_token, names = collection.sync(old_sync_token) except ValueError as e: # Invalid sync token logger.warning("Client provided invalid sync token %r: %s", old_sync_token, e, exc_info=True) # client.CONFLICT doesn't work with some clients (e.g. InfCloud) return (client.FORBIDDEN, xmlutils.webdav_error("D:valid-sync-token")) hreferences = (pathutils.unstrip_path( posixpath.join(collection.path, n)) for n in names) # Append current sync token to response sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token")) sync_token_element.text = sync_token multistatus.append(sync_token_element) else: hreferences = (path, ) filters = (root.findall(xmlutils.make_clark("C:filter")) + root.findall(xmlutils.make_clark("CR:filter"))) def retrieve_items(collection, hreferences, multistatus): """Retrieves all items that are referenced in ``hreferences`` from ``collection`` and adds 404 responses for missing and invalid items to ``multistatus``.""" collection_requested = False def get_names(): """Extracts all names from references in ``hreferences`` and adds 404 responses for invalid references to ``multistatus``. 
If the whole collections is referenced ``collection_requested`` gets set to ``True``.""" nonlocal collection_requested for hreference in hreferences: try: name = pathutils.name_from_path(hreference, collection) except ValueError as e: logger.warning( "Skipping invalid path %r in REPORT request" " on %r: %s", hreference, path, e) response = xml_item_response(base_prefix, hreference, found_item=False) multistatus.append(response) continue if name: # Reference is an item yield name else: # Reference is a collection collection_requested = True for name, item in collection.get_multi(get_names()): if not item: uri = pathutils.unstrip_path( posixpath.join(collection.path, name)) response = xml_item_response(base_prefix, uri, found_item=False) multistatus.append(response) else: yield item, False if collection_requested: yield from collection.get_filtered(filters) # Retrieve everything required for finishing the request. retrieved_items = list(retrieve_items(collection, hreferences, multistatus)) collection_tag = collection.get_meta("tag") # Don't access storage after this! unlock_storage_fn() def match(item, filter_): tag = collection_tag if (tag == "VCALENDAR" and filter_.tag != xmlutils.make_clark("C:%s" % filter_)): if len(filter_) == 0: return True if len(filter_) > 1: raise ValueError("Filter with %d children" % len(filter_)) if filter_[0].tag != xmlutils.make_clark("C:comp-filter"): raise ValueError("Unexpected %r in filter" % filter_[0].tag) return radicale_filter.comp_match(item, filter_[0]) if (tag == "VADDRESSBOOK" and filter_.tag != xmlutils.make_clark("CR:%s" % filter_)): for child in filter_: if child.tag != xmlutils.make_clark("CR:prop-filter"): raise ValueError("Unexpected %r in filter" % child.tag) test = filter_.get("test", "anyof") if test == "anyof": return any( radicale_filter.prop_match(item.vobject_item, f, "CR") for f in filter_) if test == "allof": return all( radicale_filter.prop_match(item.vobject_item, f, "CR") for f in filter_) raise ValueError("Unsupported filter test: %r" % test) raise ValueError("Unsupported filter %r for %r" % (filter_.tag, tag)) while retrieved_items: # ``item.vobject_item`` might be accessed during filtering. # Don't keep reference to ``item``, because VObject requires a lot of # memory. item, filters_matched = retrieved_items.pop(0) if filters and not filters_matched: try: if not all(match(item, filter_) for filter_ in filters): continue except ValueError as e: raise ValueError("Failed to filter item %r from %r: %s" % (item.href, collection.path, e)) from e except Exception as e: raise RuntimeError("Failed to filter item %r from %r: %s" % (item.href, collection.path, e)) from e found_props = [] not_found_props = [] for tag in props: element = ET.Element(tag) if tag == xmlutils.make_clark("D:getetag"): element.text = item.etag found_props.append(element) elif tag == xmlutils.make_clark("D:getcontenttype"): element.text = xmlutils.get_content_type(item, encoding) found_props.append(element) elif tag in (xmlutils.make_clark("C:calendar-data"), xmlutils.make_clark("CR:address-data")): element.text = item.serialize() found_props.append(element) else: not_found_props.append(element) uri = pathutils.unstrip_path(posixpath.join(collection.path, item.href)) multistatus.append( xml_item_response(base_prefix, uri, found_props=found_props, not_found_props=not_found_props, found_item=True)) return client.MULTI_STATUS, multistatus
def _handle_request(self, environ): """Manage a request.""" def response(status, headers=(), answer=None): headers = dict(headers) # Set content length if answer: if hasattr(answer, "encode"): logger.debug("Response content:\n%s", answer) headers["Content-Type"] += "; charset=%s" % self._encoding answer = answer.encode(self._encoding) accept_encoding = [ encoding.strip() for encoding in environ.get( "HTTP_ACCEPT_ENCODING", "").split(",") if encoding.strip() ] if "gzip" in accept_encoding: zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS) answer = zcomp.compress(answer) + zcomp.flush() headers["Content-Encoding"] = "gzip" headers["Content-Length"] = str(len(answer)) # Add extra headers set in configuration for key in self.configuration.options("headers"): headers[key] = self.configuration.get("headers", key) # Start response time_end = datetime.datetime.now() status = "%d %s" % (status, client.responses.get( status, "Unknown")) logger.info("%s response status for %r%s in %.3f seconds: %s", environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo, (time_end - time_begin).total_seconds(), status) # Return response content return status, list(headers.items()), [answer] if answer else [] remote_host = "unknown" if environ.get("REMOTE_HOST"): remote_host = repr(environ["REMOTE_HOST"]) elif environ.get("REMOTE_ADDR"): remote_host = environ["REMOTE_ADDR"] if environ.get("HTTP_X_FORWARDED_FOR"): remote_host = "%s (forwarded for %r)" % ( remote_host, environ["HTTP_X_FORWARDED_FOR"]) remote_useragent = "" if environ.get("HTTP_USER_AGENT"): remote_useragent = " using %r" % environ["HTTP_USER_AGENT"] depthinfo = "" if environ.get("HTTP_DEPTH"): depthinfo = " with depth %r" % environ["HTTP_DEPTH"] time_begin = datetime.datetime.now() logger.info("%s request for %r%s received from %s%s", environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo, remote_host, remote_useragent) headers = pprint.pformat(self._headers_log(environ)) logger.debug("Request headers:\n%s", headers) # Let reverse proxies overwrite SCRIPT_NAME if "HTTP_X_SCRIPT_NAME" in environ: # script_name must be removed from PATH_INFO by the client. unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"] logger.debug("Script name overwritten by client: %r", unsafe_base_prefix) else: # SCRIPT_NAME is already removed from PATH_INFO, according to the # WSGI specification. 
unsafe_base_prefix = environ.get("SCRIPT_NAME", "") # Sanitize base prefix base_prefix = pathutils.sanitize_path(unsafe_base_prefix).rstrip("/") logger.debug("Sanitized script name: %r", base_prefix) # Sanitize request URI (a WSGI server indicates with an empty path, # that the URL targets the application root without a trailing slash) path = pathutils.sanitize_path(environ.get("PATH_INFO", "")) logger.debug("Sanitized path: %r", path) # Get function corresponding to method function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper(), None) if not function: return response(*httputils.METHOD_NOT_ALLOWED) # If "/.well-known" is not available, clients query "/" if path == "/.well-known" or path.startswith("/.well-known/"): return response(*httputils.NOT_FOUND) # Ask authentication backend to check rights login = password = "" external_login = self._auth.get_external_login(environ) authorization = environ.get("HTTP_AUTHORIZATION", "") if external_login: login, password = external_login login, password = login or "", password or "" elif authorization.startswith("Basic"): authorization = authorization[len("Basic"):].strip() login, password = httputils.decode_request( self.configuration, environ, base64.b64decode(authorization.encode("ascii"))).split(":", 1) user = self._auth.login(login, password) or "" if login else "" if user and login == user: logger.info("Successful login: %r", user) elif user: logger.info("Successful login: %r -> %r", login, user) elif login: logger.warning("Failed login attempt from %s: %r", remote_host, login) # Random delay to avoid timing oracles and bruteforce attacks delay = self.configuration.get("auth", "delay") if delay > 0: random_delay = delay * (0.5 + random.random()) logger.debug("Sleeping %.3f seconds", random_delay) time.sleep(random_delay) if user and not pathutils.is_safe_path_component(user): # Prevent usernames like "user/calendar.ics" logger.info("Refused unsafe username: %r", user) user = "" # Create principal collection if user: principal_path = "/%s/" % user with self._storage.acquire_lock("r", user): principal = next( self._storage.discover(principal_path, depth="1"), None) if not principal: if "W" in self._rights.authorization(user, principal_path): with self._storage.acquire_lock("w", user): try: self._storage.create_collection(principal_path) except ValueError as e: logger.warning( "Failed to create principal " "collection %r: %s", user, e) user = "" else: logger.warning( "Access to principal path %r denied by " "rights backend", principal_path) if self.configuration.get("server", "_internal_server"): # Verify content length content_length = int(environ.get("CONTENT_LENGTH") or 0) if content_length: max_content_length = self.configuration.get( "server", "max_content_length") if max_content_length and content_length > max_content_length: logger.info("Request body too large: %d", content_length) return response(*httputils.REQUEST_ENTITY_TOO_LARGE) if not login or user: status, headers, answer = function(environ, base_prefix, path, user) if (status, headers, answer) == httputils.NOT_ALLOWED: logger.info("Access to %r denied for %s", path, repr(user) if user else "anonymous user") else: status, headers, answer = httputils.NOT_ALLOWED if ((status, headers, answer) == httputils.NOT_ALLOWED and not user and not external_login): # Unknown or unauthorized user logger.debug("Asking client for authentication") status = client.UNAUTHORIZED realm = self.configuration.get("auth", "realm") headers = dict(headers) headers.update({"WWW-Authenticate": 
"Basic realm=\"%s\"" % realm}) return response(status, headers, answer)
def do_PUT(self, environ, base_prefix, path, user): """Manage PUT request.""" if not self._access(user, path, "w"): return httputils.NOT_ALLOWED try: content = self._read_content(environ) except RuntimeError as e: logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT # Prepare before locking content_type = environ.get("CONTENT_TYPE", "").split(";")[0] parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(path)), True) permissions = rights.intersect( self._rights.authorization(user, path), "Ww") parent_permissions = rights.intersect( self._rights.authorization(user, parent_path), "w") try: vobject_items = tuple(vobject.readComponents(content or "")) except Exception as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST (prepared_items, prepared_tag, prepared_write_whole_collection, prepared_props, prepared_exc_info) = prepare( vobject_items, path, content_type, permissions, parent_permissions) with self._storage.acquire_lock("w", user): item = next(self._storage.discover(path), None) parent_item = next(self._storage.discover(parent_path), None) if not parent_item: return httputils.CONFLICT write_whole_collection = ( isinstance(item, storage.BaseCollection) or not parent_item.get_meta("tag")) if write_whole_collection: tag = prepared_tag else: tag = parent_item.get_meta("tag") if write_whole_collection: if ("w" if tag else "W") not in self._rights.authorization( user, path): return httputils.NOT_ALLOWED elif "w" not in self._rights.authorization(user, parent_path): return httputils.NOT_ALLOWED etag = environ.get("HTTP_IF_MATCH", "") if not item and etag: # Etag asked but no item found: item has been removed return httputils.PRECONDITION_FAILED if item and etag and item.etag != etag: # Etag asked but item not matching: item has changed return httputils.PRECONDITION_FAILED match = environ.get("HTTP_IF_NONE_MATCH", "") == "*" if item and match: # Creation asked but item found: item can't be replaced return httputils.PRECONDITION_FAILED if (tag != prepared_tag or prepared_write_whole_collection != write_whole_collection): (prepared_items, prepared_tag, prepared_write_whole_collection, prepared_props, prepared_exc_info) = prepare( vobject_items, path, content_type, permissions, parent_permissions, tag, write_whole_collection) props = prepared_props if prepared_exc_info: logger.warning( "Bad PUT request on %r: %s", path, prepared_exc_info[1], exc_info=prepared_exc_info) return httputils.BAD_REQUEST if write_whole_collection: try: etag = self._storage.create_collection( path, prepared_items, props).etag except ValueError as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST else: prepared_item, = prepared_items if (item and item.uid != prepared_item.uid or not item and parent_item.has_uid(prepared_item.uid)): return self._webdav_error_response("%s:no-uid-conflict" % ( "C" if tag == "VCALENDAR" else "CR")) href = posixpath.basename(pathutils.strip_path(path)) try: etag = parent_item.upload(href, prepared_item).etag except ValueError as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST headers = {"ETag": etag} return client.CREATED, headers, None
def do_PUT(self, environ, base_prefix, path, user): """Manage PUT request.""" if not self.access(user, path, "w"): return httputils.NOT_ALLOWED try: content = self.read_content(environ) except RuntimeError as e: logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST except socket.timeout: logger.debug("client timed out", exc_info=True) return httputils.REQUEST_TIMEOUT # Prepare before locking parent_path = pathutils.unstrip_path( posixpath.dirname(pathutils.strip_path(path)), True) permissions = self.Rights.authorized(user, path, "Ww") parent_permissions = self.Rights.authorized(user, parent_path, "w") def prepare(vobject_items, tag=None, write_whole_collection=None): if (write_whole_collection or permissions and not parent_permissions): write_whole_collection = True content_type = environ.get("CONTENT_TYPE", "").split(";")[0] tags = {value: key for key, value in xmlutils.MIMETYPES.items()} tag = radicale_item.predict_tag_of_whole_collection( vobject_items, tags.get(content_type)) if not tag: raise ValueError("Can't determine collection tag") collection_path = pathutils.strip_path(path) elif (write_whole_collection is not None and not write_whole_collection or not permissions and parent_permissions): write_whole_collection = False if tag is None: tag = radicale_item.predict_tag_of_parent_collection( vobject_items) collection_path = posixpath.dirname( pathutils.strip_path(path)) props = None stored_exc_info = None items = [] try: if tag: radicale_item.check_and_sanitize_items( vobject_items, is_collection=write_whole_collection, tag=tag) if write_whole_collection and tag == "VCALENDAR": vobject_components = [] vobject_item, = vobject_items for content in ("vevent", "vtodo", "vjournal"): vobject_components.extend( getattr(vobject_item, "%s_list" % content, [])) vobject_components_by_uid = itertools.groupby( sorted(vobject_components, key=radicale_item.get_uid), radicale_item.get_uid) for uid, components in vobject_components_by_uid: vobject_collection = vobject.iCalendar() for component in components: vobject_collection.add(component) item = radicale_item.Item( collection_path=collection_path, vobject_item=vobject_collection) item.prepare() items.append(item) elif write_whole_collection and tag == "VADDRESSBOOK": for vobject_item in vobject_items: item = radicale_item.Item( collection_path=collection_path, vobject_item=vobject_item) item.prepare() items.append(item) elif not write_whole_collection: vobject_item, = vobject_items item = radicale_item.Item( collection_path=collection_path, vobject_item=vobject_item) item.prepare() items.append(item) if write_whole_collection: props = {} if tag: props["tag"] = tag if tag == "VCALENDAR" and vobject_items: if hasattr(vobject_items[0], "x_wr_calname"): calname = vobject_items[0].x_wr_calname.value if calname: props["D:displayname"] = calname if hasattr(vobject_items[0], "x_wr_caldesc"): caldesc = vobject_items[0].x_wr_caldesc.value if caldesc: props["C:calendar-description"] = caldesc radicale_item.check_and_sanitize_props(props) except Exception: stored_exc_info = sys.exc_info() # Use generator for items and delete references to free memory # early def items_generator(): while items: yield items.pop(0) return (items_generator(), tag, write_whole_collection, props, stored_exc_info) try: vobject_items = tuple(vobject.readComponents(content or "")) except Exception as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST (prepared_items, prepared_tag, 
prepared_write_whole_collection, prepared_props, prepared_exc_info) = prepare(vobject_items) with self.Collection.acquire_lock("w", user): item = next(self.Collection.discover(path), None) parent_item = next(self.Collection.discover(parent_path), None) if not parent_item: return httputils.CONFLICT write_whole_collection = ( isinstance(item, storage.BaseCollection) or not parent_item.get_meta("tag")) if write_whole_collection: tag = prepared_tag else: tag = parent_item.get_meta("tag") if write_whole_collection: if not self.Rights.authorized(user, path, "w" if tag else "W"): return httputils.NOT_ALLOWED elif not self.Rights.authorized(user, parent_path, "w"): return httputils.NOT_ALLOWED etag = environ.get("HTTP_IF_MATCH", "") if not item and etag: # Etag asked but no item found: item has been removed return httputils.PRECONDITION_FAILED if item and etag and item.etag != etag: # Etag asked but item not matching: item has changed return httputils.PRECONDITION_FAILED match = environ.get("HTTP_IF_NONE_MATCH", "") == "*" if item and match: # Creation asked but item found: item can't be replaced return httputils.PRECONDITION_FAILED if (tag != prepared_tag or prepared_write_whole_collection != write_whole_collection): (prepared_items, prepared_tag, prepared_write_whole_collection, prepared_props, prepared_exc_info) = prepare( vobject_items, tag, write_whole_collection) props = prepared_props if prepared_exc_info: logger.warning( "Bad PUT request on %r: %s", path, prepared_exc_info[1], exc_info=prepared_exc_info) return httputils.BAD_REQUEST if write_whole_collection: try: etag = self.Collection.create_collection( path, prepared_items, props).etag except ValueError as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST else: prepared_item, = prepared_items if (item and item.uid != prepared_item.uid or not item and parent_item.has_uid(prepared_item.uid)): return self.webdav_error_response( "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict") href = posixpath.basename(pathutils.strip_path(path)) try: etag = parent_item.upload(href, prepared_item).etag except ValueError as e: logger.warning( "Bad PUT request on %r: %s", path, e, exc_info=True) return httputils.BAD_REQUEST headers = {"ETag": etag} return client.CREATED, headers, None
def sync(self, old_token=None): # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME # where TOKEN_NAME is the md5 hash of all history etags of present and # past items of the collection. def check_token_name(token_name): if len(token_name) != 32: return False for c in token_name: if c not in "0123456789abcdef": return False return True old_token_name = None if old_token: # Extract the token name from the sync token if not old_token.startswith("http://radicale.org/ns/sync/"): raise ValueError("Malformed token: %r" % old_token) old_token_name = old_token[len("http://radicale.org/ns/sync/"):] if not check_token_name(old_token_name): raise ValueError("Malformed token: %r" % old_token) # Get the current state and sync-token of the collection. state = {} token_name_hash = md5() # Find the history of all existing and deleted items for href, item in itertools.chain( ((item.href, item) for item in self.get_all()), ((href, None) for href in self._get_deleted_history_hrefs())): history_etag = self._update_history_etag(href, item) state[href] = history_etag token_name_hash.update((href + "/" + history_etag).encode("utf-8")) token_name = token_name_hash.hexdigest() token = "http://radicale.org/ns/sync/%s" % token_name if token_name == old_token_name: # Nothing changed return token, () token_folder = os.path.join(self._filesystem_path, ".Radicale.cache", "sync-token") token_path = os.path.join(token_folder, token_name) old_state = {} if old_token_name: # load the old token state old_token_path = os.path.join(token_folder, old_token_name) try: # Race: Another process might have deleted the file. with open(old_token_path, "rb") as f: old_state = pickle.load(f) except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e: if isinstance(e, (pickle.UnpicklingError, ValueError)): logger.warning( "Failed to load stored sync token %r in %r: %s", old_token_name, self.path, e, exc_info=True) # Delete the damaged file try: os.remove(old_token_path) except (FileNotFoundError, PermissionError): pass raise ValueError("Token not found: %r" % old_token) # write the new token state or update the modification time of # existing token state if not os.path.exists(token_path): self._makedirs_synced(token_folder) try: # Race: Other processes might have created and locked the file. with self._atomic_write(token_path, "wb") as f: pickle.dump(state, f) except PermissionError: pass else: # clean up old sync tokens and item cache self._clean_cache(token_folder, os.listdir(token_folder), max_age=self.configuration.getint( "storage", "max_sync_token_age")) self._clean_history() else: # Try to update the modification time try: # Race: Another process might have deleted the file. os.utime(token_path) except FileNotFoundError: pass changes = [] # Find all new, changed and deleted (that are still in the item cache) # items for href, history_etag in state.items(): if history_etag != old_state.get(href): changes.append(href) # Find all deleted items that are no longer in the item cache for href, history_etag in old_state.items(): if href not in state: changes.append(href) return token, changes
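# A standalone sketch of the sync-token naming scheme used above: the token
# name is the md5 hex digest accumulated over "href/history_etag" pairs, and
# the full token is that name appended to the http://radicale.org/ns/sync/
# prefix. The hrefs and history etags below are invented for illustration.
from hashlib import md5

state = {
    "event1.ics": "a1b2c3d4",   # href -> history etag (example values)
    "event2.ics": "e5f6a7b8",
}

token_name_hash = md5()
for href, history_etag in state.items():
    token_name_hash.update((href + "/" + history_etag).encode("utf-8"))
token = "http://radicale.org/ns/sync/%s" % token_name_hash.hexdigest()
print(token)

# The parser above accepts only names that look like an md5 hex digest:
name = token[len("http://radicale.org/ns/sync/"):]
assert len(name) == 32 and all(c in "0123456789abcdef" for c in name)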
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request."""
    if not self.access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        content = self.read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    parent_path = pathutils.unstrip_path(
        posixpath.dirname(pathutils.strip_path(path)), True)
    permissions = self.Rights.authorized(user, path, "Ww")
    parent_permissions = self.Rights.authorized(user, parent_path, "w")

    def prepare(vobject_items, tag=None, write_whole_collection=None):
        if (write_whole_collection or
                permissions and not parent_permissions):
            write_whole_collection = True
            content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
            tags = {value: key
                    for key, value in xmlutils.MIMETYPES.items()}
            tag = radicale_item.predict_tag_of_whole_collection(
                vobject_items, tags.get(content_type))
            if not tag:
                raise ValueError("Can't determine collection tag")
            collection_path = pathutils.strip_path(path)
        elif (write_whole_collection is not None and
                not write_whole_collection or
                not permissions and parent_permissions):
            write_whole_collection = False
            if tag is None:
                tag = radicale_item.predict_tag_of_parent_collection(
                    vobject_items)
            collection_path = posixpath.dirname(pathutils.strip_path(path))
        props = None
        stored_exc_info = None
        items = []
        try:
            if tag:
                radicale_item.check_and_sanitize_items(
                    vobject_items, is_collection=write_whole_collection,
                    tag=tag)
                if write_whole_collection and tag == "VCALENDAR":
                    vobject_components = []
                    vobject_item, = vobject_items
                    for content in ("vevent", "vtodo", "vjournal"):
                        vobject_components.extend(
                            getattr(vobject_item, "%s_list" % content, []))
                    vobject_components_by_uid = itertools.groupby(
                        sorted(vobject_components,
                               key=radicale_item.get_uid),
                        radicale_item.get_uid)
                    for uid, components in vobject_components_by_uid:
                        vobject_collection = vobject.iCalendar()
                        for component in components:
                            vobject_collection.add(component)
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_collection)
                        item.prepare()
                        items.append(item)
                elif write_whole_collection and tag == "VADDRESSBOOK":
                    for vobject_item in vobject_items:
                        item = radicale_item.Item(
                            collection_path=collection_path,
                            vobject_item=vobject_item)
                        item.prepare()
                        items.append(item)
                elif not write_whole_collection:
                    vobject_item, = vobject_items
                    item = radicale_item.Item(
                        collection_path=collection_path,
                        vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)

            if write_whole_collection:
                props = {}
                if tag:
                    props["tag"] = tag
                if tag == "VCALENDAR" and vobject_items:
                    if hasattr(vobject_items[0], "x_wr_calname"):
                        calname = vobject_items[0].x_wr_calname.value
                        if calname:
                            props["D:displayname"] = calname
                    if hasattr(vobject_items[0], "x_wr_caldesc"):
                        caldesc = vobject_items[0].x_wr_caldesc.value
                        if caldesc:
                            props["C:calendar-description"] = caldesc
                radicale_item.check_and_sanitize_props(props)
        except Exception:
            stored_exc_info = sys.exc_info()

        # Use generator for items and delete references to free memory
        # early
        def items_generator():
            while items:
                yield items.pop(0)

        return (items_generator(), tag, write_whole_collection, props,
                stored_exc_info)

    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(vobject_items)

    with self.Collection.acquire_lock("w", user):
        item = next(self.Collection.discover(path), None)
        parent_item = next(self.Collection.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT

        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))

        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")

        if write_whole_collection:
            if not self.Rights.authorized(user, path, "w" if tag else "W"):
                return httputils.NOT_ALLOWED
        elif not self.Rights.authorized(user, parent_path, "w"):
            return httputils.NOT_ALLOWED

        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return httputils.PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return httputils.PRECONDITION_FAILED

        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return httputils.PRECONDITION_FAILED

        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                vobject_items, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            logger.warning(
                "Bad PUT request on %r: %s", path, prepared_exc_info[1],
                exc_info=prepared_exc_info)
            return httputils.BAD_REQUEST

        if write_whole_collection:
            try:
                etag = self.Collection.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST
        else:
            prepared_item, = prepared_items
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                return self.webdav_error_response(
                    "C" if tag == "VCALENDAR" else "CR", "no-uid-conflict")

            href = posixpath.basename(pathutils.strip_path(path))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning(
                    "Bad PUT request on %r: %s", path, e, exc_info=True)
                return httputils.BAD_REQUEST

        headers = {"ETag": etag}
        return client.CREATED, headers, None
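# Hedged restatement of the ETag preconditions enforced inline by do_PUT
# above. _put_precondition_ok is a hypothetical helper used purely for
# illustration; it is not part of Radicale.
def _put_precondition_ok(item_exists, item_etag, if_match, if_none_match):
    """Return True if a PUT may proceed, mirroring the checks in do_PUT."""
    if if_match and not item_exists:
        return False  # If-Match given but the item has been removed
    if if_match and item_exists and item_etag != if_match:
        return False  # If-Match given but the stored item has changed
    if if_none_match == "*" and item_exists:
        return False  # creation requested but an item already exists
    return True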
def xml_report(base_prefix, path, xml_request, collection, unlock_storage_fn):
    """Read and answer REPORT requests.

    Read rfc3253-3.6 for info.

    """
    multistatus = ET.Element(xmlutils.make_tag("D", "multistatus"))
    if xml_request is None:
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (
            xmlutils.make_tag("D", "principal-search-property-set"),
            xmlutils.make_tag("D", "principal-property-search"),
            xmlutils.make_tag("D", "expand-property")):
        # We don't support searching for principals or indirect retrieving
        # of properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't
        # announce support for them) and stops working if an error code is
        # returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.tag_from_clark(root.tag), path)
        return client.MULTI_STATUS, multistatus
    if (root.tag == xmlutils.make_tag("C", "calendar-multiget") and
            collection.get_meta("tag") != "VCALENDAR" or
            root.tag == xmlutils.make_tag("CR", "addressbook-multiget") and
            collection.get_meta("tag") != "VADDRESSBOOK" or
            root.tag == xmlutils.make_tag("D", "sync-collection") and
            collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.tag_from_clark(root.tag), path)
        return (client.CONFLICT,
                xmlutils.webdav_error("D", "supported-report"))
    prop_element = root.find(xmlutils.make_tag("D", "prop"))
    props = ([prop.tag for prop in prop_element]
             if prop_element is not None else [])

    if root.tag in (
            xmlutils.make_tag("C", "calendar-multiget"),
            xmlutils.make_tag("CR", "addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_tag("D", "href")):
            href_path = pathutils.sanitize_path(
                unquote(urlparse(href_element.text).path))
            if (href_path + "/").startswith(base_prefix + "/"):
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request "
                               "on %r", href_path, path)
    elif root.tag == xmlutils.make_tag("D", "sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_tag("D", "sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            return (client.CONFLICT,
                    xmlutils.webdav_error("D", "valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_tag("D", "sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        hreferences = (path,)
    filters = (
        root.findall("./%s" % xmlutils.make_tag("C", "filter")) +
        root.findall("./%s" % xmlutils.make_tag("CR", "filter")))

    def retrieve_items(collection, hreferences, multistatus):
        """Retrieves all items that are referenced in ``hreferences`` from
        ``collection`` and adds 404 responses for missing and invalid items
        to ``multistatus``."""
        collection_requested = False

        def get_names():
            """Extracts all names from references in ``hreferences`` and
            adds 404 responses for invalid references to ``multistatus``.
            If the whole collection is referenced ``collection_requested``
            gets set to ``True``."""
            nonlocal collection_requested
            for hreference in hreferences:
                try:
                    name = pathutils.name_from_path(hreference, collection)
                except ValueError as e:
                    logger.warning("Skipping invalid path %r in REPORT "
                                   "request on %r: %s", hreference, path, e)
                    response = xml_item_response(base_prefix, hreference,
                                                 found_item=False)
                    multistatus.append(response)
                    continue
                if name:
                    # Reference is an item
                    yield name
                else:
                    # Reference is a collection
                    collection_requested = True

        for name, item in collection.get_multi(get_names()):
            if not item:
                uri = pathutils.unstrip_path(
                    posixpath.join(collection.path, name))
                response = xml_item_response(base_prefix, uri,
                                             found_item=False)
                multistatus.append(response)
            else:
                yield item, False
        if collection_requested:
            yield from collection.get_filtered(filters)

    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(collection, hreferences,
                                          multistatus))
    collection_tag = collection.get_meta("tag")
    # Don't access storage after this!
    unlock_storage_fn()

    def match(item, filter_):
        tag = collection_tag
        if (tag == "VCALENDAR" and
                filter_.tag != xmlutils.make_tag("C", filter_)):
            if len(filter_) == 0:
                return True
            if len(filter_) > 1:
                raise ValueError("Filter with %d children" % len(filter_))
            if filter_[0].tag != xmlutils.make_tag("C", "comp-filter"):
                raise ValueError("Unexpected %r in filter" % filter_[0].tag)
            return radicale_filter.comp_match(item, filter_[0])
        if (tag == "VADDRESSBOOK" and
                filter_.tag != xmlutils.make_tag("CR", filter_)):
            for child in filter_:
                if child.tag != xmlutils.make_tag("CR", "prop-filter"):
                    raise ValueError("Unexpected %r in filter" % child.tag)
            test = filter_.get("test", "anyof")
            if test == "anyof":
                return any(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            if test == "allof":
                return all(
                    radicale_filter.prop_match(item.vobject_item, f, "CR")
                    for f in filter_)
            raise ValueError("Unsupported filter test: %r" % test)
        raise ValueError("Unsupported filter %r for %r" % (filter_.tag, tag))

    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot
        # of memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(match(item, filter_) for filter_ in filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e

        found_props = []
        not_found_props = []
        for tag in props:
            element = ET.Element(tag)
            if tag == xmlutils.make_tag("D", "getetag"):
                element.text = item.etag
                found_props.append(element)
            elif tag == xmlutils.make_tag("D", "getcontenttype"):
                element.text = xmlutils.get_content_type(item)
                found_props.append(element)
            elif tag in (
                    xmlutils.make_tag("C", "calendar-data"),
                    xmlutils.make_tag("CR", "address-data")):
                element.text = item.serialize()
                found_props.append(element)
            else:
                not_found_props.append(element)

        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        multistatus.append(xml_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))
    return client.MULTI_STATUS, multistatus
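# For orientation, a sync-collection REPORT body as handled by xml_report()
# above looks roughly like the following. The constant name and the token
# value are examples only; the element names and namespaces come from the
# branches above (D:sync-token, D:prop, D:getetag, C:calendar-data).
EXAMPLE_SYNC_COLLECTION_REPORT = """\
<?xml version="1.0" encoding="utf-8" ?>
<D:sync-collection xmlns:D="DAV:"
                   xmlns:C="urn:ietf:params:xml:ns:caldav">
  <D:sync-token>http://radicale.org/ns/sync/0123456789abcdef0123456789abcdef</D:sync-token>
  <D:prop>
    <D:getetag/>
    <C:calendar-data/>
  </D:prop>
</D:sync-collection>
"""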
def _handle_request(self, environ):
    """Manage a request."""

    def response(status, headers=(), answer=None):
        headers = dict(headers)
        # Set content length
        if answer:
            if hasattr(answer, "encode"):
                logger.debug("Response content:\n%s", answer)
                headers["Content-Type"] += "; charset=%s" % self.encoding
                answer = answer.encode(self.encoding)
            accept_encoding = [
                encoding.strip() for encoding in
                environ.get("HTTP_ACCEPT_ENCODING", "").split(",")
                if encoding.strip()]
            if "gzip" in accept_encoding:
                zcomp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
                answer = zcomp.compress(answer) + zcomp.flush()
                headers["Content-Encoding"] = "gzip"
            headers["Content-Length"] = str(len(answer))
        # Add extra headers set in configuration
        if self.configuration.has_section("headers"):
            for key in self.configuration.options("headers"):
                headers[key] = self.configuration.get("headers", key)
        # Start response
        time_end = datetime.datetime.now()
        status = "%d %s" % (
            status, client.responses.get(status, "Unknown"))
        logger.info(
            "%s response status for %r%s in %.3f seconds: %s",
            environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""),
            depthinfo, (time_end - time_begin).total_seconds(), status)
        # Return response content
        return status, list(headers.items()), [answer] if answer else []

    remote_host = "unknown"
    if environ.get("REMOTE_HOST"):
        remote_host = repr(environ["REMOTE_HOST"])
    elif environ.get("REMOTE_ADDR"):
        remote_host = environ["REMOTE_ADDR"]
    if environ.get("HTTP_X_FORWARDED_FOR"):
        remote_host = "%r (forwarded by %s)" % (
            environ["HTTP_X_FORWARDED_FOR"], remote_host)
    remote_useragent = ""
    if environ.get("HTTP_USER_AGENT"):
        remote_useragent = " using %r" % environ["HTTP_USER_AGENT"]
    depthinfo = ""
    if environ.get("HTTP_DEPTH"):
        depthinfo = " with depth %r" % environ["HTTP_DEPTH"]
    time_begin = datetime.datetime.now()
    logger.info(
        "%s request for %r%s received from %s%s",
        environ["REQUEST_METHOD"], environ.get("PATH_INFO", ""), depthinfo,
        remote_host, remote_useragent)
    headers = pprint.pformat(self._headers_log(environ))
    logger.debug("Request headers:\n%s", headers)

    # Let reverse proxies overwrite SCRIPT_NAME
    if "HTTP_X_SCRIPT_NAME" in environ:
        # script_name must be removed from PATH_INFO by the client.
        unsafe_base_prefix = environ["HTTP_X_SCRIPT_NAME"]
        logger.debug("Script name overwritten by client: %r",
                     unsafe_base_prefix)
    else:
        # SCRIPT_NAME is already removed from PATH_INFO, according to the
        # WSGI specification.
        unsafe_base_prefix = environ.get("SCRIPT_NAME", "")
    # Sanitize base prefix
    base_prefix = pathutils.sanitize_path(unsafe_base_prefix).rstrip("/")
    logger.debug("Sanitized script name: %r", base_prefix)
    # Sanitize request URI (a WSGI server indicates with an empty path,
    # that the URL targets the application root without a trailing slash)
    path = pathutils.sanitize_path(environ.get("PATH_INFO", ""))
    logger.debug("Sanitized path: %r", path)

    # Get function corresponding to method
    function = getattr(self, "do_%s" % environ["REQUEST_METHOD"].upper())

    # If "/.well-known" is not available, clients query "/"
    if path == "/.well-known" or path.startswith("/.well-known/"):
        return response(*httputils.NOT_FOUND)

    # Ask authentication backend to check rights
    login = password = ""
    external_login = self.Auth.get_external_login(environ)
    authorization = environ.get("HTTP_AUTHORIZATION", "")
    if external_login:
        login, password = external_login
        login, password = login or "", password or ""
    elif authorization.startswith("Basic"):
        authorization = authorization[len("Basic"):].strip()
        login, password = self.decode(base64.b64decode(
            authorization.encode("ascii")), environ).split(":", 1)

    user = self.Auth.login(login, password) or "" if login else ""
    if user and login == user:
        logger.info("Successful login: %r", user)
    elif user:
        logger.info("Successful login: %r -> %r", login, user)
    elif login:
        logger.info("Failed login attempt: %r", login)
        # Random delay to avoid timing oracles and bruteforce attacks
        delay = self.configuration.getfloat("auth", "delay")
        if delay > 0:
            random_delay = delay * (0.5 + random.random())
            logger.debug("Sleeping %.3f seconds", random_delay)
            time.sleep(random_delay)

    if user and not pathutils.is_safe_path_component(user):
        # Prevent usernames like "user/calendar.ics"
        logger.info("Refused unsafe username: %r", user)
        user = ""

    # Create principal collection
    if user:
        principal_path = "/%s/" % user
        if self.Rights.authorized(user, principal_path, "W"):
            with self.Collection.acquire_lock("r", user):
                principal = next(
                    self.Collection.discover(principal_path, depth="1"),
                    None)
            if not principal:
                with self.Collection.acquire_lock("w", user):
                    try:
                        self.Collection.create_collection(principal_path)
                    except ValueError as e:
                        logger.warning("Failed to create principal "
                                       "collection %r: %s", user, e)
                        user = ""
        else:
            logger.warning("Access to principal path %r denied by "
                           "rights backend", principal_path)

    if self.configuration.getboolean("internal", "internal_server"):
        # Verify content length
        content_length = int(environ.get("CONTENT_LENGTH") or 0)
        if content_length:
            max_content_length = self.configuration.getint(
                "server", "max_content_length")
            if max_content_length and content_length > max_content_length:
                logger.info("Request body too large: %d", content_length)
                return response(*httputils.REQUEST_ENTITY_TOO_LARGE)

    if not login or user:
        status, headers, answer = function(
            environ, base_prefix, path, user)
        if (status, headers, answer) == httputils.NOT_ALLOWED:
            logger.info("Access to %r denied for %s", path,
                        repr(user) if user else "anonymous user")
    else:
        status, headers, answer = httputils.NOT_ALLOWED

    if ((status, headers, answer) == httputils.NOT_ALLOWED and
            not user and not external_login):
        # Unknown or unauthorized user
        logger.debug("Asking client for authentication")
        status = client.UNAUTHORIZED
        realm = self.configuration.get("auth", "realm")
        headers = dict(headers)
        headers.update({
            "WWW-Authenticate": "Basic realm=\"%s\"" % realm})

    return response(status, headers, answer)
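# Minimal sketch of the "Basic" Authorization handling performed in
# _handle_request above. decode_basic_authorization is an illustrative
# helper, not Radicale API; base64 is imported again here only so the
# sketch stands on its own.
import base64


def decode_basic_authorization(header_value, charset="utf-8"):
    """Split an "Authorization: Basic <base64>" value into (login, password).

    Returns empty strings if the header does not use the Basic scheme.
    """
    if not header_value.startswith("Basic"):
        return "", ""
    encoded = header_value[len("Basic"):].strip()
    decoded = base64.b64decode(encoded.encode("ascii")).decode(charset)
    login, _, password = decoded.partition(":")
    return login, password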