# These methods rely on module-level imports: os, pickle, binascii, and
# Radicale's internal pathutils, radicale_item (radicale.item) and logger
# (radicale.log).


def _upload_all_nonatomic(self, items, suffix=""):
    """Upload a new set of items.

    This takes a list of vobject items and uploads them nonatomically and
    without existence checks.
    """
    cache_folder = os.path.join(self._filesystem_path,
                                ".Radicale.cache", "item")
    self._makedirs_synced(cache_folder)
    hrefs = set()
    for item in items:
        uid = item.uid
        try:
            cache_content = self._item_cache_content(item)
        except Exception as e:
            raise ValueError(
                "Failed to store item %r in temporary collection %r: %s" %
                (uid, self.path, e)) from e
        # Candidate href generators, tried in order: the UID itself (on
        # platforms with known filename rules), then a hash of the UID,
        # then a freshly generated available UID.
        href_candidates = []
        if os.name in ("nt", "posix"):
            href_candidates.append(
                lambda: uid if uid.lower().endswith(suffix.lower())
                else uid + suffix)
        href_candidates.extend((
            lambda: radicale_item.get_etag(uid).strip('"') + suffix,
            lambda: radicale_item.find_available_uid(
                hrefs.__contains__, suffix)))
        href = None

        def replace_fn(source, target):
            nonlocal href
            while href_candidates:
                href = href_candidates.pop(0)()
                if href in hrefs:
                    continue
                if not pathutils.is_safe_filesystem_path_component(href):
                    if not href_candidates:
                        raise pathutils.UnsafePathError(href)
                    continue
                try:
                    return os.replace(source, pathutils.path_to_filesystem(
                        self._filesystem_path, href))
                except OSError as e:
                    # errno 22 (EINVAL) on POSIX and 123
                    # (ERROR_INVALID_NAME) on Windows mean the candidate
                    # is not a valid filename; try the next one.
                    if href_candidates and (
                            os.name == "posix" and e.errno == 22 or
                            os.name == "nt" and e.errno == 123):
                        continue
                    raise

        # The target name "ign" is ignored: replace_fn picks the real href.
        with self._atomic_write(os.path.join(self._filesystem_path, "ign"),
                                newline="", sync_directory=False,
                                replace_fn=replace_fn) as f:
            f.write(item.serialize())
        hrefs.add(href)
        with self._atomic_write(os.path.join(cache_folder, href), "wb",
                                sync_directory=False) as f:
            pickle.dump(cache_content, f)
    self._sync_directory(cache_folder)
    self._sync_directory(self._filesystem_path)
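# The href selection above is a fallback chain: each candidate generator is
# tried in order, names that are already taken or unsafe are skipped, and
# only the final failure raises. A minimal standalone sketch of the same
# pattern; pick_href and its safety check are illustrative stand-ins, not
# Radicale helpers.
import hashlib


def pick_href(uid, taken, suffix=".ics"):
    """Try candidate names in order, skipping taken or unsafe ones."""
    candidates = [
        # Prefer the UID itself (plus suffix) as the filename.
        lambda: uid if uid.lower().endswith(suffix.lower()) else uid + suffix,
        # Fall back to a hash of the UID, which is always a safe name.
        lambda: hashlib.sha256(uid.encode()).hexdigest() + suffix,
    ]
    while candidates:
        href = candidates.pop(0)()
        if href in taken or "/" in href or href in (".", ".."):
            continue
        return href
    raise ValueError("no available name for %r" % uid)


print(pick_href("event1", {"event1.ics"}))  # skips the taken name, hashes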
def _update_history_etag(self, href, item):
    """Updates and retrieves the history etag from the history cache.

    The history cache contains a file for each current and deleted item
    of the collection. These files contain the etag of the item (empty
    string for deleted items) and a history etag, which is a hash over
    the previous history etag and the etag separated by "/".
    """
    history_folder = os.path.join(self._filesystem_path,
                                  ".Radicale.cache", "history")
    try:
        with open(os.path.join(history_folder, href), "rb") as f:
            cache_etag, history_etag = pickle.load(f)
    except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
        if isinstance(e, (pickle.UnpicklingError, ValueError)):
            logger.warning(
                "Failed to load history cache entry %r in %r: %s",
                href, self.path, e, exc_info=True)
        cache_etag = ""
        # Initialize with random data to prevent collisions with cleaned
        # expired items.
        history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
    etag = item.etag if item else ""
    if etag != cache_etag:
        self._makedirs_synced(history_folder)
        history_etag = radicale_item.get_etag(
            history_etag + "/" + etag).strip('"')
        try:
            # Race: Other processes might have created and locked the file.
            with self._atomic_write(os.path.join(history_folder, href),
                                    "wb") as f:
                pickle.dump([etag, history_etag], f)
        except PermissionError:
            pass
    return history_etag
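# Each history etag is a hash over the previous history etag and the current
# item etag, so the value changes on every modification and deletion, even
# if an item is later restored with identical content. A standalone sketch
# of the chaining; SHA-256 is an assumed stand-in for radicale_item.get_etag,
# which this excerpt does not define.
import hashlib


def chain(history_etag, item_etag):
    """One step of the history-etag chain."""
    return hashlib.sha256(
        (history_etag + "/" + item_etag).encode()).hexdigest()


h = "00" * 16  # initial value; the real code seeds this with random hex
for item_etag in ('"abc"', "", '"abc"'):  # created, deleted, restored
    h = chain(h, item_etag)
    print(h)
# All three printed values differ: restoring identical content still yields
# a new history etag, so a sync client can detect the delete/restore cycle.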
def _upload_all_nonatomic(self, items, suffix=""):
    """Upload a new set of items.

    This takes a list of vobject items and uploads them nonatomically and
    without existence checks.
    """
    cache_folder = os.path.join(self._filesystem_path,
                                ".Radicale.cache", "item")
    self._storage._makedirs_synced(cache_folder)
    hrefs = set()
    for item in items:
        uid = item.uid
        try:
            cache_content = self._item_cache_content(item)
        except Exception as e:
            raise ValueError(
                "Failed to store item %r in temporary collection %r: %s" %
                (uid, self.path, e)) from e
        href_candidate_functions = []
        if os.name in ("nt", "posix"):
            href_candidate_functions.append(
                lambda: uid if uid.lower().endswith(suffix.lower())
                else uid + suffix)
        href_candidate_functions.extend(
            (lambda: radicale_item.get_etag(uid).strip('"') + suffix,
             lambda: radicale_item.find_available_uid(
                 hrefs.__contains__, suffix)))
        href = f = None
        while href_candidate_functions:
            href = href_candidate_functions.pop(0)()
            if href in hrefs:
                continue
            if not pathutils.is_safe_filesystem_path_component(href):
                if not href_candidate_functions:
                    raise pathutils.UnsafePathError(href)
                continue
            try:
                f = open(pathutils.path_to_filesystem(
                             self._filesystem_path, href),
                         "w", newline="", encoding=self._encoding)
                break
            except OSError as e:
                # errno 22 (EINVAL) on POSIX and 123 (ERROR_INVALID_NAME)
                # on Windows mean the candidate is not a valid filename;
                # try the next one.
                if href_candidate_functions and (
                        os.name == "posix" and e.errno == 22 or
                        os.name == "nt" and e.errno == 123):
                    continue
                raise
        with f:
            f.write(item.serialize())
            f.flush()
            self._storage._fsync(f)
        hrefs.add(href)
        with open(os.path.join(cache_folder, href), "wb") as f:
            pickle.dump(cache_content, f)
            f.flush()
            self._storage._fsync(f)
    self._storage._sync_directory(cache_folder)
    self._storage._sync_directory(self._filesystem_path)
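# This variant skips the atomic-write helper and writes in place, so
# durability depends on flushing and fsyncing both the file and its parent
# directory, as the calls to _fsync and _sync_directory above do. A minimal
# sketch of the same pattern with plain os calls; the path is illustrative,
# and Radicale wraps these steps in its own storage helpers.
import os


def write_durable(path, data):
    """Write data and force it (and its directory entry) to disk."""
    with open(path, "w", newline="") as f:
        f.write(data)
        f.flush()             # push Python's buffer to the OS
        os.fsync(f.fileno())  # force the file contents to stable storage
    if os.name == "posix":
        # Sync the directory too, so the new entry survives a crash.
        fd = os.open(os.path.dirname(path) or ".", os.O_RDONLY)
        try:
            os.fsync(fd)
        finally:
            os.close(fd)


write_durable("example.ics", "BEGIN:VCALENDAR\r\nEND:VCALENDAR\r\n")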
def etag(self):
    """Encoded as quoted-string (see RFC 2616)."""
    return get_etag(self.vobject_item.serialize())
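# The quoted-string encoding means the returned etag includes the double
# quotes, which is why callers above strip('"') when they need a bare token
# (e.g. as a filename). A sketch of a get_etag compatible with this
# docstring; the choice of SHA-256 is an assumption, not confirmed by this
# excerpt.
import hashlib


def get_etag(text):
    """Etag encoded as quoted-string (see RFC 2616); SHA-256 assumed."""
    return '"%s"' % hashlib.sha256(text.encode()).hexdigest()


etag = get_etag("BEGIN:VCALENDAR\r\nEND:VCALENDAR\r\n")
print(etag)             # quoted form, suitable for an ETag header
print(etag.strip('"'))  # bare digest, as used for href candidates above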