def get_num_enabled_aggregations(cls):
    """Return the persisted count of enabled aggregations.

    Falls back to 0 when the file content is missing or not a valid
    integer (int() raising TypeError/ValueError).
    """
    try:
        count = int(store.load_text_from_file(cls._num_enabled_aggregations_path()))
    except (TypeError, ValueError):
        count = 0
    return count
def default_from_address():
    """Return the default e-mail sender address for this site.

    On a Checkmk appliance (CMA) the nullmailer configuration file takes
    precedence; otherwise the address is built as <site>@<fqdn>.
    """
    site = os.environ.get("OMD_SITE", "checkmk")
    environ_default = site + "@" + socket.getfqdn()
    if not cmk_version.is_cma():
        return environ_default
    # The nullmailer file may carry a trailing newline; strip all newlines.
    configured = load_text_from_file("/etc/nullmailer/default-from", environ_default)
    return configured.replace('\n', '')
def _read_last_event_ids(self) -> FrozenSet[int]:
    """Load the previously seen event IDs from disk.

    Returns an empty frozenset when the state file does not exist
    (default payload is the empty JSON list).
    """
    raw = store.load_text_from_file(self._path_last_event_ids, default="[]")
    return frozenset(json.loads(raw))
def load_custom_attr(userid: UserId,
                     key: str,
                     conv_func: Callable[[str], Any],
                     default: Any = None,
                     lock: bool = False) -> Any:
    """Read a per-user custom attribute file and convert its content.

    Returns *default* unchanged when the file is missing (i.e. the loader
    handed the default back); otherwise the stripped file content is passed
    through *conv_func*.
    """
    attr_file = Path(custom_attr_path(userid, key))
    raw = store.load_text_from_file(attr_file, default=default, lock=lock)
    if raw == default:
        return raw
    return conv_func(raw.strip())
def test_save_empty(
    self,
    mocked_ca_config: ConfigDomainCACertificates,
    ca_settings,
    expected_file_content: str,
) -> None:
    """Saving the given CA settings must write exactly the expected file content."""
    settings = {"trusted_certificate_authorities": ca_settings}
    mocked_ca_config.save(settings)
    written = load_text_from_file(mocked_ca_config.trusted_cas_file)
    assert written == expected_file_content
def _try_history_update() -> None:
    """Update the license usage history dump when the schedule allows it.

    Both the next-run file and the history file are held locked for the
    whole operation so concurrent updaters cannot interleave.
    """
    logger.debug("Try license usage history update.")
    license_usage_dir.mkdir(parents=True, exist_ok=True)

    with store.locked(next_run_filepath), store.locked(history_filepath):
        now = datetime.now()
        # The next-run timestamp is stored rot47-obfuscated on disk.
        next_run_ts = int(rot47(store.load_text_from_file(next_run_filepath, default="_")))
        if not _may_update(now.timestamp(), next_run_ts):
            return
        store.save_bytes_to_file(history_filepath,
                                 _create_or_update_history_dump().serialize())
        store.save_text_to_file(next_run_filepath, rot47(str(_create_next_run_ts(now))))
        logger.debug("Successfully updated history.")
def disksync(
    self,
    *,
    removed: Container[_TKey] = (),
    updated: Iterable[Tuple[_TKey, _TValue]] = (),
) -> None:
    """Re-load and write the changes of the stored values

    This method will reload the values from disk, apply the changes
    (remove keys and update values) as specified by the arguments, and
    then write the result to disk.

    When this method returns, the data provided via the Mapping-interface
    and the data stored on disk must be in sync.
    """
    self._log_debug("synchronizing")

    self._path.parent.mkdir(parents=True, exist_ok=True)
    try:
        # NOTE(review): 'aquire_lock' (sic) appears to be the project's
        # store API spelling — confirm before "fixing" it.
        store.aquire_lock(self._path)

        # Skip re-reading when the file's mtime matches our last sync,
        # i.e. nobody else modified it since we last looked.
        if self._path.stat().st_mtime == self._last_sync:
            self._log_debug("already loaded")
        else:
            self._log_debug("loading from disk")
            # lock=False: we already hold the lock acquired above.
            self._data = self._deserializer(
                store.load_text_from_file(self._path, default="{}", lock=False))

        if removed or updated:
            # Build the new mapping from the freshly loaded state, write
            # it to disk first, and only then publish it as self._data.
            data = {
                k: v
                for k, v in self._data.items()
                if k not in removed
            }
            data.update(updated)
            self._log_debug("writing to disk")
            store.save_text_to_file(self._path, self._serializer(data))
            self._data = data

        # Remember the mtime of what we just read/wrote so the next call
        # can detect foreign modifications.
        self._last_sync = self._path.stat().st_mtime
    except Exception as exc:
        raise MKGeneralException from exc
    finally:
        store.release_lock(self._path)
def _load_result(path: Path) -> Union[str, object]:
    """Read an automation result from *path*.

    Remote calls originating from pre-2.1 sites still expect the legacy
    object (repr-based) file format; everything else reads plain text.
    """
    legacy_caller = remote_automation_call_came_from_pre21()
    if not legacy_caller:
        return store.load_text_from_file(path)
    return store.load_object_from_file(path, default=None)
def test_save_text_to_file(tmp_path, path_type, data):
    """Round-trip: text written via save_text_to_file is read back unchanged."""
    target = path_type(tmp_path / "lala")
    store.save_text_to_file(target, data)
    assert data == store.load_text_from_file(target)
def _read(self, file_path: Path) -> str:
    """Return the text content of *file_path*."""
    path_str = str(file_path)
    return store.load_text_from_file(path_str)
def _read_last_events_timestamp(self) -> Optional[int]:
    """Return the persisted last-events timestamp.

    Returns None whenever the state file is missing or its content is
    not parseable as an integer (any exception is swallowed).
    """
    try:
        raw = store.load_text_from_file(self._path_state_file, default="")
        return int(raw)
    except Exception:
        return None