Example #1
    def get_status_from_file(self) -> JobStatusSpec:
        if not self._jobstatus_path.exists():
            data: JobStatusSpec = {}
            data["state"] = JobStatusStates.INITIALIZED
            data["started"] = time.time()
        else:
            try:
                # Read this data with an explicit lock
                # This prevents a race condition where an empty jobstatus.mk file is read
                data = store.load_object_from_file(str(self._jobstatus_path),
                                                   default={},
                                                   lock=True)

                # Repair broken/invalid files
                if "state" not in data:
                    data["state"] = JobStatusStates.INITIALIZED
                    data["started"] = os.path.getctime(
                        str(self._jobstatus_path))
            finally:
                store.release_lock(str(self._jobstatus_path))

        data.setdefault("pid", None)
        data["loginfo"] = {}
        for field_id, field_path in [
            ("JobProgressUpdate", self._progress_update_path),
            ("JobResult", self._result_message_path),
            ("JobException", self._exceptions_path),
        ]:
            if field_path.exists():
                with field_path.open(encoding="utf-8") as f:
                    data["loginfo"][field_id] = f.read().splitlines()
            else:
                data["loginfo"][field_id] = []

        map_substate_to_active = {
            JobStatusStates.INITIALIZED: True,
            JobStatusStates.RUNNING: True,
            JobStatusStates.FINISHED: False,
            JobStatusStates.STOPPED: False,
            JobStatusStates.EXCEPTION: False,
        }
        data["is_active"] = map_substate_to_active[data["state"]]
        return data
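
The example above relies on a contract of cmk.utils.store that most of the examples below use as well: load_object_from_file(..., lock=True) acquires the file lock before reading and leaves it held, so the caller must release it itself, typically in a finally block. A minimal sketch of that contract (the path is hypothetical, for illustration only):

from cmk.utils import store

path = "/tmp/jobstatus.mk"  # hypothetical path
try:
    # lock=True takes the file lock before reading and keeps holding it,
    # so a concurrent writer cannot hand us a half-written file.
    data = store.load_object_from_file(path, default={}, lock=True)
    data.setdefault("state", "initialized")
finally:
    # Releasing is the caller's job, even if the load raised.
    store.release_lock(path)
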
Example #2
def get_gui_messages(user_id=None):
    if user_id is None:
        user_id = config.user.id
    path = config.config_dir + "/" + ensure_str(user_id) + '/messages.mk'
    messages = store.load_object_from_file(path, default=[])

    # Drop expired messages. Iterate over a copy: popping from the list
    # while enumerating it would skip the entry that follows each removal.
    now = time.time()
    updated = False
    for message in list(messages):
        valid_till = message.get('valid_till')
        if valid_till is not None and valid_till < now:
            messages.remove(message)
            updated = True

    if updated:
        save_gui_messages(messages)

    return messages
Example #3
def get_gui_messages(user_id=None):
    if user_id is None:
        user_id = user.id
    path = cmk.utils.paths.profile_dir / user_id / "messages.mk"
    messages = store.load_object_from_file(path, default=[])

    # Drop expired messages. Iterate over a copy: popping from the list
    # while enumerating it would skip the entry that follows each removal.
    now = time.time()
    updated = False
    for message in list(messages):
        valid_till = message.get("valid_till")
        if valid_till is not None and valid_till < now:
            messages.remove(message)
            updated = True

    if updated:
        save_gui_messages(messages)

    return messages
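
The same expiry logic can also be written without removing items in place; rebuilding the list sidesteps the iteration pitfall entirely (a sketch, not the project's code):

now = time.time()
valid = [m for m in messages if m.get("valid_till") is None or m.get("valid_till") >= now]
if len(valid) != len(messages):
    save_gui_messages(valid)
return valid
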
Example #4
    def save(self, hostname):
        # type: (HostName) -> None
        """ The job of the save function is to update the item state on disk.
        It simply returns if it detects that the data hasn't changed since it was last loaded.
        If the data on disk has been changed in the meantime, the cached data is updated from disk.
        Afterwards only the actual modifications (update/remove) are applied to the updated cached
        data before it is written back to disk.
        """
        filename = cmk.utils.paths.counters_dir + "/" + hostname
        if not self._removed_item_state_keys and not self._updated_item_states:
            return

        try:
            if not os.path.exists(cmk.utils.paths.counters_dir):
                os.makedirs(cmk.utils.paths.counters_dir)

            store.aquire_lock(filename)
            last_mtime = os.stat(filename).st_mtime
            if last_mtime != self._last_mtime:
                self._item_states = store.load_object_from_file(filename,
                                                                default={})

                # Remove obsolete keys
                for key in self._removed_item_state_keys:
                    try:
                        del self._item_states[key]
                    except KeyError:
                        pass

                # Add updated keys
                self._item_states.update(self._updated_item_states)

            store.save_object_to_file(filename,
                                      self._item_states,
                                      pretty=False)
        except Exception:
            raise MKGeneralException("Cannot write to %s: %s" %
                                     (filename, traceback.format_exc()))
        finally:
            store.release_lock(filename)
Example #5
    def disksync(
            self,
            *,
            removed: Container[_ValueStoreKey] = (),
            updated: Iterable[Tuple[_ValueStoreKey, Any]] = (),
    ) -> None:
        """Re-load and then store the changes of the item state to disk

        Make sure the object is in sync with the file after writing.
        """
        self._log_debug("value store: synchronizing")

        self._path.parent.mkdir(parents=True, exist_ok=True)

        try:
            store.aquire_lock(self._path)

            if self._path.stat().st_mtime == self._last_sync:
                self._log_debug("value store: already loaded")
            else:
                self._log_debug("value store: loading from disk")
                self._data = store.load_object_from_file(self._path,
                                                         default={},
                                                         lock=False)

            if removed or updated:
                data = {
                    k: v
                    for k, v in self._data.items() if k not in removed
                }
                data.update(updated)
                self._log_debug("value store: writing to disk")
                store.save_object_to_file(self._path, data, pretty=False)
                self._data = data

            self._last_sync = self._path.stat().st_mtime
        except Exception as exc:
            raise MKGeneralException from exc
        finally:
            store.release_lock(self._path)
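
Examples #4 and #5 are two generations of the same read-modify-write cycle: take the file lock, re-load the object only if the file's mtime changed since the last sync, drop the removed keys, merge in the updated ones, and write the result back before releasing the lock. Distilled into a standalone sketch (names are hypothetical; the store calls match the examples above, including the module's "aquire_lock" spelling):

from pathlib import Path
from typing import Any, Container, Dict, Tuple

from cmk.utils import store


def sync_object(path: Path, cached: Dict[str, Any], last_mtime: float,
                removed: Container[str],
                updated: Dict[str, Any]) -> Tuple[Dict[str, Any], float]:
    """Merge pending changes into the on-disk object under the file lock."""
    try:
        store.aquire_lock(path)  # the examples rely on this creating a missing file
        if path.stat().st_mtime != last_mtime:
            # Someone else wrote in the meantime: refresh from disk first.
            cached = store.load_object_from_file(path, default={})
        data = {k: v for k, v in cached.items() if k not in removed}
        data.update(updated)
        store.save_object_to_file(path, data, pretty=False)
        return data, path.stat().st_mtime
    finally:
        store.release_lock(path)
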
Example #6
    def load(
        self,
        *,
        trees: Iterable[BackendSNMPTree],
    ) -> None:
        """Try to read the OIDs data from cache files"""
        for tree in trees:
            for oid in (o for o in tree.oids if o.save_to_cache):  # no point in reading otherwise

                fetchoid = f"{tree.base}.{oid.column}"
                path = self._path / fetchoid

                console.vverbose(f"  Loading {fetchoid} from walk cache {path}\n")
                try:
                    read_walk = store.load_object_from_file(path)
                except Exception:
                    console.verbose(f"  Failed to load {fetchoid} from walk cache {path}\n")
                    if cmk.utils.debug.enabled():
                        raise
                    continue

                if read_walk is not None:
                    self._store[fetchoid] = (oid.save_to_cache, read_walk)  # (True, ...)
Example #7
def get_history_deltas(hostname, search_timestamp=None):
    if '/' in hostname:
        return [], []  # just for security reasons

    inventory_path = "%s/inventory/%s" % (cmk.utils.paths.var_dir, hostname)
    if not os.path.exists(inventory_path):
        return [], []

    latest_timestamp = str(int(os.stat(inventory_path).st_mtime))
    inventory_archive_dir = "%s/inventory_archive/%s" % (
        cmk.utils.paths.var_dir, hostname)
    try:
        archived_timestamps = sorted(os.listdir(inventory_archive_dir))
    except OSError:
        return [], []

    all_timestamps = archived_timestamps + [latest_timestamp]
    previous_timestamp = None

    if not search_timestamp:
        required_timestamps = all_timestamps
    else:
        new_timestamp_idx = all_timestamps.index(search_timestamp)
        if new_timestamp_idx == 0:
            required_timestamps = [search_timestamp]
        else:
            previous_timestamp = all_timestamps[new_timestamp_idx - 1]
            required_timestamps = [search_timestamp]

    tree_lookup = {}

    def get_tree(timestamp):
        if timestamp is None:
            return StructuredDataTree()

        if timestamp in tree_lookup:
            return tree_lookup[timestamp]

        if timestamp == latest_timestamp:
            inventory_tree = load_filtered_inventory_tree(hostname)
            if inventory_tree is None:
                raise LoadStructuredDataError()
            tree_lookup[timestamp] = inventory_tree
        else:
            inventory_archive_path = "%s/%s" % (inventory_archive_dir,
                                                timestamp)
            tree_lookup[timestamp] = _filter_tree(
                StructuredDataTree().load_from(inventory_archive_path))
        return tree_lookup[timestamp]

    corrupted_history_files = []
    delta_history = []
    for _idx, timestamp in enumerate(required_timestamps):
        cached_delta_path = os.path.join(
            cmk.utils.paths.var_dir, "inventory_delta_cache", hostname,
            "%s_%s" % (previous_timestamp, timestamp))

        cached_data = None
        try:
            cached_data = store.load_object_from_file(cached_delta_path)
        except MKGeneralException:
            pass

        if cached_data:
            new, changed, removed, delta_tree_data = cached_data
            delta_tree = StructuredDataTree()
            delta_tree.create_tree_from_raw_tree(delta_tree_data)
            delta_history.append(
                (timestamp, (new, changed, removed, delta_tree)))
            previous_timestamp = timestamp
            continue

        try:
            previous_tree = get_tree(previous_timestamp)
            current_tree = get_tree(timestamp)
            delta_data = current_tree.compare_with(previous_tree)
            new, changed, removed, delta_tree = delta_data
            if new or changed or removed:
                store.save_file(
                    cached_delta_path,
                    repr((new, changed, removed, delta_tree.get_raw_tree())),
                )
                delta_history.append((timestamp, delta_data))
        except RequestTimeout:
            raise
        except LoadStructuredDataError:
            corrupted_history_files.append(
                str(get_short_inventory_history_filepath(hostname, timestamp)))

        previous_timestamp = timestamp

    return delta_history, corrupted_history_files
Example #8
 def load_from(self, filepath):
     raw_tree = store.load_object_from_file(filepath)
     return self.create_tree_from_raw_tree(raw_tree)
Example #9
def _load_ip_lookup_cache(lock):
    # type: (bool) -> NewIPLookupCache
    return _convert_legacy_ip_lookup_cache(
        store.load_object_from_file(_cache_path(), default={}, lock=lock))
Example #10
def load_acknowledgements():
    return store.load_object_from_file(acknowledgement_path, default=[])
Example #11
 def _load_activation(self):
     self.__dict__.update(store.load_object_from_file(self._info_path(), {}))
Example #12
def _load_site_replication_status(site_id, lock=False):
    return store.load_object_from_file(
        _site_replication_status_path(site_id),
        default={},
        lock=lock,
    )
Example #13
    def load_file(self, name: str, deflt: Any, lock: bool = False) -> Any:
        if self.confdir is None:
            return deflt

        path = self.confdir + "/" + name + ".mk"
        return store.load_object_from_file(path, default=deflt, lock=lock)
Example #14
 def load_config(self) -> None:
     if not Path(self._bi_configuration_file).exists():
         self._load_config(bi_sample_config)
         return
     self._load_config(
         store.load_object_from_file(self._bi_configuration_file))
Example #15
 def _do_load_legacy_bookmarks(cls):
     path = config.user.confdir + "/bookmarks.mk"
     return store.load_object_from_file(path, default=[])
Example #16
 def load(self) -> PersistedSections[TRawDataSection]:
     raw_sections_data = _store.load_object_from_file(self.path, default={})
     return PersistedSections[TRawDataSection](
         {SectionName(k): v
          for k, v in raw_sections_data.items()})
Example #17
def _load_single_oid_cache(snmp_config):
    # type: (SNMPHostConfig) -> Dict[OID, Optional[RawValue]]
    cache_path = "%s/%s.%s" % (cmk.utils.paths.snmp_scan_cache_dir,
                               snmp_config.hostname, snmp_config.ipaddress)
    return store.load_object_from_file(cache_path, default={})
Example #18
def _load_single_oid_cache(
        snmp_config: SNMPHostConfig) -> Dict[OID, Optional[SNMPDecodedString]]:
    cache_path = "%s/%s.%s" % (cmk.utils.paths.snmp_scan_cache_dir,
                               snmp_config.hostname, snmp_config.ipaddress)
    return store.load_object_from_file(cache_path, default={})
Example #19
 def _last_activation_state(self, site_id):
     manager = ActivateChangesManager()
     site_state_path = os.path.join(manager.activation_persisted_dir,
                                    manager.site_filename(site_id))
     return store.load_object_from_file(site_state_path, {})
Example #20
 def _load_newest_host_labels_per_site(self) -> Dict[SiteId, float]:
     return store.load_object_from_file(
         DiscoveredHostLabelSyncJob.newest_host_labels_per_site_path(), default={})
Example #21
 def _load_site_state(self, site_id):
     return store.load_object_from_file(self.site_state_path(site_id), {})
Example #22
 def _read(self, file_path: Path) -> HostsData:
     return store.load_object_from_file(str(file_path), default={})
Example #23
def get_history_deltas(
    hostname: HostName,
    search_timestamp: Optional[str] = None
) -> Tuple[List[Tuple[str, InventoryDeltaData]], List[str]]:
    if '/' in hostname:
        return [], []  # just for security reasons

    inventory_path = "%s/%s" % (cmk.utils.paths.inventory_output_dir, hostname)
    if not os.path.exists(inventory_path):
        return [], []

    latest_timestamp = str(int(os.stat(inventory_path).st_mtime))
    inventory_archive_dir = "%s/%s" % (cmk.utils.paths.inventory_archive_dir,
                                       hostname)
    try:
        archived_timestamps = sorted(os.listdir(inventory_archive_dir))
    except OSError:
        return [], []

    all_timestamps: List[str] = archived_timestamps + [latest_timestamp]
    previous_timestamp: Optional[str] = None

    if not search_timestamp:
        required_timestamps = all_timestamps
    else:
        new_timestamp_idx = all_timestamps.index(search_timestamp)
        if new_timestamp_idx == 0:
            required_timestamps = [search_timestamp]
        else:
            previous_timestamp = all_timestamps[new_timestamp_idx - 1]
            required_timestamps = [search_timestamp]

    tree_lookup: Dict[str, Any] = {}

    def get_tree(timestamp: Optional[str]) -> StructuredDataNode:
        if timestamp is None:
            return StructuredDataNode()

        if timestamp in tree_lookup:
            return tree_lookup[timestamp]

        if timestamp == latest_timestamp:
            inventory_tree = load_filtered_inventory_tree(hostname)
            if inventory_tree is None:
                raise LoadStructuredDataError()
            tree_lookup[timestamp] = inventory_tree
        else:
            inventory_archive_path = Path(inventory_archive_dir, timestamp)
            tree_lookup[timestamp] = _filter_tree(
                StructuredDataStore.load_file(inventory_archive_path))
        return tree_lookup[timestamp]

    corrupted_history_files = []
    delta_history: List[Tuple[str, InventoryDeltaData]] = []
    for _idx, timestamp in enumerate(required_timestamps):
        cached_delta_path = os.path.join(
            cmk.utils.paths.var_dir, "inventory_delta_cache", hostname,
            "%s_%s" % (previous_timestamp, timestamp))

        cached_data = None
        try:
            cached_data = store.load_object_from_file(cached_delta_path,
                                                      default=None)
        except MKGeneralException:
            pass

        if cached_data:
            new, changed, removed, delta_tree_data = cached_data
            delta_tree = StructuredDataNode.deserialize(delta_tree_data)
            delta_history.append(
                (timestamp, (new, changed, removed, delta_tree)))
            previous_timestamp = timestamp
            continue

        try:
            previous_tree = get_tree(previous_timestamp)
            current_tree = get_tree(timestamp)
            delta_result = current_tree.compare_with(previous_tree)
            delta_data = (delta_result.counter['new'],
                          delta_result.counter['changed'],
                          delta_result.counter['removed'], delta_result.delta)
            new, changed, removed, delta_tree = delta_data
            if new or changed or removed:
                store.save_text_to_file(
                    cached_delta_path,
                    repr((new, changed, removed, delta_tree.serialize())),
                )
                delta_history.append((timestamp, delta_data))
        except LoadStructuredDataError:
            corrupted_history_files.append(
                str(get_short_inventory_history_filepath(hostname, timestamp)))

        previous_timestamp = timestamp

    return delta_history, corrupted_history_files
Example #24
def test_save_data_to_file(tmp_path, path_type, data):
    path = path_type(tmp_path / "lala")
    store.save_object_to_file(path, data)
    assert store.load_object_from_file(path, default=None) == data
Example #25
 def _load_result(path: Path) -> Union[str, object]:
     if remote_automation_call_came_from_pre21():
         return store.load_object_from_file(path, default=None)
     return store.load_text_from_file(path)
Example #26
def test_load_data_from_file_empty(tmp_path, path_type):
    locked_file = tmp_path / "test"
    locked_file.write_text("", encoding="utf-8")
    data = store.load_object_from_file(path_type(locked_file),
                                       default="DEF")
    assert data == "DEF"
Example #27
 def _do_load_legacy_bookmarks(cls):
     if config.user.confdir is None:
         raise Exception("user confdir is None")
     path = config.user.confdir + "/bookmarks.mk"
     return store.load_object_from_file(path, default=[])
Example #28
def test_load_data_not_locked(tmp_path, path_type):
    locked_file = tmp_path / "locked_file"
    locked_file.write_text("[1, 2]", encoding="utf-8")

    store.load_object_from_file(path_type(locked_file), default=None)
    assert store.have_lock(path_type(locked_file)) is False
Example #29
 def _load_acknowledgements(self, lock=False):
     return store.load_object_from_file(self._ack_path,
                                        default={},
                                        lock=lock)
Example #30
def pid_from_file(pid_file: Path) -> Optional[ProcessId]:
    """Read a process id from a given pid file"""
    try:
        return ProcessId(int(load_object_from_file(pid_file, default=None)))
    except Exception:
        return None
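
Round-tripping the helper above works because save_object_to_file serializes with repr() and load_object_from_file parses it back, so an int survives the trip (a hypothetical usage sketch):

import os
from pathlib import Path

from cmk.utils.store import save_object_to_file

pid_file = Path("/tmp/example.pid")  # hypothetical location
save_object_to_file(pid_file, os.getpid())
assert pid_from_file(pid_file) == os.getpid()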