Example 1
def _try_history_update() -> None:
    logger.debug("Try license usage history update.")

    license_usage_dir.mkdir(parents=True, exist_ok=True)

    with store.locked(next_run_filepath), store.locked(history_filepath):
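        # Both files are locked together so the next-run timestamp and the history dump stay consistent.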
        now = datetime.now()
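        # The stored timestamp is ROT47-obfuscated; the default "_" decodes to "0", i.e. no previous run.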
        next_run_ts = int(rot47(store.load_text_from_file(next_run_filepath, default="_")))

        if not _may_update(now.timestamp(), next_run_ts):
            return

        history_dump = _create_or_update_history_dump()
        store.save_bytes_to_file(history_filepath, history_dump.serialize())
        store.save_text_to_file(next_run_filepath, rot47(str(_create_next_run_ts(now))))
        logger.debug("Successfully updated history.")
Example 2
def save_extensions(extensions: LicenseUsageExtensions) -> None:
    license_usage_dir.mkdir(parents=True, exist_ok=True)
    extensions_filepath = _get_extensions_filepath()

    with store.locked(extensions_filepath):
        store.save_bytes_to_file(extensions_filepath,
                                 _serialize_dump(extensions.for_report()))
Example 3
def _get_extensions():
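    # Read the raw extensions under the file lock; a missing file defaults to an empty JSON object.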
    with store.locked(extensions_filepath):
        raw_extensions = store.load_bytes_from_file(
            extensions_filepath,
            default=b'{}',
        )
    return LicenseUsageExtensions.deserialize(raw_extensions)
Example 4
def page_run_cron() -> None:

    lock_file = _lock_file()

    # Prevent cron jobs from being run too often; we also need locking
    # in order to prevent overlapping runs
    if lock_file.exists():
        last_run = lock_file.stat().st_mtime
        if time.time() - last_run < 59:
            raise MKGeneralException("Cron called too early. Skipping.")

    with lock_file.open("wb"):
        pass  # touches the file

    # The cron page is accessed unauthenticated. Once we leave the page_run_cron area
    # and enter the job functions, we always want to have a user context initialized to
    # keep the code free from special cases (if no user is logged in, then ...).
    # In general the jobs need to be run in privileged mode. Some jobs, like the network
    # scan, switch the user context to a specific other user during execution.
    with store.locked(lock_file), SuperUserContext():
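        # The lock is held for the entire job loop, so overlapping cron runs block here.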
        logger.debug("Starting cron jobs")

        for cron_job in multisite_cronjobs:
            try:
                job_name = cron_job.__name__

                logger.debug("Starting [%s]", job_name)
                cron_job()
                logger.debug("Finished [%s]", job_name)
            except Exception:
                response.set_data("An exception occured. Take a look at the web.log.\n")
                logger.exception("Exception in cron job [%s]", job_name)

        logger.debug("Finished all cron jobs")
        response.set_data("OK\n")
Example 5
def test_locked(locked_file, path_type):
    path = path_type(locked_file)

    assert store.have_lock(path) is False

    with store.locked(path):
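        # have_lock reports True only while this process holds the lock.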
        assert store.have_lock(path) is True

    assert store.have_lock(path) is False
Example 6
def load_extensions() -> LicenseUsageExtensions:
    extensions_filepath = _get_extensions_filepath()
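    # Load the raw dump under the file lock; a missing file defaults to an empty JSON object.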
    with store.locked(extensions_filepath):
        raw_extensions = deserialize_dump(
            store.load_bytes_from_file(
                extensions_filepath,
                default=b"{}",
            ))
    return LicenseUsageExtensions.parse(raw_extensions)
Example 7
def test_locked(tmp_path, path_type):
    locked_file = tmp_path / "locked_file"
    locked_file.write_text(u"", encoding="utf-8")

    path = path_type(locked_file)

    assert store.have_lock(path) is False

    with store.locked(path):
        assert store.have_lock(path) is True

    assert store.have_lock(path) is False
Example 8
    def _check_compilation_status(self) -> None:
        current_configstatus = self.compute_current_configstatus()
        if not self._compilation_required(current_configstatus):
            self._logger.debug("No compilation required")
            return

        with store.locked(self._path_compilation_lock):
            # Re-check whether compilation is required after the lock has been acquired
            # Another apache process might already have done the job
            current_configstatus = self.compute_current_configstatus()
            if not self._compilation_required(current_configstatus):
                self._logger.debug(
                    "No compilation required. An other process already compiled it"
                )
                return

            self.prepare_for_compilation(current_configstatus["online_sites"])

            # Compile the raw tree
            for aggregation in self._bi_packs.get_all_aggregations():
                start = time.time()
                self._compiled_aggregations[
                    aggregation.id] = aggregation.compile(self.bi_searcher)
                self._logger.debug("Compilation of %s took %f" %
                                   (aggregation.id, time.time() - start))

            self._verify_aggregation_title_uniqueness(
                self._compiled_aggregations)

            for aggr_id, aggr in self._compiled_aggregations.items():
                start = time.time()
                result = BICompiledAggregationSchema().dump(aggr)
                self._logger.debug(
                    "Schema dump took config took %f (%d branches)" %
                    (time.time() - start, len(aggr.branches)))
                start = time.time()
                self._marshal_save_data(
                    self._path_compiled_aggregations.joinpath(aggr_id), result)
                self._logger.debug("Save dump to disk took %f" %
                                   (time.time() - start))

            self._generate_part_of_aggregation_lookup(
                self._compiled_aggregations)

        known_sites = {
            kv[0]: kv[1]
            for kv in current_configstatus.get("known_sites", set())
        }
        self._cleanup_vanished_aggregations()
        self._bi_structure_fetcher.cleanup_orphaned_files(known_sites)

        self._path_compilation_timestamp.write_text(
            str(current_configstatus["configfile_timestamp"]))
Example 9
    def _check_compilation_status(self) -> None:
        current_configstatus = self.compute_current_configstatus()
        if not self._compilation_required(current_configstatus):
            self._logger.debug("No compilation required")
            return

        with store.locked(self._path_compilation_lock):
            # Re-check whether compilation is required after the lock has been acquired
            # Another apache process might already have done the job
            if not self._compilation_required(current_configstatus):
                self._logger.debug(
                    "No compilation required. An other process already compiled it"
                )
                return

            self.prepare_for_compilation(current_configstatus["online_sites"])

            # Compile the raw tree
            for aggregation in bi_packs.get_all_aggregations():
                start = time.time()
                self._compiled_aggregations[
                    aggregation.id] = aggregation.compile()
                self._logger.debug("Compilation of %s took %f" %
                                   (aggregation.id, time.time() - start))

            self._verify_aggregation_title_uniqueness(
                self._compiled_aggregations)

            for aggr_id, aggr in self._compiled_aggregations.items():
                start = time.time()
                result = BICompiledAggregationSchema().dump(aggr)
                # TODO: remove pprint before going live, change to marshal
                self._path_compiled_aggregations.joinpath(aggr_id).write_text(
                    pprint.pformat(result.data))
                self._logger.debug("Dump config took %f" %
                                   (time.time() - start))

        known_sites = {
            kv[0]: kv[1]
            for kv in current_configstatus.get("known_sites", set())
        }
        bi_structure_fetcher._cleanup_orphaned_files(known_sites)

        self._path_compilation_timestamp.write_text(
            str(current_configstatus["configfile_timestamp"]))
Example 10
@contextmanager
def activation_lock(mode: Optional[str]) -> Iterator[None]:
    """Try to acquire the activation lock and raise an exception in case it was not possible"""
    if mode is None:
        # TODO: We really should purge this strange case from being configurable
        yield None  # No locking at all
        return

    lock_file = cmk.utils.paths.default_config_dir + "/main.mk"

    if mode == "abort":
        with store.try_locked(lock_file) as result:
            if result is False:
                raise MKBailOut("Other restart currently in progress. Aborting.")
            yield None
        return

    if mode == "wait":
        with store.locked(lock_file):
            yield None
        return

    raise ValueError(f"Invalid lock mode: {mode}")
Example 11
def acquire(n):
    with store.locked(path):
        acquired.append(1)
        # Only one worker may be inside this block at a time if the lock provides mutual exclusion.
        assert len(acquired) == 1
        acquired.pop()