Code Example #1
def _save_inventory_tree(
    hostname: HostName,
    inventory_tree: StructuredDataTree,
) -> Optional[StructuredDataTree]:
    store.makedirs(cmk.utils.paths.inventory_output_dir)

    filepath = cmk.utils.paths.inventory_output_dir + "/" + hostname
    if inventory_tree.is_empty():
        # Remove empty inventory files. Important for host inventory icon
        if os.path.exists(filepath):
            os.remove(filepath)
        if os.path.exists(filepath + ".gz"):
            os.remove(filepath + ".gz")
        return None

    old_tree = StructuredDataTree().load_from(filepath)
    old_tree.normalize_nodes()
    if old_tree.is_equal(inventory_tree):
        console.verbose("Inventory was unchanged\n")
        return None

    if old_tree.is_empty():
        console.verbose("New inventory tree\n")
    else:
        console.verbose("Inventory tree has changed\n")
        old_time = os.stat(filepath).st_mtime
        arcdir = "%s/%s" % (cmk.utils.paths.inventory_archive_dir, hostname)
        store.makedirs(arcdir)
        os.rename(filepath, arcdir + ("/%d" % old_time))
    inventory_tree.save_to(cmk.utils.paths.inventory_output_dir, hostname)
    return old_tree
Code Example #2
def _store_status_file_of(status_file_path, piggyback_file_paths):
    # type: (Path, List[Path]) -> None
    store.makedirs(status_file_path.parent)

    # Cannot use store.save_bytes_to_file like:
    # 1. store.save_bytes_to_file(status_file_path, b"")
    # 2. set utime of piggybacked host files
    # Between 1. and 2.:
    # - the piggybacked host may check its files
    # - status file is newer (before utime of piggybacked host files is set)
    # => piggybacked host file is outdated
    with tempfile.NamedTemporaryFile("wb",
                                     dir=str(status_file_path.parent),
                                     prefix=".%s.new" % status_file_path.name,
                                     delete=False) as tmp:
        tmp_path = tmp.name
        os.chmod(tmp_path, 0o660)
        tmp.write(b"")

        tmp_stats = os.stat(tmp_path)
        status_file_times = (tmp_stats.st_atime, tmp_stats.st_mtime)
        for piggyback_file_path in piggyback_file_paths:
            try:
                # TODO use Path.stat() but be aware of:
                # On POSIX platforms Python reads atime and mtime at nanosecond resolution
                # but only writes them at microsecond resolution.
                # (We're using os.utime() in _store_status_file_of())
                os.utime(str(piggyback_file_path), status_file_times)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    continue
                raise
    os.rename(tmp_path, str(status_file_path))
Code Example #3
def do_inv(
    hostnames: List[HostName],
    *,
    selected_sections: checkers.SectionNameCollection,
    run_only_plugin_names: Optional[Set[InventoryPluginName]] = None,
) -> None:
    store.makedirs(cmk.utils.paths.inventory_output_dir)
    store.makedirs(cmk.utils.paths.inventory_archive_dir)

    for hostname in hostnames:
        section.section_begin(hostname)
        try:
            host_config = config.HostConfig.make_host_config(hostname)
            inv_result = _do_active_inventory_for(
                host_config=host_config,
                selected_sections=selected_sections,
                run_only_plugin_names=run_only_plugin_names,
            )

            _run_inventory_export_hooks(host_config, inv_result.trees.inventory)
            # TODO: inv_results.source_results is completely ignored here.
            # We should process the results to make errors visible on the console
            _show_inventory_results_on_console(inv_result.trees)

        except Exception as e:
            if cmk.utils.debug.enabled():
                raise

            section.section_error("%s" % e)
        finally:
            cmk.utils.cleanup.cleanup_globals()
Code Example #4
File: prediction.py  Project: a-s-995/checkmk-1
def get_levels(
        hostname,  # type: HostName
        service_description,  # type: ServiceName
        dsname,  # type: MetricName
        params,  # type: PredictionParameters
        cf,  # type: ConsolidationFunctionName
        levels_factor=1.0,  # type: float
):
    # type: (...) -> Tuple[Optional[float], EstimatedLevels]
    now = int(time.time())
    period_info = prediction_periods[params["period"]]  # type: Dict

    timegroup, rel_time = cast(GroupByFunction, period_info["groupby"])(now)

    pred_dir = cmk.utils.prediction.predictions_dir(hostname,
                                                    service_description,
                                                    dsname)
    store.makedirs(pred_dir)

    pred_file = os.path.join(pred_dir, timegroup)
    cmk.utils.prediction.clean_prediction_files(pred_file)

    data_for_pred = None  # type: Optional[PredictionData]
    if is_prediction_up2date(pred_file, timegroup, params):
        # Suppression: I am not sure how to check what this function returns
        #              For now I hope this is compatible.
        data_for_pred = cmk.utils.prediction.retrieve_data_for_prediction(  # type: ignore[assignment]
            pred_file, timegroup)

    if data_for_pred is None:
        logger.log(VERBOSE, "Calculating prediction data for time group %s",
                   timegroup)
        cmk.utils.prediction.clean_prediction_files(pred_file, force=True)

        time_windows = time_slices(now, int(params["horizon"] * 86400),
                                   period_info, timegroup)

        rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(
            hostname, service_description, dsname, cf)

        data_for_pred = calculate_data_for_prediction(time_windows,
                                                      rrd_datacolumn)

        info = {
            u"time": now,
            u"range": time_windows[0],
            u"cf": cf,
            u"dsname": dsname,
            u"slice": period_info["slice"],
            u"params": params,
        }  # type: PredictionInfo
        save_predictions(pred_file, info, data_for_pred)

    # Find reference value in data_for_pred
    index = int(rel_time /
                cast(int, data_for_pred["step"]))  # fixed: true-division
    reference = dict(
        zip(data_for_pred["columns"], data_for_pred["points"][index]))
    return cmk.utils.prediction.estimate_levels(reference, params,
                                                levels_factor)
Code Example #5
File: changes.py  Project: spearheadsys/checkmk
def log_entry(linkinfo, action, message, user_id=None):
    # Using attrencode here is against our regular rule to do the escaping
    # at the last possible time: When rendering. But this here is the last
    # place where we can distinguish between HTML() encapsulated (already)
    # escaped / allowed HTML and strings to be escaped.
    message = html.attrencode(message).strip()

    # TODO: Create a more generic referencing
    # linkinfo identifies the object operated on. It can be a Host or a Folder
    # or a text.
    # linkinfo is either a Folder, or a Host or a hostname or None
    if hasattr(linkinfo, "linkinfo"):
        link = linkinfo.linkinfo()
    else:
        link = linkinfo

    write_tokens = (
        time.strftime("%s"),
        link or "-",
        user_id or config.user.id or "-",
        action,
        message.replace("\n", "\\n"),
    )

    # TODO: once we know all of these are unicode, remove this line
    write_tokens = (t if isinstance(t, unicode) else t.encode("utf-8")
                    for t in write_tokens)

    store.makedirs(audit_log_path.parent)
    with audit_log_path.open(mode="a", encoding='utf-8') as f:
        audit_log_path.chmod(0o660)
        f.write(u" ".join(write_tokens) + u"\n")
Code Example #6
File: config_sync.py  Project: tklecker/checkmk
    def _generate_snapshot(self, snapshot_work_dir: str, target_filepath: str,
                           generic_components: List[ReplicationPath],
                           custom_components: List[ReplicationPath],
                           reuse_identical_snapshots: bool) -> None:
        generate_start_time = time.time()
        target_basename = os.path.basename(target_filepath)
        store.makedirs(os.path.dirname(target_filepath))

        # This is not supported in CME, most of the CME files are customized!
        # Only the sitespecific custom component is currently supported
        if reuse_identical_snapshots:
            # Note/Requirement: There is (currently) no need to rsync custom components, since these components are always
            #                   generated on the fly in a custom directory
            # Check if a snapshot with the same content has already been packed.
            snapshot_fingerprint = self._get_snapshot_fingerprint(
                snapshot_work_dir, generic_components, custom_components)
            identical_snapshot = self._available_snapshots.get(
                snapshot_fingerprint)
            if identical_snapshot:
                os.symlink(identical_snapshot, target_filepath)
                self._statistics_tar[os.path.basename(
                    identical_snapshot)].append(
                        "Reused by %-40s (took %.4fsec)" %
                        (target_basename, time.time() - generate_start_time))
                return

        # Generate the final tar command
        required_subtars = ["%s.tar" % c.ident for c in generic_components]
        final_tar_command = [
            "tar", "czf", target_filepath, "--owner=0", "--group=0", "-C",
            self._tarfile_dir
        ] + required_subtars

        # Add custom files to final tar command
        if custom_components:
            base_dir = os.path.basename(target_filepath)
            tarfile_dir = "%s/custom_files/%s" % (self._tarfile_dir, base_dir)
            os.makedirs(tarfile_dir)

            self._create_custom_components_tarfiles(snapshot_work_dir,
                                                    custom_components,
                                                    tarfile_dir)
            required_custom_subtars = [
                "%s.tar" % c.ident for c in custom_components
            ]
            final_tar_command.extend(["-C", tarfile_dir] +
                                     required_custom_subtars)

        # Execute final tar command, create the snapshot
        self._execute_bash_commands([final_tar_command])

        if reuse_identical_snapshots:
            self._available_snapshots[snapshot_fingerprint] = target_filepath

        self._statistics_tar.setdefault(
            target_basename, []).append("Snapshot creation took %.4fsec" %
                                        (time.time() - generate_start_time))
        self._logger.debug("Snapshot %-30s took %.4fsec" %
                           (target_basename,
                            (time.time() - generate_start_time)))
Code Example #7
File: inventory.py  Project: surajrb/checkmk
def do_inv(hostnames):
    # type: (List[HostName]) -> None
    store.makedirs(cmk.utils.paths.inventory_output_dir)
    store.makedirs(cmk.utils.paths.inventory_archive_dir)

    for hostname in hostnames:
        console.section_begin(hostname)
        try:
            config_cache = config.get_config_cache()
            host_config = config_cache.get_host_config(hostname)

            if host_config.is_cluster:
                ipaddress = None
            else:
                ipaddress = ip_lookup.lookup_ip_address(hostname)

            sources = data_sources.DataSources(hostname, ipaddress)
            inventory_tree, status_data_tree = _do_inv_for(
                sources,
                multi_host_sections=None,
                host_config=host_config,
                ipaddress=ipaddress,
            )
            _run_inventory_export_hooks(host_config, inventory_tree)
            _show_inventory_results_on_console(inventory_tree,
                                               status_data_tree)

        except Exception as e:
            if cmk.utils.debug.enabled():
                raise

            console.section_error("%s" % e)
        finally:
            cmk.base.cleanup.cleanup_globals()
Code Example #8
File: __init__.py  Project: PLUTEX/checkmk
def commandline_inventory(
    hostnames: List[HostName],
    *,
    selected_sections: SectionNameCollection,
    run_plugin_names: Container[InventoryPluginName] = EVERYTHING,
) -> None:
    store.makedirs(cmk.utils.paths.inventory_output_dir)
    store.makedirs(cmk.utils.paths.inventory_archive_dir)

    for hostname in hostnames:
        section.section_begin(hostname)
        host_config = config.HostConfig.make_host_config(hostname)
        try:
            _commandline_inventory_on_host(
                host_config=host_config,
                selected_sections=selected_sections,
                run_plugin_names=run_plugin_names,
            )

        except Exception as e:
            if cmk.utils.debug.enabled():
                raise
            section.section_error("%s" % e)
        finally:
            cmk.utils.cleanup.cleanup_globals()
Code Example #9
File: changes.py  Project: edeka-spatt/checkmk
def log_entry(linkinfo, action, message, user_id=None):
    # TODO: Create a more generic referencing
    # linkinfo identifies the object operated on. It can be a Host or a Folder
    # or a text.
    # linkinfo is either a Folder, or a Host or a hostname or None
    if hasattr(linkinfo, "linkinfo"):
        link = linkinfo.linkinfo()
    else:
        link = linkinfo

    write_tokens = (
        time.strftime("%s"),
        link or "-",
        user_id or config.user.id or "-",
        action,
        message.replace("\n", "\\n"),
    )

    # TODO: once we know all of these are unicode, remove this line
    write_tokens = (t if isinstance(t, six.text_type) else t.encode("utf-8")
                    for t in write_tokens)

    store.makedirs(audit_log_path.parent)
    with audit_log_path.open(mode="a", encoding='utf-8') as f:
        audit_log_path.chmod(0o660)
        f.write(u" ".join(write_tokens) + u"\n")
Code Example #10
def log_entry(linkinfo, action, message, user_id=None):
    # Using attrencode here is against our regular rule to do the escaping
    # at the last possible time: When rendering. But this here is the last
    # place where we can distinguish between HTML() encapsulated (already)
    # escaped / allowed HTML and strings to be escaped.
    message = cmk.utils.make_utf8(html.attrencode(message)).strip()

    # TODO: Create a more generic referencing
    # linkinfo identifies the object operated on. It can be a Host or a Folder
    # or a text.
    # linkinfo is either a Folder, or a Host or a hostname or None
    if hasattr(linkinfo, "linkinfo"):
        link = linkinfo.linkinfo()
    elif linkinfo is None:
        link = "-"
    else:
        link = linkinfo

    if user_id is None and config.user.id is not None:
        user_id = config.user.id
    elif user_id == '':
        user_id = '-'

    if user_id:
        user_id = user_id.encode("utf-8")

    store.makedirs(os.path.dirname(audit_log_path))
    with open(audit_log_path, "ab") as f:
        os.chmod(f.name, 0o660)
        f.write("%d %s %s %s %s\n" % (int(
            time.time()), link, user_id, action, message.replace("\n", "\\n")))
Code Example #11
    def save(self, settings, site_specific=False):
        filename = self.config_file(site_specific)

        output = wato_fileheader()
        for varname, value in settings.items():
            output += "%s = %s\n" % (varname, pprint.pformat(value))

        store.makedirs(os.path.dirname(filename))
        store.save_file(filename, output)
Code Example #12
File: prediction.py  Project: n00rm/checkmk
def get_levels(hostname,
               service_description,
               dsname,
               params,
               cf,
               levels_factor=1.0):
    # type: (HostName, ServiceName, MetricName, PredictionParameters, ConsolidationFunctionName, float) -> Tuple[int, EstimatedLevels]
    now = int(time.time())
    period_info = prediction_periods[params["period"]]  # type: Dict

    timegroup, rel_time = cast(GroupByFunction, period_info["groupby"])(now)

    pred_dir = cmk.utils.prediction.predictions_dir(hostname,
                                                    service_description,
                                                    dsname)
    store.makedirs(pred_dir)

    pred_file = os.path.join(pred_dir, timegroup)
    cmk.utils.prediction.clean_prediction_files(pred_file)

    data_for_pred = None
    if is_prediction_up2date(pred_file, timegroup, params):
        data_for_pred = cmk.utils.prediction.retrieve_data_for_prediction(
            pred_file, timegroup)

    if data_for_pred is None:
        logger.log(VERBOSE, "Calculating prediction data for time group %s",
                   timegroup)
        cmk.utils.prediction.clean_prediction_files(pred_file, force=True)

        time_windows = time_slices(now, int(params["horizon"] * 86400),
                                   period_info, timegroup)

        rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(
            hostname, service_description, dsname, cf)

        data_for_pred = calculate_data_for_prediction(time_windows,
                                                      rrd_datacolumn)

        info = {
            u"time": now,
            u"range": time_windows[0],
            u"cf": cf,
            u"dsname": dsname,
            u"slice": period_info["slice"],
            u"params": params,
        }  # type: PredictionInfo
        save_predictions(pred_file, info, data_for_pred)

    # Find reference value in data_for_pred
    index = int(rel_time /
                cast(int, data_for_pred["step"]))  # fixed: true-division
    reference = dict(
        zip(data_for_pred["columns"], data_for_pred["points"][index]))
    return cmk.utils.prediction.estimate_levels(reference, params,
                                                levels_factor)
Code Example #13
def wsgi_app(monkeypatch, recreate_openapi_spec):
    monkeypatch.setenv("OMD_SITE", "NO_SITE")
    store.makedirs(paths.omd_root + '/var/check_mk/web')
    store.makedirs(paths.omd_root + '/var/check_mk/php-api')
    store.makedirs(paths.omd_root + '/var/check_mk/wato/php-api')
    store.makedirs(paths.omd_root + '/var/log')
    store.makedirs(paths.omd_root + '/tmp/check_mk')
    wsgi_callable = make_app()
    cookies = CookieJar()
    return WebTestAppForCMK(wsgi_callable,
                            cookiejar=cookies)  # type: WebTestAppForCMK
Code Example #14
    def save(self, settings, site_specific=False, custom_site_path=None):
        filename = self.config_file(site_specific)
        if custom_site_path:
            filename = os.path.join(custom_site_path,
                                    os.path.relpath(filename, cmk.utils.paths.omd_root))

        output = wato_fileheader()
        for varname, value in settings.items():
            output += "%s = %s\n" % (varname, pprint.pformat(value))

        store.makedirs(os.path.dirname(filename))
        store.save_file(filename, output)
Code Example #15
File: bi_packs.py  Project: PLUTEX/checkmk
    def save_config(self) -> None:
        store.save_text_to_file(self._bi_configuration_file,
                                repr(self.generate_config()))
        enabled_aggregations = str(
            len([
                bi_aggr for bi_aggr in self.get_all_aggregations()
                if not bi_aggr.computation_options.disabled
            ]))

        store.makedirs(self._num_enabled_aggregations_dir())
        store.save_text_to_file(self._num_enabled_aggregations_path(),
                                enabled_aggregations)
Code Example #16
def _initialize_local_po_file(lang: LanguageName) -> None:
    """Initialize the file in the local hierarchy with the file in the default hierarchy if needed"""
    po_file = _po_file(lang)

    store.makedirs(Path(po_file).parent)

    builtin_po_file = _builtin_po_file(lang)
    if not os.path.exists(po_file) and builtin_po_file.exists():
        with builtin_po_file.open("r", encoding="utf-8") as source, Path(po_file).open(
            "w", encoding="utf-8"
        ) as dest:
            dest.write(source.read())
        logger.info("Initialize %s with the file in the default hierarchy", po_file)
Code Example #17
def _initialize_local_po_file(lang: LanguageName) -> None:
    """Initialize the file in the local hierarchy with the file in the default hierarchy if needed"""
    po_file = _po_file(lang)

    store.makedirs(Path(po_file).parent)

    builtin_po_file = _builtin_po_file(lang)
    if not po_file.exists() and builtin_po_file.exists():
        po_file.write_text(
            builtin_po_file.read_text(encoding="utf-8"),
            encoding="utf-8",
        )
        logger.info("Initialize %s with the file in the default hierarchy",
                    po_file)
Code Example #18
def recreate_openapi_spec(mocker, _cache=[]):  # pylint: disable=dangerous-default-value
    from cmk.gui.openapi import generate
    spec_path = paths.omd_root + "/share/checkmk/web/htdocs/openapi"
    openapi_spec_dir = mocker.patch('cmk.gui.wsgi.applications.rest_api')
    openapi_spec_dir.return_value = spec_path

    if not _cache:
        with SPEC_LOCK:
            if not _cache:
                _cache.append(generate())

    spec_data = ensure_str(_cache[0])
    store.makedirs(spec_path)
    store.save_text_to_file(spec_path + "/checkmk.yaml", spec_data)
Code Example #19
    def save_config(self) -> None:
        store.save_file(self._bi_configuration_file,
                        repr(self.generate_config()))
        enabled_aggregations = str(
            len([
                bi_aggr for bi_aggr in self.get_all_aggregations()
                if not bi_aggr.computation_options.disabled
            ]))

        enabled_info_path = os.path.join(cmk.utils.paths.var_dir, "wato")
        store.makedirs(enabled_info_path)
        store.save_file(
            os.path.join(enabled_info_path, "num_enabled_aggregations"),
            enabled_aggregations)
Code Example #20
def wsgi_app(monkeypatch):
    monkeypatch.setenv("OMD_SITE", "NO_SITE")
    store.makedirs(cmk.utils.paths.omd_root + '/var/check_mk/web')
    store.makedirs(cmk.utils.paths.omd_root + '/var/check_mk/php-api')
    store.makedirs(cmk.utils.paths.omd_root + '/var/check_mk/wato/php-api')
    store.makedirs(cmk.utils.paths.omd_root + '/tmp/check_mk')
    wsgi_callable = make_app()
    cookies = CookieJar()
    return WebTestAppForCMK(wsgi_callable, cookiejar=cookies)
Code Example #21
File: bulk_import.py  Project: dnlldl/checkmk
    def _upload_csv_file(self) -> None:
        store.makedirs(self._upload_tmp_path)

        self._cleanup_old_files()

        upload_info = self._vs_upload().from_html_vars("_upload")
        self._vs_upload().validate_value(upload_info, "_upload")

        file_id = "%s-%d" % (config.user.id, int(time.time()))

        store.save_text_to_file(self._file_path(), upload_info["file"])

        # make selections available to next page
        html.request.set_var("file_id", file_id)

        if upload_info["do_service_detection"]:
            html.request.set_var("do_service_detection", "1")
Code Example #22
    def _upload_csv_file(self) -> None:
        store.makedirs(self._upload_tmp_path)

        self._cleanup_old_files()

        upload_info = self._vs_upload().from_html_vars("_upload")
        self._vs_upload().validate_value(upload_info, "_upload")

        file_id = uuid.uuid4().hex

        store.save_text_to_file(self._file_path(file_id=file_id), upload_info["file"])

        # make selections available to next page
        request.set_var("file_id", file_id)

        if upload_info["do_service_detection"]:
            request.set_var("do_service_detection", "1")
Code Example #23
File: icons.py  Project: majma24/checkmk
    def _upload_icon(self, icon_info):
        # Add the icon category to the PNG comment
        im = Image.open(io.BytesIO(icon_info['icon'][2]))
        im.info['Comment'] = icon_info['category']
        meta = PngImagePlugin.PngInfo()
        for k, v in im.info.items():
            if isinstance(v, (bytes, str)):
                meta.add_text(k, v, 0)

        # and finally save the image
        dest_dir = "%s/local/share/check_mk/web/htdocs/images/icons" % cmk.utils.paths.omd_root
        store.makedirs(dest_dir)
        try:
            file_name = os.path.basename(icon_info['icon'][0])
            im.save(dest_dir + '/' + file_name, 'PNG', pnginfo=meta)
        except IOError as e:
            # Might happen with interlaced PNG files and PIL version < 1.1.7
            raise MKUserError(None, _('Unable to upload icon: %s') % e)
Code Example #24
File: bulk_import.py  Project: stefan927/checkmk
    def _upload_csv_file(self):
        store.makedirs(self._upload_tmp_path)

        self._cleanup_old_files()

        upload_info = self._vs_upload().from_html_vars("_upload")
        self._vs_upload().validate_value(upload_info, "_upload")
        _file_name, _mime_type, content = upload_info["file"]

        file_id = "%s-%d" % (config.user.id, int(time.time()))

        store.save_file(self._file_path(), content.encode("utf-8"))

        # make selections available to next page
        html.request.set_var("file_id", file_id)

        if upload_info["do_service_detection"]:
            html.request.set_var("do_service_detection", "1")
Code Example #25
File: groups.py  Project: n00rm/checkmk
def save_group_information(all_groups, custom_default_config_dir=None):
    # Split groups data into Check_MK/Multisite parts
    check_mk_groups = {}
    multisite_groups = {}

    if custom_default_config_dir:
        check_mk_config_dir = "%s/conf.d/wato" % custom_default_config_dir
        multisite_config_dir = "%s/multisite.d/wato" % custom_default_config_dir
    else:
        check_mk_config_dir = "%s/conf.d/wato" % cmk.utils.paths.default_config_dir
        multisite_config_dir = "%s/multisite.d/wato" % cmk.utils.paths.default_config_dir

    for what, groups in all_groups.items():
        check_mk_groups[what] = {}
        for gid, group in groups.items():
            check_mk_groups[what][gid] = group['alias']

            for attr, value in group.items():
                if attr != 'alias':
                    multisite_groups.setdefault(what, {})
                    multisite_groups[what].setdefault(gid, {})
                    multisite_groups[what][gid][attr] = value

    # Save Check_MK world related parts
    store.makedirs(check_mk_config_dir)
    output = wato_fileheader()
    for what in ["host", "service", "contact"]:
        if check_mk_groups.get(what):
            output += "if type(define_%sgroups) != dict:\n    define_%sgroups = {}\n" % (
                what, what)
            output += "define_%sgroups.update(%s)\n\n" % (
                what, format_config_value(check_mk_groups[what]))
    store.save_file("%s/groups.mk" % check_mk_config_dir, output)

    # Users with passwords for Multisite
    store.makedirs(multisite_config_dir)
    output = wato_fileheader()
    for what in ["host", "service", "contact"]:
        if multisite_groups.get(what):
            output += "multisite_%sgroups = \\\n%s\n\n" % (
                what, format_config_value(multisite_groups[what]))
    store.save_file("%s/groups.mk" % multisite_config_dir, output)

    _clear_group_information_request_cache()
Code Example #26
def _save_gui_groups(all_groups: AllGroupSpecs, config_dir: str) -> None:
    multisite_groups: Dict[GroupType, Dict[GroupName, GroupSpec]] = {}

    for group_type, groups in all_groups.items():
        for gid, group in groups.items():
            for attr, value in group.items():
                if attr != "alias":  # Saved in cmk_base
                    multisite_groups.setdefault(group_type, {})
                    multisite_groups[group_type].setdefault(gid, {})
                    multisite_groups[group_type][gid][attr] = value

    store.makedirs(config_dir)
    output = wato_fileheader()
    for what in get_args(GroupType):
        if multisite_groups.get(what):
            output += "multisite_%sgroups = \\\n%s\n\n" % (
                what,
                format_config_value(multisite_groups[what]),
            )
    store.save_text_to_file("%s/groups.mk" % config_dir, output)
Code Example #27
def _store_status_file_of(status_file_path, piggyback_file_paths):
    store.makedirs(os.path.dirname(status_file_path))
    with tempfile.NamedTemporaryFile("w",
                                     dir=os.path.dirname(status_file_path),
                                     prefix=".%s.new" % os.path.basename(status_file_path),
                                     delete=False) as tmp:
        tmp_path = tmp.name
        os.chmod(tmp_path, 0o660)
        tmp.write("")

        tmp_stats = os.stat(tmp_path)
        status_file_times = (tmp_stats.st_atime, tmp_stats.st_mtime)
        for piggyback_file_path in piggyback_file_paths:
            try:
                os.utime(piggyback_file_path, status_file_times)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    continue
                else:
                    raise
    os.rename(tmp_path, status_file_path)
Code Example #28
def _save_cmk_base_groups(all_groups: AllGroupSpecs, config_dir: str) -> None:
    check_mk_groups: Dict[GroupType, Dict[GroupName, str]] = {}
    for group_type, groups in all_groups.items():
        check_mk_groups[group_type] = {}
        for gid, group in groups.items():
            check_mk_groups[group_type][gid] = group["alias"]

    # Save Checkmk world related parts
    store.makedirs(config_dir)
    output = wato_fileheader()
    for group_type in get_args(GroupType):
        if check_mk_groups.get(group_type):
            output += "if type(define_%sgroups) != dict:\n    define_%sgroups = {}\n" % (
                group_type,
                group_type,
            )
            output += "define_%sgroups.update(%s)\n\n" % (
                group_type,
                format_config_value(check_mk_groups[group_type]),
            )
    store.save_text_to_file("%s/groups.mk" % config_dir, output)
Code Example #29
File: inventory.py  Project: chill-uk/checkmk
def do_inv(hostnames: List[HostName]) -> None:
    store.makedirs(cmk.utils.paths.inventory_output_dir)
    store.makedirs(cmk.utils.paths.inventory_archive_dir)

    config_cache = config.get_config_cache()

    for hostname in hostnames:
        section.section_begin(hostname)
        try:
            host_config = config.HostConfig.make_host_config(hostname)
            if host_config.is_cluster:
                ipaddress = None
            else:
                ipaddress = ip_lookup.lookup_ip_address(host_config)

            inventory_tree, status_data_tree = _do_inv_for(
                config_cache,
                host_config,
                ipaddress,
                sources=checkers.make_sources(
                    host_config,
                    ipaddress,
                    mode=checkers.Mode.INVENTORY,
                ),
                multi_host_sections=None,
            )[:2]
            _run_inventory_export_hooks(host_config, inventory_tree)
            _show_inventory_results_on_console(inventory_tree,
                                               status_data_tree)

        except Exception as e:
            if cmk.utils.debug.enabled():
                raise

            section.section_error("%s" % e)
        finally:
            cmk.utils.cleanup.cleanup_globals()
Code Example #30
File: changes.py  Project: gugas1nwork/checkmk
def log_entry(linkinfo, action, message, user_id=None):
    # TODO: Create a more generic referencing
    # linkinfo identifies the object operated on. It can be a Host or a Folder
    # or a text.
    # linkinfo is either a Folder, or a Host or a hostname or None
    if hasattr(linkinfo, "linkinfo"):
        link = linkinfo.linkinfo()
    else:
        link = linkinfo

    write_tokens_tuple = (
        time.strftime("%s"),
        link or "-",
        user_id or config.user.id or "-",
        action,
        message.replace("\n", "\\n"),
    )

    write_tokens = (ensure_str(t) for t in write_tokens_tuple)

    store.makedirs(audit_log_path.parent)
    with audit_log_path.open(mode="a", encoding='utf-8') as f:
        audit_log_path.chmod(0o660)
        f.write(u" ".join(write_tokens) + u"\n")