def sync_changes_before_remote_automation(site_id):
    """Push pending WATO changes to a remote site before executing an automation there.

    Loads the activate-changes state and, when the given site is out of sync,
    starts a sync-only activation (``prevent_activate=True`` keeps the remote
    config from being activated) and waits up to 30 seconds for it to finish.
    A failed or incomplete sync is logged, not raised.
    """
    # TODO: Cleanup this local import
    import cmk.gui.watolib.activate_changes  # pylint: disable=redefined-outer-name

    manager = cmk.gui.watolib.activate_changes.ActivateChangesManager()
    manager.load()

    if not manager.is_sync_needed(site_id):
        return

    logger.info("Syncing %s", site_id)

    manager.start([site_id], activate_foreign=True, prevent_activate=True)

    # Poll in half-second steps; give up after at most 30 seconds.
    remaining = 30.0
    while manager.is_running() and remaining > 0.0:
        time.sleep(0.5)
        remaining -= 0.5

    state = manager.get_site_state(site_id)
    if state and state["_state"] != "success":
        logger.error(
            _("Remote automation tried to sync pending changes but failed: %s"),
            state.get("_status_details"),
        )
def render(self) -> Tuple[Optional[str], Optional[HTML]]:
    """Render the perfometer for this row.

    Returns a 2-tuple of the title to show and the perfometer HTML, or
    ``(None, None)`` when nothing shall be shown (no perf data and no
    matching perfometer).
    """
    if not self._perf_data:
        return None, None

    # Prefer the new metrics module based perfometers.
    title, rendered = self._render_metrics_perfometer()
    if title is not None:
        return title, rendered

    # Otherwise fall back to a matching legacy Perf-O-Meter function.
    if not self._has_legacy_perfometer():
        return None, None

    logger.info(
        "Legacy perfometer rendered for %s / %s / %s",
        self._row["host_name"],
        self._row["service_description"],
        self._row["service_check_command"],
    )
    return self._render_legacy_perfometer()
def extract_snapshot(tar: tarfile.TarFile, domains: Dict[str, DomainSpec]) -> None:
    """Restore a configuration snapshot (used for "discard changes").

    The snapshot *tar* contains one gzipped sub-tar ("<domain>.tar.gz") per
    domain. For every domain present in both the snapshot and *domains*, the
    following phases run in order: permission check, pre-restore hook,
    cleanup of existing files, extraction, post-restore hook.

    Raises:
        MKGeneralException: when a phase that aborts on error reports errors,
            or when any errors were collected by the end of the restore.
    """
    # Map domain name -> sub-tar member inside the snapshot.
    tar_domains = {}
    for member in tar.getmembers():
        try:
            if member.name.endswith(".tar.gz"):
                tar_domains[member.name[:-7]] = member
        except Exception:
            pass  # best effort: skip members we cannot inspect

    # We are using the var_dir, because tmp_dir might not have enough space
    restore_dir = cmk.utils.paths.var_dir + "/wato/snapshots/restore_snapshot"
    if not os.path.exists(restore_dir):
        os.makedirs(restore_dir)

    def check_domain(domain: DomainSpec, tar_member: tarfile.TarInfo) -> List[str]:
        """Verify every path contained in the domain's sub-tar would be writable."""
        errors = []
        prefix = domain["prefix"]

        def check_exists_or_writable(path_tokens: List[str]) -> bool:
            # Walk upwards until an existing ancestor is found, then check
            # whether that ancestor is writable.
            if not path_tokens:
                return False
            if os.path.exists("/".join(path_tokens)):
                if os.access("/".join(path_tokens), os.W_OK):
                    return True  # exists and writable
                errors.append(
                    _("Permission problem: Path not writable %s") % "/".join(path_tokens))
                return False  # not writable
            return check_exists_or_writable(path_tokens[:-1])

        # The complete tar file never fits in stringIO buffer..
        tar.extract(tar_member, restore_dir)

        # Older versions of python tarfile handle empty subtar archives :(
        # This won't work: subtar = tarfile.open("%s/%s" % (restore_dir, tar_member.name))
        p = subprocess.Popen(
            ["tar", "tzf", "%s/%s" % (restore_dir, tar_member.name)],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding="utf-8",
        )
        stdout, stderr = p.communicate()
        if stderr:
            errors.append(_("Contains corrupt file %s") % tar_member.name)
            # Also remove the extracted sub-tar on the error path; the
            # previous early return leaked it.
            os.unlink("%s/%s" % (restore_dir, tar_member.name))
            return errors

        # Fix: iterating a str yields single characters, not lines. Check one
        # archive entry per output line of "tar tzf".
        for line in stdout.splitlines():
            full_path = prefix + "/" + line
            path_tokens = full_path.split("/")
            check_exists_or_writable(path_tokens)

        # Cleanup
        os.unlink("%s/%s" % (restore_dir, tar_member.name))
        return errors

    def cleanup_domain(domain: DomainSpec) -> List[str]:
        # Some domains, e.g. authorization, do not get a cleanup
        if domain.get("cleanup") is False:
            return []

        def path_valid(prefix: str, path: str) -> bool:
            # Reject absolute paths and parent-directory escapes.
            if path.startswith("/") or path.startswith(".."):
                return False
            return True

        # Remove old stuff
        # NOTE(review): "paths" is assumed to be an iterable of
        # (type, relative_path) pairs; the {} default only covers the
        # "no paths configured" case — confirm against DomainSpec producers.
        for what, path in domain.get("paths", {}):
            if not path_valid(domain["prefix"], path):
                continue
            full_path = "%s/%s" % (domain["prefix"], path)
            if os.path.exists(full_path):
                if what == "dir":
                    # Collect files excluded from the cleanup, expanding
                    # glob patterns where present.
                    exclude_files = []
                    for pattern in domain.get("exclude", []):
                        if "*" in pattern:
                            exclude_files.extend(
                                glob.glob("%s/%s" % (domain["prefix"], pattern)))
                        else:
                            exclude_files.append("%s/%s" % (domain["prefix"], pattern))
                    _cleanup_dir(full_path, exclude_files)
                else:
                    os.remove(full_path)
        return []

    def extract_domain(domain: DomainSpec, tar_member: tarfile.TarInfo) -> List[str]:
        """Unpack the domain's sub-tar into the domain's target directory."""
        try:
            target_dir = domain.get("prefix")
            if not target_dir:
                return []
            # The complete tar.gz file never fits in stringIO buffer..
            tar.extract(tar_member, restore_dir)

            command = [
                "tar", "xzf", "%s/%s" % (restore_dir, tar_member.name), "-C", target_dir
            ]
            p = subprocess.Popen(
                command,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                encoding="utf-8",
            )
            _stdout, stderr = p.communicate()
            exit_code = p.wait()
            if exit_code:
                return ["%s - %s" % (domain["title"], stderr)]
        except Exception as e:
            return ["%s - %s" % (domain["title"], str(e))]

        return []

    def execute_restore(domain: DomainSpec, is_pre_restore: bool = True) -> List[str]:
        """Run the domain's pre- or post-restore hook, if it has one."""
        if is_pre_restore:
            if "pre_restore" in domain:
                return domain["pre_restore"]()
        else:
            if "post_restore" in domain:
                return domain["post_restore"]()
        return []

    total_errors = []
    logger.info("Restoring snapshot: %s", tar.name)
    logger.info("Domains: %s", ", ".join(tar_domains.keys()))
    for what, abort_on_error, handler in [
        ("Permissions", True, check_domain),
        ("Pre-Restore", True,
         lambda domain, tar_member: execute_restore(domain, is_pre_restore=True)),
        ("Cleanup", False, lambda domain, tar_member: cleanup_domain(domain)),
        ("Extract", False, extract_domain),
        ("Post-Restore", False,
         lambda domain, tar_member: execute_restore(domain, is_pre_restore=False)),
    ]:
        errors: List[str] = []
        for name, tar_member in tar_domains.items():
            if name in domains:
                try:
                    dom_errors = handler(domains[name], tar_member)
                    errors.extend(dom_errors or [])
                except Exception:
                    # This should NEVER happen
                    err_info = "Restore-Phase: %s, Domain: %s\nError: %s" % (
                        what, name, traceback.format_exc())
                    errors.append(err_info)
                    logger.critical(err_info)
                    if not abort_on_error:
                        # At this state, the restored data is broken.
                        # We still try to apply the rest of the snapshot
                        # Hopefully the log entry helps in identifying the problem..
                        logger.critical(
                            "Snapshot restore FAILED! (possible loss of snapshot data)")
                        continue
                    break

        if errors:
            if what == "Permissions":
                errors = list(set(errors))
                errors.append(
                    _("<br>If there are permission problems, please ensure the site user has write permissions."
                     ))
            if abort_on_error:
                raise MKGeneralException(
                    _("%s - Unable to restore snapshot:<br>%s") % (what, "<br>".join(errors)))
            total_errors.extend(errors)

    # Cleanup
    _wipe_directory(restore_dir)

    if total_errors:
        raise MKGeneralException(
            _("Errors on restoring snapshot:<br>%s") % "<br>".join(total_errors))