def _process_change(self, path: str, change_type: str, callback: str):
    log_info('Process change', change_type=change_type, method=callback, path=path)
    for watcher in self._watchers:
        processed = getattr(watcher, callback)(path)
        if processed:
            return
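
# A minimal sketch (not part of the original code) of the watcher protocol
# that _process_change assumes: each watcher exposes a method per change
# type, taking the path and returning True once the change is handled, so
# the dispatch loop above stops at the first consumer. The class and
# method names below are hypothetical.
class PrintingWatcher:
    def on_modified(self, path: str) -> bool:
        print(f'modified: {path}')
        return True  # claim the event; later watchers are not called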
def _prepare_env_tz(self) -> typing.Optional[str]:
    if self._last_update_date_time is not None:
        offset = self._last_update_date_time.utcoffset()
        if offset:
            offset = offset.total_seconds()
            # Strange, but the sign must be inverted for rrdtool: the TZ
            # environment variable follows the POSIX convention, which is
            # the reverse of ISO 8601.
            sign = '-' if offset > 0 else '+'
            offset = int(abs(offset)) // 60
            h, m = offset // 60, offset % 60
            log_info(f'{self.__class__.__name__}: Set TZ',
                     tz=f'UTC{sign}{h:02d}:{m:02d}')
            return f'UTC{sign}{h:02d}:{m:02d}'
    return None
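
# A worked example of the inversion above (assuming _last_update_date_time
# carries a fixed UTC offset): a timestamp at UTC+02:00 must yield the
# string 'UTC-02:00', because the TZ environment variable that rrdtool
# reads follows the POSIX sign convention, the reverse of ISO 8601.
import datetime
dt = datetime.datetime(2024, 1, 1,
                       tzinfo=datetime.timezone(datetime.timedelta(hours=2)))
offset = dt.utcoffset().total_seconds()      # 7200.0
sign = '-' if offset > 0 else '+'
minutes = int(abs(offset)) // 60
print(f'UTC{sign}{minutes // 60:02d}:{minutes % 60:02d}')  # -> UTC-02:00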
def python_repo_hash_md5(root_dir: str, *, verbose: bool = False):
    """
    Return the hexdigest of an MD5 hash based on the non-git, non-pycache
    entries of root_dir.

    The purpose is to check whether two directories are identical except
    for the modification dates. The two directories can be on different
    machines, where comparing them by file transfer would be costly.
    """
    m = hashlib.md5()
    for e in _collect_entries(root_dir, '.'):
        if verbose:
            log_info('Processing entry', entry=e)
        m.update(
            f"path={e['path']}\tisdir={e['isdir']}\tsize={e['size']}\tmode={e['mode']:03o}\tmtime={e['mtime']}\n"
            .encode('UTF-8'))
    return m.hexdigest()
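
# A minimal usage sketch: compute the digest for both trees and only act
# when they differ. The paths below are hypothetical, and in the intended
# scenario the second digest would be computed on the remote machine.
digest_a = python_repo_hash_md5('/srv/checkout-a')
digest_b = python_repo_hash_md5('/srv/checkout-b')
if digest_a != digest_b:
    print('trees differ: transfer needed')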
def process(self):
    if self._reprocess or not os.path.exists(self.generated_yaml_file):
        messages = Messages()
        log_info('Run modules')
        run_modules(self.config, messages)
        if self.ll_node.mode & Mode.WITH_LOGS:
            log_info('Run logparser modules')
            run_parser_modules(self.config, messages)
        log_info('Postprocessing')
        self.root_node.processed = datetime.datetime.now().strftime(
            '%Y-%m-%dT%H:%M:%S%z')
        messages.print_without_category()
        render(self._output_dir,
               [self.BASE_PATH, os.path.join(self.BASE_PATH, 'parts')],
               self.TEMPLATE_FILENAME, self.config, messages, generated=True)
    else:
        self.config.overwrite_config(load_yaml(self.generated_yaml_file))
        render(self._output_dir,
               [self.BASE_PATH, os.path.join(self.BASE_PATH, 'parts')],
               self.TEMPLATE_FILENAME, self.config, None, generated=False)
    return 0
def generate(self, intervals: typing.List[GraphInterval]):
    log_info(f'Generating graphs in {self._parallel_count} thread(s)')
    if self._parallel_count == 1:
        job = GraphWriterJob(self._munin_directory, self._config, self._output,
                             self._last_update_date_time, self._last_update_timestamp,
                             self._width, self._height, self._header_args,
                             self._env_tz)
        job.generate_all(intervals)
    else:
        pool = Pool(state=self, thread_count=self._parallel_count)
        job_params: typing.List[JobParam] = []
        for domain, host, plugin in self._config.plugins:
            for interval in intervals:
                job_params.append(JobParam(
                    self, self._munin_directory, self._config, self._output,
                    self._last_update_date_time, self._last_update_timestamp,
                    self._width, self._height, self._header_args, self._env_tz,
                    self._config.domains[domain].hosts[host].plugins[plugin],
                    interval))
        pool.run(GraphWriterJob, job_params)
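
# A hedged sketch of the same fan-out pattern using only the standard
# library instead of the project's Pool/JobParam classes: one task per
# (plugin, interval) pair, executed by a fixed-size thread pool. All names
# and values below are illustrative stand-ins, not the project's API.
from concurrent.futures import ThreadPoolExecutor
from itertools import product

def render_graph(task):
    plugin, interval = task
    print(f'rendering {plugin} for {interval}')

plugins = ['cpu', 'memory']        # stand-ins for config.plugins entries
spans = ['day', 'week', 'month']   # stand-ins for GraphInterval values
with ThreadPoolExecutor(max_workers=4) as pool:
    list(pool.map(render_graph, product(plugins, spans)))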
def fetch(self):
    log_info('Fetching historical data')
    self._fetch_urls()
    self._fetch_archives()
def load(self):
    log_info('Loading Munin datafile', path=self._data_file_path)
    self._load_file()
    self._postprocess()