def _convert_to_sections(self, raw_data):
    # type: (RawAgentData) -> AgentHostSections
    """Convert raw agent output into host sections.

    When ``config.agent_simulator`` is enabled the payload is first run
    through the agent simulator; parsing itself is delegated to
    ``self._parse_host_section``.
    """
    # NOTE: the former ``cast(RawAgentData, raw_data)`` was removed — ``cast``
    # is an identity function at runtime and the type comment above already
    # declares the parameter type for the checker.
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    return self._parse_host_section(raw_data)
def _parse(self, raw_data: AgentRawData) -> AgentHostSections:
    """Parse raw agent data into host sections.

    Optionally pipes the payload through the agent simulator first,
    depending on the ``config.agent_simulator`` switch.
    """
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    assert isinstance(raw_data, AgentRawData), type(raw_data)
    interval = self.host_config.check_mk_check_interval
    return self._parse_host_section(raw_data, interval)
def _collect_until(oid: OID, oid_prefix: OID, lines: List[str], index: int,
                   direction: int) -> SNMPRowInfo:
    """Collect SNMP rows from ``lines``, starting at ``index`` and walking in
    ``direction`` (+1 forward, -1 backward), for as long as the OID column
    equals ``oid`` or lies below ``oid_prefix``.

    Returns the matching rows as ``('.<oid>', value)`` pairs, in walk order.
    """
    rows = []
    # Handle the case where we start after the end of the lines list.
    if index >= len(lines):
        if direction > 0:
            return []
        index -= 1
    # Hoisted out of the loop: the prefix to match is loop-invariant.
    prefix = oid_prefix + "."
    while True:
        parts = lines[index].split(None, 1)
        o = parts[0]
        if o.startswith('.'):
            o = o[1:]
        if not (o == oid or o.startswith(prefix)):
            break
        if len(parts) > 1:
            # FIXME: This encoding ping-pong is horrible...
            value = ensure_str(
                agent_simulator.process(AgentRawData(ensure_binary(parts[1]))))
        else:
            value = ""
        # Fix for missing starting oids
        rows.append(('.' + o, strip_snmp_value(value)))
        index += direction
        if index < 0 or index >= len(lines):
            break
    return rows
def parse(
    self,
    raw_data: AgentRawData,
) -> AgentHostSections:
    """Parse raw agent output into host sections.

    The payload is optionally run through the agent simulator first
    (``config.agent_simulator``), then handed to
    ``self._parse_host_section`` together with the host's check interval.
    """
    # NOTE: the former ``cast(AgentRawData, raw_data)`` was removed — the
    # parameter annotation already carries the type and ``cast`` is an
    # identity function at runtime.
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    return self._parse_host_section(
        raw_data, self.host_config.check_mk_check_interval)
def parse(
    self,
    hostname: HostName,
    raw_data: RawAgentData,
    *,
    check_interval: int,
) -> AgentHostSections:
    """Parse raw agent output for ``hostname`` into host sections.

    The payload is optionally run through the agent simulator first
    (``config.agent_simulator``).
    """
    # NOTE: the former ``cast(RawAgentData, raw_data)`` was removed — the
    # parameter annotation already carries the type and ``cast`` is an
    # identity function at runtime.
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    return self._parse_host_section(hostname, raw_data, check_interval)
def parse(self, raw_data: AgentRawData) -> AgentHostSections:
    """Parse raw agent data and merge in previously persisted sections."""
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    assert isinstance(raw_data, AgentRawData), type(raw_data)
    interval = self.host_config.check_mk_check_interval
    sections = self._parse_host_section(raw_data, interval)
    # Enrich the freshly parsed sections with what earlier runs persisted.
    sections.add_persisted_sections(
        self.persisted_sections_file_path,
        self.use_outdated_persisted_sections,
        logger=self._logger,
    )
    return sections
def parse(self, raw_data: AgentRawData) -> AgentHostSections:
    """Parse raw agent data, store the persistable sections, and return the
    resulting host sections (with the persisted sections merged in)."""
    if config.agent_simulator:
        raw_data = agent_simulator.process(raw_data)
    interval = self.host_config.check_mk_check_interval
    sections, to_persist = self._parse_host_section(raw_data, interval)
    # Write the persistable sections to the store, then fold them back in.
    self.section_store.update(to_persist)
    sections.add_persisted_sections(to_persist, logger=self._logger)
    return sections
def parse(
    self,
    raw_data: AgentRawData,
    *,
    selection: SectionNameCollection,
) -> AgentHostSections:
    """Parse raw agent output into host sections.

    Besides parsing, this updates the sections' cache metadata, re-stamps
    piggybacked data with caching headers, and persists the persistable
    sections via the section store.
    """
    if self.simulation:
        raw_data = agent_simulator.process(raw_data)
    now = int(time.time())
    parser = self._parse_host_section(raw_data, selection=selection)
    host_sections = parser.host_sections
    # Transform to seconds and give the piggybacked host a little bit more time
    cache_age = int(1.5 * 60 * self.check_interval)
    # Evaluate ``header.cache_info(now)`` only once per header: the previous
    # dict comprehension called it twice (once in the filter, once as value).
    for header in parser.section_info.values():
        info = header.cache_info(now)
        if info is not None:
            host_sections.cache_info[header.name] = cast(Tuple[int, int], info)
    host_sections.piggybacked_raw_data = self._make_updated_piggyback_section_header(
        host_sections.piggybacked_raw_data,
        cached_at=now,
        cache_age=cache_age,
    )
    host_sections.add_persisted_sections(
        host_sections.sections,
        section_store=self.section_store,
        fetch_interval=lambda section_name: parser.section_info.get(
            section_name, SectionMarker.default(section_name)).persist,
        now=now,
        keep_outdated=self.keep_outdated,
        logger=self._logger,
    )
    return host_sections
def parse( self, raw_data: AgentRawData, *, selection: SectionNameCollection, ) -> HostSections[AgentRawDataSection]: if self.simulation: raw_data = agent_simulator.process(raw_data) now = int(time.time()) raw_sections, piggyback_sections = self._parse_host_section(raw_data) section_info = { header.name: header for header in raw_sections if selection is NO_SELECTION or header.name in selection } def decode_sections( sections: ImmutableSection, ) -> MutableMapping[SectionName, List[AgentRawDataSection]]: out: MutableMapping[SectionName, List[AgentRawDataSection]] = {} for header, content in sections.items(): out.setdefault(header.name, []).extend( header.parse_line(line) for line in content) return out def flatten_piggyback_section( sections: ImmutableSection, *, cached_at: int, cache_for: int, selection: SectionNameCollection, ) -> Iterator[bytes]: for header, content in sections.items(): if not (selection is NO_SELECTION or header.name in selection): continue if header.cached is not None or header.persist is not None: yield str(header).encode(header.encoding) else: # Add cache information. 
yield str( SectionMarker( header.name, (cached_at, cache_for), header.encoding, header.nostrip, header.persist, header.separator, )).encode(header.encoding) yield from (bytes(line) for line in content) sections = { name: content for name, content in decode_sections(raw_sections).items() if selection is NO_SELECTION or name in selection } piggybacked_raw_data = { header.hostname: list( flatten_piggyback_section( content, cached_at=now, cache_for=self.cache_piggybacked_data_for, selection=selection, )) for header, content in piggyback_sections.items() } cache_info = { header.name: cache_info_tuple for header in section_info.values() if (cache_info_tuple := header.cache_info(now)) is not None } def lookup_persist( section_name: SectionName) -> Optional[Tuple[int, int]]: default = SectionMarker.default(section_name) if (until := section_info.get(section_name, default).persist) is not None: return now, until return None