def parse_content(self, content):
    # Parse `db2licm -l` output into this dict-like parser: one entry per
    # product, keyed by product name, whose value is a dict of the remaining
    # "key: value" fields reported for that product.
    body = {}
    # Input data is available in text file. Reading each line in file and
    # parsing it to a dictionary.
    for line in get_active_lines(content):
        if ':' in line:
            key, val = [i.strip() for i in line.strip().split(":", 1)]
            if key == "Features":
                # "Features:" is a bare section header with no value; skip it.
                continue
        else:
            # A line without a colon cannot be db2licm "key: value" output.
            raise ParseException(
                "Unable to parse db2licm info: {0}".format(content))
        if key == "Product name":
            # Start a new product section; following pairs belong to it.
            # NOTE(review): pairs appearing before the first "Product name"
            # line go into a dict that is never attached to self — confirm
            # the input always begins with "Product name".
            body = {}
            self[val] = body
        else:
            body[key] = val
    if not self:
        # If no data is obtained in the command execution then throw an exception
        # instead of returning an empty object. Rules depending solely on this
        # parser will not be invoked, so they don't have to explicitly check
        # for invalid data.
        raise ParseException(
            "Unable to parse db2licm info: {0}".format(content))
def parse_content(self, content):
    """Parse service-status output where each service occupies three lines.

    Populates:
        status_of_service (dict): service name -> lower-cased status text.
        response_of_service (dict): service name -> server response text.
        service_list (list): service names in the order encountered.
        errors (list): unparsable lines.
        are_all_ok (bool): True when nothing errored and every status is 'ok'.
    """
    self.status_of_service = {}
    self.response_of_service = {}
    self.service_list = []
    self.errors = []
    self.are_all_ok = False
    content = get_active_lines(content, comment_char="COMMAND>")
    # NOTE(review): empty filtered content raises IndexError on the next
    # line — confirm callers guarantee at least one active line.
    if len(content[0].split()) > 1:
        # The first line should be a service name on its own; multiple
        # words mean the output is an error message, not a report.
        self.errors.append(content[0])
    elif len(content) % 3 == 0:
        # Each service occupies exactly 3 lines: name, status, response.
        # Walk the lines in lockstep triples instead of juggling a manual
        # iterator with an unused loop counter.
        triples = zip(content[0::3], content[1::3], content[2::3])
        for service_line, status_line, response_line in triples:
            service = service_line.strip().split(':')[0]
            status = status_line.strip().split(':', 1)[-1].strip().lower()
            response = response_line.strip().split(':', 1)[-1].strip()
            self.service_list.append(service)
            self.status_of_service[service] = status
            self.response_of_service[service] = response
    else:
        self.errors.extend(content)
    self.are_all_ok = (
        not self.errors and
        all(status == 'ok' for status in self.status_of_service.values())
    )
def parse_content(self, content):
    # Parse `gluster vol status` output: one fixed-width table per volume,
    # each introduced by a "Status of volume: <name>" header line.
    # Stored data in a dictionary data structure keyed by volume name.
    self.data = {}
    # Drop blank lines and the '----' separator rules.
    content = get_active_lines(content, '----')
    # Indices of the per-volume section header lines.
    idxs = [
        i for i, l in enumerate(content)
        if l.startswith('Status of volume')
    ]
    for i, idx in enumerate(idxs):
        start = idx
        # NOTE(review): the final section is sliced with end=-1, which
        # excludes the very last line of `content` — confirm that line is
        # never table data (e.g. always a "Task Status" trailer).
        end = idxs[i + 1] if i < len(idxs) - 1 else -1
        # The text after the colon in the header is the volume name.
        _, val = content[idx].split(":", 1)
        # Substitute multi-word headings so parse_fixed_table can split
        # columns on whitespace; skip everything up to the real heading
        # row and ignore the trailing "Task Status" block.
        body = parse_fixed_table(content[start:end],
                                 header_substitute=[
                                     ('Gluster process', 'Gluster_process'),
                                     ('TCP Port', 'TCP_Port'),
                                     ('RDMA Port', 'RDMA_Port')
                                 ],
                                 heading_ignore=['Gluster process'],
                                 trailing_ignore=['Task Status'])
        self.data[val.strip()] = body
    if not self.data:
        # If no data is obtained in the command execution then throw an exception
        # instead of returning an empty object. Rules depending solely on this
        # parser will not be invoked, so they don't have to explicitly check
        # for invalid data.
        raise ParseException(
            "Unable to parse gluster volume status: {0}".format(content))
def parse_content(self, content):
    """Parse per-service status report into this dict-like parser.

    Each service section begins with a bare "<name>:" header line; the
    "Status:" and "Server Response:" lines that follow are stored both in
    the per-service dict and mirrored into ``status_of_service`` /
    ``response_of_service``.  Unrecognised lines are kept in ``errors``.
    """
    self.status_of_service = {}
    self.response_of_service = {}
    self.errors = []
    self.raw_content = content
    current = None
    for line in get_active_lines(content, comment_char="COMMAND>"):
        key, colon, rest = line.partition(':')
        key = key.strip()
        rest = rest.strip()
        if colon and key:
            if not rest and key not in ('Status', 'Server Response'):
                # A bare "<name>:" line opens a new service section.
                current = key
                self[current] = {}
                continue
            if current is not None:
                self[current][key] = rest
                if key == 'Status':
                    self.status_of_service[current] = rest.lower()
                if key == 'Server Response':
                    self.response_of_service[current] = rest
                continue
        # Fell through every recognised shape: remember the line as an error.
        self.errors.append(line)
    self._is_normal = (
        not self.errors and
        all(self[name]['Status'] == 'ok' for name in self)
    )
def parse_content(self, content):
    """Parse ``key = value`` lines into ``self.data``.

    Lines continued with a trailing ',' are first rejoined by
    ``unsplit_lines``.  A comma-separated value is stored as a list of
    stripped items; any other value is stored as the bare string.
    """
    self.data = {}
    joined = unsplit_lines(get_active_lines(content), ',', keep_cont_char=True)
    for line in joined:
        if '=' not in line:
            continue
        key, value = line.split('=', 1)
        key = key.strip()
        value = value.strip()
        if ',' in value:
            self.data[key] = [item.strip() for item in value.split(',')]
        else:
            self.data[key] = value
def parse_content(self, content):
    """Parse the output of the ``gluster vol info`` command.

    Builds ``self.data``, a dictionary mapping each volume name (from the
    "Volume Name:" lines) to a dictionary of the ``key: value`` option
    pairs listed under it, and makes it available to rule plugins.

    Raises:
        ParseException: if a line has no colon, or no volume data could
            be extracted at all.
    """
    self.data = {}
    name, body = None, {}
    for line in get_active_lines(content):
        if ':' not in line:
            raise ParseException(
                "Unable to parse gluster volume options: {}".format(
                    content))
        key, val = line.strip().split(":", 1)
        key = key.strip()
        val = val.lstrip()
        if key != "Volume Name":
            body[key.strip()] = val.lstrip(" ")
            continue
        if name is not None:
            # A new volume begins: store the finished section first.
            self.data[name] = body
            body = {}
        name = val
    if name and body:
        # Attach the final (or only) volume's section.
        self.data[name] = body
    if not self.data:
        # Raise instead of returning an empty object so rules depending
        # solely on this parser are not invoked with invalid data.
        raise ParseException(
            "Unable to parse gluster volume options: {0}".format(content))
def parse_content(self, content):
    """Parse yum-plugin conf.d INI-style content into ``self.data``.

    ``[section]`` lines open a new section dict, ``key=value`` lines add
    entries to the current section, and any other active line is treated
    as a continuation of the most recent value (joined with a comma).
    """
    deprecated(PluginConfD, "Deprecated. Use 'PluginConfDIni' instead.")
    parsed = {}
    section = {}
    last_key = None
    for line in get_active_lines(content):
        if line.startswith('['):
            # Open a fresh section named by the bracketed text.
            section = {}
            parsed[line[1:-1]] = section
        elif '=' in line:
            last_key, _, raw_value = line.partition("=")
            last_key = last_key.strip()
            section[last_key] = raw_value.strip()
        elif last_key:
            # Continuation line: fold it into the previous key's value.
            section[last_key] = ','.join([section[last_key], line])
    self.data = parsed
def parse_content(self, content):
    """Parse crontab lines into jobs, environment settings and bad lines.

    Populates:
        data (list): one dict per job - the cron time fields plus
            'command' (or ``{'time': '@reboot', 'command': ...}`` for
            @reboot jobs).
        environment (dict): ``KEY=value`` settings found in the crontab.
        invalid_lines (list): active lines matching no recognised shape.

    Raises:
        ParseException: when an '@nickname' time spec is not recognised.
    """
    self.data = []
    self.environment = {}
    self.invalid_lines = []
    # Crontabs can use 'nicknames' for common event frequencies:
    nicknames = {
        '@yearly': '0 0 1 1 *',
        '@annually': '0 0 1 1 *',
        '@monthly': '0 0 1 * *',
        '@weekly': '0 0 * * 0',
        '@daily': '0 0 * * *',
        '@hourly': '0 * * * *',
    }
    cron_re = re.compile(_make_cron_re(), flags=re.IGNORECASE)
    env_re = re.compile(r'^\s*(?P<key>\w+)\s*=\s*(?P<value>\S.*)$')
    for line in get_active_lines(content):
        if line.startswith('@'):
            # Reboot is 'special':
            if line.startswith('@reboot'):
                # Split only once so the whole command - including its
                # arguments - is preserved; split(None, 2) truncated the
                # stored command to its first word.
                parts = line.split(None, 1)
                if len(parts) < 2:
                    # '@reboot' with no command is not a valid job.
                    self.invalid_lines.append(line)
                    continue
                self.data.append({'time': '@reboot', 'command': parts[1]})
                continue
            else:
                parts = line.split(None, 1)
                if parts[0] not in nicknames:
                    raise ParseException(
                        "{n} not recognised as a time specification 'nickname'".format(n=parts[0])
                    )
                # Otherwise, put the time spec nickname translation into
                # the line.  Replace only the first occurrence so a command
                # that happens to contain the nickname text is untouched.
                line = line.replace(parts[0], nicknames[parts[0]], 1)
                # And then we fall through to the rest of the parsing.
        cron_match = cron_re.match(line)
        env_match = env_re.match(line)
        if cron_match:
            self.data.append(cron_match.groupdict())
        elif env_match:
            # Environment variable - capture in dictionary
            self.environment[env_match.group('key')] = env_match.group('value')
        else:
            self.invalid_lines.append(line)
def parse_content(self, content):
    """Parse ``gluster vol info`` style output into ``self.data``.

    ``self.data`` maps each volume name (from "Volume Name:" lines) to a
    dict of the ``key: value`` option pairs that follow it.

    Raises:
        ParseException: if any line lacks a colon, or nothing was parsed.
    """
    self.data = {}
    name = None
    body = {}
    for line in get_active_lines(content):
        stripped = line.strip()
        if ':' not in stripped:
            raise ParseException(
                "Unable to parse gluster volume options: {}".format(
                    content))
        key, val = stripped.split(":", 1)
        key = key.strip()
        val = val.lstrip()
        if key == "Volume Name":
            if name is not None:
                # Close out the previous volume before starting this one.
                self.data[name] = body
                body = {}
            name = val
        else:
            body[key.strip()] = val.lstrip(" ")
    if name and body:
        self.data[name] = body
    if not self.data:
        # An empty result means the input was not gluster volume output;
        # raise so dependent rules are not invoked on invalid data.
        raise ParseException(
            "Unable to parse gluster volume options: {0}".format(content))
def parse_content(self, content):
    """Store the file's key/value pairs, skipping blanks and comments."""
    active = get_active_lines(content)
    self.data = split_kv_pairs(active)
def parse_content(self, content):
    """Read the active crypto policy from
    ``/etc/crypto-policies/state/current``.

    Stores the first active (non-blank, non-comment) line in ``value``.

    Raises:
        SkipException: when the file is empty or has no active line.
    """
    # Filter first so a file holding only blank/comment lines is skipped
    # cleanly instead of raising IndexError on an empty list.
    lines = get_active_lines(content)
    if not lines:
        raise SkipException("/etc/crypto-policies/state/current is empty")
    self.value = lines[0]
def parse_content(self, content):
    """Parse each active line as a pam.d-style configuration entry.

    Builds ``self.data``: a list of ``PamConfEntry`` objects, one per
    active (non-blank, non-comment) line of the input.
    """
    # A comprehension replaces the manual append loop (ruff PERF401).
    self.data = [
        PamConfEntry(line, pamd_conf=True)
        for line in get_active_lines(content)
    ]
def parse_content(self, content):
    """Store each active (non-blank, non-comment) line in ``records``."""
    # A loop that only appends every item is just list() (ruff PERF402).
    self.records = list(get_active_lines(content))