# NOTE(review): newline-stripped fragment of a filter-log query script; the
# original statements were collapsed onto this one physical line and the
# fragment is TRUNCATED at the trailing 'else:' (its suite is missing), so the
# code below cannot be reflowed/repaired without the rest of the file.
# What the fragment does (as far as visible): builds filter_logs from
# /var/log/filter/filter_*.log (newest first) plus /var/log/filter.log, then
# for each log falls back to the raw path when fetch_clog() fails, walks
# records newest-first via reverse_log_reader(), and splits 'filterlog' lines
# into metadata (host, timestamp) and comma-separated rule fields; the
# re.search branch handles the rfc3164 'filterlog[pid]:' ident format.
# TODO(review): the regex literals 'filterlog\[\d*\]:' and 'filterlog[^:]*:'
# should be raw strings (r'...') — '\d' is an invalid escape sequence and a
# SyntaxWarning on modern Python 3.
# TODO(review): 'except Exception as e' binds e but never uses it.
# parse current running config running_conf_descr = fetch_rule_details() result = list() filter_logs = [] if os.path.isdir('/var/log/filter'): filter_logs = list( sorted(glob.glob("/var/log/filter/filter_*.log"), reverse=True)) if os.path.isfile('/var/log/filter.log'): filter_logs.append('/var/log/filter.log') for filter_log in filter_logs: do_exit = False try: filename = fetch_clog(filter_log) except Exception as e: filename = filter_log for record in reverse_log_reader(filename): if record['line'].find('filterlog') > -1: rule = dict() metadata = dict() # rule metadata (unique hash, hostname, timestamp) if re.search('filterlog\[\d*\]:', record['line']): # rfc3164 format log_ident = re.split('filterlog[^:]*:', record['line']) tmp = log_ident[0].split() metadata['__host__'] = tmp.pop() metadata['__timestamp__'] = ' '.join(tmp) rulep = log_ident[1].strip().split(',') else:
# NOTE(review): newline-stripped fragment: tail of a rule-description helper
# (result[line_id] = {'rid': None, 'label': rid}; return result) followed by
# a __main__ section. TRUNCATED at the trailing "if 'proto' in rule:" (suite
# missing), so it cannot be reflowed/repaired from this view alone.
# Visible behavior: reads 'limit'/'digest' parameters via update_params(),
# coerces limit to int, fetches rule descriptions, then walks the clog-decoded
# filter log newest-first; for each 'filterlog' record it derives
# __digest__ (md5 of the full line, Python-3 style .encode()), __host__ and
# __timestamp__ from the text before 'filterlog:', and parses the
# comma-separated fields after it with update_rule()/fields_general, then
# fields_ipv4 when version == '4'.
# TODO(review): 'filter_log' is passed to fetch_clog() but is never bound
# anywhere in this fragment — presumably defined in the missing surrounding
# code; confirm against the full file.
# NOTE(review): md5 here is presumably hashlib.md5 (takes bytes, hence
# .encode()) — contrast with the Python-2 'md5.new(...)' variant elsewhere
# in this file, which would raise on Python 3.
result[line_id] = {'rid': None, 'label': rid} return result if __name__ == '__main__': # read parameters parameters = {'limit': '0', 'digest': ''} update_params(parameters) parameters['limit'] = int(parameters['limit']) # parse current running config running_conf_descr = fetch_rule_details() result = list() for record in reverse_log_reader(fetch_clog(filter_log)): if record['line'].find('filterlog') > -1: rule = dict() metadata = dict() # rule metadata (unique hash, hostname, timestamp) tmp = record['line'].split('filterlog:')[0].split() metadata['__digest__'] = md5(record['line'].encode()).hexdigest() metadata['__host__'] = tmp.pop() metadata['__timestamp__'] = ' '.join(tmp) rulep = record['line'].split('filterlog:')[1].strip().split(',') update_rule(rule, metadata, rulep, fields_general) if 'version' in rule: if rule['version'] == '4': update_rule(rule, metadata, rulep, fields_ipv4) if 'proto' in rule:
# NOTE(review): newline-stripped fragment, an older variant of the __main__
# section on the previous line. TRUNCATED at the trailing
# "if 'proto' in rule:" (suite missing), so it cannot be reflowed/repaired
# from this view alone.
# BUG(review): metadata['__digest__'] = md5.new(record['line']).hexdigest()
# is the Python-2 'md5' module API; that module was removed in Python 3, where
# the call must be hashlib.md5(record['line'].encode()).hexdigest() — exactly
# the form used in the sibling variant in this file. If this script runs under
# Python 3 this line raises (NameError/AttributeError). Cannot be fixed in
# place here because the enclosing block is truncated; fix when the full file
# is in view.
# TODO(review): as in the sibling variant, 'filter_log' is used but never
# bound in this fragment — confirm against the full file.
# XXX happens on rdr (ID is not unique) or when no label is found result[line_id] = {'label': 'XXX'} return result if __name__ == '__main__': # read parameters parameters = {'limit': '0', 'digest': ''} update_params(parameters) parameters['limit'] = int(parameters['limit']) # parse current running config running_conf_descr = fetch_rules_descriptions() result = list() for record in reverse_log_reader(fetch_clog(filter_log)): if record['line'].find('filterlog') > -1: rule = dict() metadata = dict() # rule metadata (unique hash, hostname, timestamp) tmp = record['line'].split('filterlog:')[0].split() metadata['__digest__'] = md5.new(record['line']).hexdigest() metadata['__host__'] = tmp.pop() metadata['__timestamp__'] = ' '.join(tmp) rulep = record['line'].split('filterlog:')[1].strip().split(',') update_rule(rule, metadata, rulep, fields_general) if 'version' in rule: if rule['version'] == '4': update_rule(rule, metadata, rulep, fields_ipv4) if 'proto' in rule:
# NOTE(review): newline-stripped fragment of a generic paged log query.
# TRUNCATED inside the nested "if frmt:" suite, so it cannot be
# reflowed/repaired from this view alone.
# Visible behavior: parses a numeric offset (defaulting to 0), turns the
# user-supplied filter into a case-insensitive regex ('*' -> '.*', bare text
# wrapped as '.*text.*'), falling back to match-everything on a bad pattern;
# then for each existing log file builds a FormatContainer, falls back to the
# raw path when fetch_clog() fails, and walks records newest-first, counting
# matches in result['total_rows'] and, within the limit/offset window,
# attaching a parsed timestamp and reformatted line from the detected format.
# TODO(review): local name 'filter' shadows the Python builtin — rename.
# TODO(review): 'except sre_constants.error' relies on an internal,
# undocumented module; the public spelling is 'except re.error'.
# TODO(review): 'except Exception as e' binds e but never uses it.
offset = int(inputargs.offset) if inputargs.offset.isdigit() else 0 try: filter = inputargs.filter.replace('*', '.*').lower() if filter.find('*') == -1: # no wildcard operator, assume partial match filter = ".*%s.*" % filter filter_regexp = re.compile(filter) except sre_constants.error: # remove illegal expression filter_regexp = re.compile('.*') for log_filename in log_filenames: if os.path.exists(log_filename): format_container = FormatContainer(log_filename) try: filename = fetch_clog(log_filename) except Exception as e: filename = log_filename for record in reverse_log_reader(filename): if record['line'] != "" and filter_regexp.match( ('%s' % record['line']).lower()): result['total_rows'] += 1 if (len(result['rows']) < limit or limit == 0) and result['total_rows'] >= offset: record['timestamp'] = None record['parser'] = None frmt = format_container.get_format(record['line']) if frmt: record['timestamp'] = frmt.timestamp( record['line']) record['line'] = frmt.line(record['line'])