def simple_search(au, source, where):
    """Run a single-rule search (auid == expected value) and print the result.

    Depending on *source*, a fresh parser is built over either the on-disk
    test log or the in-memory buffer array; *where* selects the
    AUSEARCH_STOP_* position for search_set_stop().

    NOTE: the *au* parameter is rebound immediately, so the caller's parser
    is never actually used.

    Fix: converted Python 2 ``print`` statements to the ``print(...)`` form,
    which behaves identically on Python 2 and is required on Python 3 (other
    parts of this file already use Python 3 syntax).
    """
    if source == auparse.AUSOURCE_FILE:
        au = auparse.AuParser(auparse.AUSOURCE_FILE, srcdir + "/test.log")
        val = "4294967295"
    else:
        au = auparse.AuParser(auparse.AUSOURCE_BUFFER_ARRAY, buf)
        val = "848"
    # Single search rule: events whose auid equals the expected value.
    au.search_add_item("auid", "=", val, auparse.AUSEARCH_RULE_CLEAR)
    au.search_set_stop(where)
    if not au.search_next_event():
        print("Error searching for auid")
    else:
        print("Found %s = %s" % (au.get_field_name(), au.get_field_str()))
def parse_file(self, au_log):
    """Parse an audit log saved in a file."""
    # Walk every event in the log; record type 1300 is a SYSCALL record.
    parser = auparse.AuParser(auparse.AUSOURCE_FILE, au_log)
    while parser.parse_next_event():
        if parser.get_type() == 1300:
            self.parse_syscall(parser)
        parser.next_record()
def run(self, args):
    """Stream an audit log through an AUSOURCE_FEED parser.

    Reads ``args.file`` line by line (optionally tail-style when
    ``args.follow`` is set), feeding each line to auparse.  Completed
    events are reduced via RECORD_REDUCERS into a dict and appended to a
    local event list by the feed callback.
    """
    def parser_callback(au, cb_event_type, event_list: list):
        # Only act once auparse has assembled a complete event.
        if cb_event_type != auparse.AUPARSE_CB_EVENT_READY:
            return
        if not au.first_record():
            return
        info = {}
        while True:
            type_name = au.get_type_name()
            if type_name not in RECORD_REDUCERS:
                # Skip record types we have no reducer for.
                # BUGFIX: the original ignored next_record()'s return value
                # here, so a trailing record with no reducer caused an
                # infinite loop (the cursor never advanced past it).
                if not au.next_record():
                    break
                continue
            # Collect every field of this record, keeping both the raw
            # string and auparse's interpreted (human readable) form.
            record_fields = {}
            au.first_field()
            while True:
                field_name = au.get_field_name()
                field_str = au.get_field_str()
                interp = au.interpret_field()
                record_fields[field_name] = SETraceEventField(
                    field_name, field_str, interp)
                if not au.next_field():
                    break
            # Fold this record's fields into the per-event summary.
            reducer = RECORD_REDUCERS[type_name]
            reducer.fold(record_fields, info)
            if not au.next_record():
                break
        event_list.append(info)

    event_list = []
    parser = auparse.AuParser(auparse.AUSOURCE_FEED, None)
    parser.add_callback(parser_callback, event_list)
    parser_hungry = True
    with open(args.file, 'r') as input_fd:
        if args.follow:
            # Tail mode: only report events appended from now on.
            input_fd.seek(0, os.SEEK_END)
        while self.running:
            data = input_fd.readline()
            if not data:
                if not args.follow:
                    break
                if parser_hungry:
                    # Nudge auparse so a pending event at EOF gets flushed.
                    parser.feed("\n")  # Parsers love new-lines
                    parser_hungry = False
                time.sleep(1)
                continue
            parser.feed(data)
            parser_hungry = True
def __init__(self, path, prefix='/host'):
    """Construct manager.

    Re-roots *path* under *prefix*, opens it with auparse, and records a
    "30 days ago" cutoff expressed as seconds since the Unix epoch.
    """
    self._logfile = Pathname(path, prefix=prefix)
    # Seconds-since-epoch value for "now minus 30 days".
    # NOTE(review): utcfromtimestamp(0) yields naive UTC while
    # datetime.now() yields naive local time, so this delta is skewed by
    # the local UTC offset -- confirm that skew is acceptable here.
    epoch = datetime.datetime.utcfromtimestamp(0)
    now = datetime.datetime.now() - datetime.timedelta(days=30)
    self._30day_delta = (now - epoch).total_seconds()
    try:
        self._parser = auparse.AuParser(auparse.AUSOURCE_FILE, self._logfile)
    except IOError as e:
        # Re-raise with the friendlier relative path in the message.
        raise IOError(e.errno, os.strerror(e.errno), self._logfile.relpath)
    except OSError as e:
        # NOTE(review): on Python 3 IOError is an alias of OSError, so
        # this branch is unreachable there; it only matters on Python 2.
        raise OSError(e.errno, os.strerror(e.errno), self._logfile.relpath)
def compound_search(au, how):
    """Run a multi-rule search over the test log and print the first hit.

    With ``how == AUSEARCH_RULE_AND`` all three rules must match the same
    event; otherwise the auid rules are combined with *how* (e.g. OR) and
    the search is expected to stop on the auid=0 rule.

    NOTE: the *au* parameter is rebound immediately, so the caller's parser
    is never actually used.

    Fix: converted Python 2 ``print`` statements to the ``print(...)`` form,
    which behaves identically on Python 2 and is required on Python 3 (other
    parts of this file already use Python 3 syntax).
    """
    au = auparse.AuParser(auparse.AUSOURCE_FILE, srcdir + "/test.log")
    if how == auparse.AUSEARCH_RULE_AND:
        au.search_add_item("uid", "=", "0", auparse.AUSEARCH_RULE_CLEAR)
        au.search_add_item("pid", "=", "13015", how)
        au.search_add_item("type", "=", "USER_START", how)
    else:
        au.search_add_item("auid", "=", "42", auparse.AUSEARCH_RULE_CLEAR)
        # should stop on this one
        au.search_add_item("auid", "=", "0", how)
        au.search_add_item("auid", "=", "500", how)
    au.search_set_stop(auparse.AUSEARCH_STOP_FIELD)
    if not au.search_next_event():
        print("Error searching for auid")
    else:
        print("Found %s = %s" % (au.get_field_name(), au.get_field_str()))
def main():
    """Read audit records from stdin and parse each line until stopped.

    ``stop`` and ``hup`` are module globals flipped by signal handlers:
    ``stop`` ends the loop, ``hup`` triggers a config reload.

    Fix: replaced the Python 2-only ``except IOError, e`` syntax with the
    ``except IOError:`` form (valid on Python 2.6+ and Python 3); the bound
    exception was unused anyway.
    """
    global serials
    global stop
    global hup
    global formatted
    serials = []
    formatted = {}
    while stop == 0:
        try:
            buf = sys.stdin
            if hup == 1:
                # SIGHUP was received: re-read configuration, then resume.
                reload_config()
                continue
            for line in buf:
                # Each input line is one audit record; parse it on its own.
                au = auparse.AuParser(auparse.AUSOURCE_BUFFER, line)
                coolparse(au)
        except IOError:
            # Interrupted read (e.g. by a signal); just retry the loop.
            continue
def main():
    """Read audit records from stdin into ``output`` until stopped.

    ``stop`` and ``hup`` are module globals flipped by signal handlers:
    ``stop`` ends the loop, ``hup`` triggers a config reload.

    Fix: replaced the Python 2-only ``except IOError, e`` syntax with the
    ``except IOError:`` form (valid on Python 2.6+ and Python 3); the bound
    exception was unused anyway.  NOTE(review): ``StringIO.StringIO`` is the
    Python 2 module spelling; a full Python 3 port would use ``io.StringIO``.
    """
    global serials
    global output
    global stop
    global hup
    serials = []
    output = StringIO.StringIO()
    while stop == 0:
        try:
            buf = sys.stdin
            if hup == 1:
                # SIGHUP was received: re-read configuration, then resume.
                reload_config()
                continue
            for line in buf:
                # Each input line is one audit record; parse it on its own.
                au = auparse.AuParser(auparse.AUSOURCE_BUFFER, line)
                coolparse(au)
        except IOError:
            # Interrupted read (e.g. by a signal); just retry the loop.
            continue
            # Fragment: this begins inside the record loop of an enclosing
            # callback/function that is not visible here (indentation depth
            # below is reconstructed -- TODO confirm against the full file).
            # Per-record detail: timestamp header, then each field with its
            # raw value and interpreted (human readable) form.
            print " event time: %d.%d:%d, host=%s" % ( event.sec, event.milli, event.serial, none_to_null(event.host))
            au.first_field()
            while True:
                print " %s=%s (%s)" % (au.get_field_name(), au.get_field_str(), au.interpret_field())
                if not au.next_field():
                    break
            print
            record_cnt += 1
            if not au.next_record():
                break
        # Count one more fully-printed event (list is mutated so the
        # caller sees the updated tally).
        event_cnt[0] += 1

# Test 1: parse the in-memory buffer array and locate "auid" in each event.
au = auparse.AuParser(auparse.AUSOURCE_BUFFER_ARRAY, buf)
print "Starting Test 1, iterate..."
while au.parse_next_event():
    if au.find_field("auid"):
        print "%s=%s" % (au.get_field_name(), au.get_field_str())
        print "interp auid=%s" % (au.interpret_field())
    else:
        print "Error iterating to auid"
print "Test 1 Done\n"

# Reset, now lets go to beginning and walk the list manually */
print "Starting Test 2, walk events, records, and fields..."
au.reset()
walk_test(au)
print "Test 2 Done\n"
# 'default' holds global settings, not a per-key section -- drop it before
# building the key index.
if 'default' in cfgsections:
    cfgsections.remove('default')
logging.debug('Current list of configuration sections from INI: ' + str(cfgsections))
logging.info('Building key index')
for name in cfgsections:
    # Map each section's configured audit-rule key name back to the
    # section name, then validate the section's settings.
    cfgkeys.append({cfg.get(name, 'keyname'):name})
    validatesections(name)
logging.debug('Current list of key index: ' + str(cfgkeys))
logging.info('Opening Auditd source')
# Read audit events from file descriptor 0 (stdin) -- presumably this
# script runs as an audisp plugin; verify against deployment config.
aup = auparse.AuParser(auparse.AUSOURCE_DESCRIPTOR, 0);
while not aup.first_record():
    # Poll every 10s until the first event arrives on the descriptor.
    logging.debug('Snoozing until first event')
    time.sleep(10)
logging.info('Entering main loop')
# NOTE(review): the loop body continues beyond this excerpt.
while True:
    #Ensure we are at the first event record and the first field
    aup.first_record()
    aup.first_field()
    mytype = aup.get_type_name()
    logging.debug(mytype + ' is the first record type')
    key = aup.find_field('key')
    # NOTE(review): find_field() returns None when no 'key' field exists,
    # which would make this concatenation raise TypeError -- confirm the
    # input always carries a key, or guard this.
    logging.debug(key + ' is the first found key')
def parse_audit_log(audit_log, audit_comm, syscalls, arg_inspection):
    """Parses one audit.log file generated by the Linux audit subsystem.

    For events whose SECCOMP/SYSCALL records match *audit_comm*, increments
    the per-syscall-name counter in *syscalls*, and for syscalls that require
    argument inspection adds the interpreted argument value to
    ``arg_inspection[syscall].value_set``.

    Raises:
        ValueError: if the file does not parse as an audit log, or an
            expected field is missing from a matching record.
    """
    unknown_syscall_re = re.compile(r'unknown-syscall\((?P<syscall_num>\d+)\)')
    au = auparse.AuParser(auparse.AUSOURCE_FILE, audit_log)
    # Quick validity check for whether this parses as a valid audit log. The
    # first event should have at least one record.
    if not au.first_record():
        raise ValueError(f'Unable to parse audit log file {audit_log.name}')
    # Iterate through events where _any_ contained record matches
    # ((type == SECCOMP || type == SYSCALL) && comm == audit_comm).
    au.search_add_item('type', '=', 'SECCOMP', auparse.AUSEARCH_RULE_CLEAR)
    au.search_add_item('type', '=', 'SYSCALL', auparse.AUSEARCH_RULE_OR)
    au.search_add_item('comm', '=', f'"{audit_comm}"',
                       auparse.AUSEARCH_RULE_AND)

    # auparse_find_field(3) will ignore preceding fields in the record and
    # at the same time happily cross record boundaries when looking for the
    # field. This helper method always seeks the cursor back to the first
    # field in the record and stops searching before crossing over to the
    # next record; making the search far less error prone.
    # Also implicitly seeks the internal 'cursor' to the matching field
    # for any subsequent calls like auparse_interpret_field.
    def _find_field_in_current_record(name):
        au.first_field()
        while True:
            if au.get_field_name() == name:
                return au.get_field_str()
            if not au.next_field():
                return None

    while au.search_next_event():
        # The event may have multiple records. Loop through all.
        au.first_record()
        for _ in range(au.get_num_records()):
            event_type = _find_field_in_current_record('type')
            comm = _find_field_in_current_record('comm')
            # Some of the records in this event may not be relevant
            # despite the event-specific search filter. Skip those.
            if (event_type not in ('SECCOMP', 'SYSCALL')
                    or comm != f'"{audit_comm}"'):
                au.next_record()
                continue
            if not _find_field_in_current_record('syscall'):
                raise ValueError(f'Could not find field "syscall" in event of '
                                 f'type {event_type}')
            # Intepret the syscall field that's under our 'cursor' following
            # the find. Interpreting fields yields human friendly names
            # instead of integers. E.g '16' -> 'ioctl'.
            syscall = au.interpret_field()
            # TODO(crbug/1172449): Add these syscalls to upstream
            # audit-userspace and remove this workaround.
            # This is redundant but safe for non-ARM architectures due to the
            # disjoint set of private syscall numbers.
            match = unknown_syscall_re.match(syscall)
            if match:
                syscall_num = int(match.group('syscall_num'))
                syscall = PRIVATE_ARM_SYSCALLS.get(syscall_num, syscall)
            if ((syscall in arg_inspection and event_type == 'SECCOMP') or
                    (syscall not in arg_inspection
                     and event_type == 'SYSCALL')):
                # Skip SECCOMP records for syscalls that require argument
                # inspection. Similarly, skip SYSCALL records for syscalls
                # that do not require argument inspection. Technically such
                # records wouldn't exist per our setup instructions but audit
                # sometimes lets a few records slip through.
                au.next_record()
                continue
            elif event_type == 'SYSCALL':
                arg_field_name = f'a{arg_inspection[syscall].arg_index}'
                if not _find_field_in_current_record(arg_field_name):
                    # NOTE(review): message is missing a space between the
                    # two f-string halves ('..."in event...'); left as-is.
                    raise ValueError(f'Could not find field "{arg_field_name}"'
                                     f'in event of type {event_type}')
                # Intepret the arg field that's under our 'cursor' following
                # the find. This may yield a more human friendly name.
                # E.g '5401' -> 'TCGETS'.
                arg_inspection[syscall].value_set.add(au.interpret_field())
            syscalls[syscall] += 1
            au.next_record()
def main():
    """Entry point: parse CLI options, run Manage over the input, then
    optionally pipe `ausearch` output through auparse + audit2allow and
    render per-host SELinux .te modules with Jinja2.

    NOTE(review): indentation below is reconstructed from a
    whitespace-mangled source; several names (``repo``, ``tempdir``,
    ``host`` at cmd-build time, ``app_root``) come from scope not visible
    in this excerpt -- confirm against the full file.
    """
    # Map auparse's numeric field-type constants back to their names.
    ftypemap = {}
    for ftype in ftypes:
        if hasattr(auparse, ftype):
            ftypemap[getattr(auparse, ftype)] = ftype
    parser = argparse.ArgumentParser(description="Manage SELinux modules")
    parser.add_argument('--scontext', action='store', dest='scontext', help='Limit scontext to comma separated list')
    # Audit2Allow argument
    parser.add_argument("-r", "--requires", action="store_true", dest="requires", default=False, help="generate require statements for rules")
    parser.add_argument("-D", "--dontaudit", action="store_true", dest="dontaudit", default=False, help="generate policy with dontaudit rules")
    parser.add_argument("-v", "--verbose", action="store_true", dest="verbose", default=False, help="explain generated output")
    parser.add_argument("-e", "--explain", action="store_true", dest="explain_long", default=False, help="fully explain generated output")
    parser.add_argument("--interface-info", dest="interface_info", help="file name of interface information")
    parser.add_argument("--perm-map", dest="perm_map", help="file name of perm map")
    parser.add_argument("-R", "--reference", action="store_true", dest="refpolicy", default=True, help="generate refpolicy style output")
    parser.add_argument("-N", "--noreference", action="store_false", dest="refpolicy", default=False, help="do not generate refpolicy style output")
    parser.add_argument('--hostname', help="Specify hostname", action='store')
    parser.add_argument(
        '--input-directory',
        help="Specify input source directory for SElinux modules",
        action="store")
    parser.add_argument('--input-file', help="Specify audit log input for subprocess.", action="store")
    parser.add_argument('--search', '-s', help="Search the auditlog", action="store")
    args = parser.parse_args()
    rules = Rules("rules")
    manage_instance = Manage(args=args, rules=rules)
    manage_instance.process_input()
    # I'm a bit unsure its wise to parse it but it sounded fun
    try:
        lines = None
        ausearch_out = None
        audit_lines = []
        if args.search:
            # We need to parse the args line, or re-work this
            ausearch_proc = subprocess.Popen(
                ['/sbin/ausearch', *(args.search.split(' '))],
                stdout=subprocess.PIPE)
            # NOTE(review): Popen.returncode is None until the child is
            # waited on, so this early-failure check likely never fires.
            if ausearch_proc.returncode:
                logger.critical("ausearch subprocess failed")
                return 1
            assert ausearch_proc.stdout
            cmd = ['audit2allow']
            if args.refpolicy:
                cmd.append('-R')
            cmd.extend([
                '--interface-info',
                repo[host].working_tree_dir + '/interface_info', '--perm-map',
                repo[host].working_tree_dir + '/perm_map'
            ])
            ausearch_output = b""
            all_allow_out = b""
            # Feed-mode parser: completed events are delivered via
            # auparse_callback into user_data['log'], keyed by host.
            aup = auparse.AuParser(auparse.AUSOURCE_FEED)
            user_data = dict(cur_event=None, log={})
            aup.add_callback(auparse_callback, user_data)
            while ausearch_proc.returncode is None:
                (stdout, stderr) = ausearch_proc.communicate()
                ausearch_output = ausearch_output + stdout
                result = aup.feed(stdout)
            aup.flush_feed()
            aup = None
            env = Environment(
                loader=FileSystemLoader(app_root),
                trim_blocks=True,
                lstrip_blocks=True,
                autoescape=False,
            )
            for host in user_data['log']:
                # Reverse the FQDN components to build a module prefix,
                # e.g. host.example.com -> com_example_host.
                x = host.split('.')
                x.reverse()
                mod_prefix = '_'.join(x)
                logger.info("processing %s" % host)
                # Run audit2allow over this host's collected audit lines.
                audit2allowproc = subprocess.Popen(cmd,
                                                   stdin=subprocess.PIPE,
                                                   stdout=subprocess.PIPE)
                (allow_out, allow_err) = audit2allowproc.communicate(
                    bytes('\n'.join(user_data['log'][host]) + '\n',
                          encoding='utf-8'))
                allow = allow_out.decode('utf-8')
                lines = allow.split('\n')
                source = None
                for line in lines:
                    try:
                        process_line(rules.host(host), line)
                    except Exception as ex:
                        raise ex
                # One .te module per source type seen for this host.
                for source, sourcerules in rules.host(host)._rules.items():
                    name = source[0:-2]
                    module_name = '%s_%s' % (mod_prefix, name)
                    t = env.get_template('module.jinja2')
                    classes = {}
                    rulesary = []
                    types = {source: True}
                    for target in sorted(sourcerules._rules.keys()):
                        targetrules = sourcerules._rules[target]
                        if not (target in types or target in ('self')):
                            types[target] = True
                        for class_ in sorted(targetrules._rules.keys()):
                            ops = targetrules._rules[class_]._rules.keys()
                            rulesary.append(
                                'allow %s %s:%s { %s };' %
                                (source, target, class_,
                                 ' '.join(sorted(ops))))
                            if not class_ in classes:
                                classes[class_] = {}
                            for op in ops:
                                classes[class_][op] = True
                    # Interface calls recorded for this source type.
                    for iface in sorted(sourcerules._iface_calls.keys()):
                        calls = sourcerules._iface_calls[iface]
                        for rest in sorted(calls.keys()):
                            rulesary.append("%s(%s%s)" % (iface, source, rest))
                    fname = tempdir[host] + '/%s.te' % module_name
                    print("writing %s" % fname)
                    classes2 = {}
                    for k, v in classes.items():
                        classes2[k] = list(sorted(v.keys()))
                    with open(fname, 'w') as f:
                        print(t.render(module_name=module_name,
                                       sclasses=sorted(classes2.keys()),
                                       classes=classes2,
                                       rules=rulesary,
                                       types=sorted(types.keys())),
                              file=f)
    except Exception as ex:
        raise ex
        # Fragment: this begins inside two nested loops (records within
        # events) of an enclosing function not visible here; indentation
        # depth is reconstructed -- TODO confirm against the full file.
        # The first record of an event carries the authoritative ses/pid;
        # propagate them onto the event's later records.
        if record_count == 1:
            ses_holder = generic_object.ses
            pid_holder = generic_object.pid
        else:
            generic_object.ses = ses_holder
            generic_object.pid = pid_holder
        # One space-separated summary line per record (Python 2 print).
        print '%s:%s:%s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s' % (
            event_count, event_rec_count, record_count,
            generic_object.flavor, generic_object.type, generic_object.time,
            generic_object.node, generic_object.ses, generic_object.auid,
            generic_object.key, generic_object.comm, generic_object.exe,
            generic_object.a0, generic_object.a1, generic_object.a2,
            generic_object.uid, generic_object.gid, generic_object.euid,
            generic_object.egid, generic_object.fsuid, generic_object.fsgid,
            generic_object.suid, generic_object.sgid, pid_holder,
            generic_object.ppid, ses_holder, generic_object.tty,
            generic_object.terminal, generic_object.success,
            generic_object.exit)
        record_count += 1
        if not au.next_record():
            break
    if not au.parse_next_event():
        break

######################################################
# this is the main "loop"
######################################################
# Feed-mode parser: feed_callback fires as events complete; event_cnt is
# passed in a list so the callback can mutate the counter in place.
au = auparse.AuParser(auparse.AUSOURCE_FEED);
event_cnt = 1
au.add_callback(feed_callback, [event_cnt])
chunk_len = 3
# Follow DATA_FILE tail-style, pushing each new chunk into the parser,
# then flush any partially-buffered event before exiting.
t=FileTail(DATA_FILE)
for s in t:
    au.feed(s)
au.flush_feed()
sys.exit(0)
event = au.get_timestamp() if event is None: print("Error getting timestamp - aborting") sys.exit(1) print(" event time: %d.%d:%d, host=%s" % (event.sec, event.milli, event.serial, none_to_null(event.host))) au.first_field() while True: print(" %s=%s (%s)" % (au.get_field_name(), au.get_field_str(), au.interpret_field())) if not au.next_field(): break print("") record_cnt += 1 if not au.next_record(): break event_cnt[0] += 1 au = auparse.AuParser(auparse.AUSOURCE_BUFFER_ARRAY, buf) print("Starting Test 1, iterate...") while au.parse_next_event(): if au.find_field("auid"): print("%s=%s" % (au.get_field_name(), au.get_field_str())) print("interp auid=%s" % (au.interpret_field())) else: print("Error iterating to auid") print("Test 1 Done\n") # Reset, now lets go to beginning and walk the list manually */ print("Starting Test 2, walk events, records, and fields...") walk_test(au) print("Test 2 Done\n")