def process(entry):  # pylint: disable=inconsistent-return-statements
    """Dispatch *entry* to the API matching its MIME type and run that API's
    install / scripts / report commands, reporting failures via issue().

    Falls back to the generic 'example' API when no MIME pattern matches.
    """
    print(f'+ Processing {entry.path!r}')

    # Find the first API whose fnmatch-style MIME pattern matches this entry.
    mime_type = None
    for name in API_DICT.keys():
        if fnmatch.fnmatch(entry.mime.name, name):
            mime_type = name
            break

    if mime_type is not None:
        mime = entry.mime.name
        api = API_DICT[mime_type]
        cwd = make_cwd(api, entry=entry)
    else:
        # No pattern matched: use the generic fallback API.
        mime = 'example'
        api = API_DICT['example']
        cwd = make_cwd(api, example=True)

    if api.remote:
        if remote(entry, mime, api):
            return issue(mime)
        # BUG FIX: pass DUMP as the 'file' keyword argument, consistent with
        # the final print_file() call at the end of this function.
        return print_file(entry.path, file=DUMP)

    # set up environ
    env = make_env(api)
    env['BROAPT_PATH'] = entry.path
    env['BROAPT_MIME'] = entry.mime.name

    # run install commands (once per API, guarded by the API's lock)
    if not api.inited.value:
        with api.locked:
            if init(api, cwd, env, mime, entry.uuid):
                return issue(mime)

    # run scripts commands (log files are numbered per command)
    for scripts_log, command in enumerate(api.scripts, start=1):
        log = f'{entry.uuid}-scripts.{scripts_log}'
        if run(command, cwd, env, mime, file=log):
            return issue(mime)

    # run report command
    log = f'{entry.uuid}-report.1'
    if run(api.report, cwd, env, mime, file=log):
        return issue(mime)
    print_file(entry.path, file=DUMP)
def process(file):
    """Run Bro against a single PCAP *file*, collect its logs under
    LOGS_PATH/<stem>-<uuid>, and enqueue the destination stem on QUEUE.
    """
    print_file(f'+ Working on PCAP: {file!r}', file=TIME)
    print(f'+ Working on PCAP: {file!r}')

    stem = pathlib.Path(file).stem
    uid = uuid.uuid4()
    dest_stem = f'{stem}-{uid}'
    with SALT_LOCK:
        file_salt(uid)

    # BUG FIX: copy the environment instead of mutating os.environ in place,
    # so per-file variables do not leak into the parent process and other
    # concurrent workers.
    env = os.environ.copy()
    env['BRO_LOG_SUFFIX'] = f'{uid}.log'
    env['BROAPT_PCAP'] = dest_stem

    args = ['bro']
    if BARE_MODE:
        args.append('--bare-mode')
    if NO_CHKSUM:
        args.append('--no-checksums')
    args.extend(['--readfile', file, os.path.join(ROOT, 'scripts')])

    start = time.time()
    # Line-buffered log handles; context managers guarantee they are closed
    # even if subprocess raises something other than CalledProcessError.
    with open(f'stdout.{uid}.log', 'at', 1) as stdout, \
         open(f'stderr.{uid}.log', 'at', 1) as stderr:
        print(f'+ {" ".join(args)}', file=stdout)
        print(f'+ {" ".join(args)}', file=stderr)
        try:
            subprocess.check_call(args, env=env, stdout=stdout, stderr=stderr)
        except subprocess.CalledProcessError:
            print_file(f'+ Failed on PCAP: {file!r}', file=TIME)
    end = time.time()

    with STDOUT_LOCK:
        redirect(src=stdout.name, dst=STDOUT, label=dest_stem)
    with STDERR_LOCK:
        redirect(src=stderr.name, dst=STDERR, label=dest_stem)

    # Move the per-run *.{uid}.log files into their destination folder,
    # stripping the uid suffix; best-effort (OSError suppressed).
    dest = os.path.join(LOGS_PATH, dest_stem)
    os.makedirs(dest, exist_ok=True)
    for log in glob.glob(f'*.{uid}.log'):
        with contextlib.suppress(OSError):
            shutil.move(log, os.path.join(dest, log.replace(f'.{uid}.log', '.log')))
    communicate(dest)

    print_file(f'+ Bro processing: {end-start} seconds', file=TIME)
    print_file(file, file=FILE)
    QUEUE.put(dest_stem)
def main_with_no_args():
    """Main polling loop: repeatedly process PCAP files not seen before.

    Returns 0 on KeyboardInterrupt; otherwise loops forever.
    """
    # main loop
    while True:
        try:
            processed_file = set(check_history())
            pcap_file = set(parse_args([PCAP_PATH]))
            file_list = sorted(pcap_file - processed_file)
            del processed_file  # free the history set before processing
            if file_list:
                if CPU_CNT <= 1:
                    # Plain loop: a comprehension for side effects is misleading.
                    for file in file_list:
                        process(file)
                else:
                    # BUG FIX: close/join the pool instead of leaking worker
                    # processes on every loop iteration.
                    with multiprocessing.Pool(CPU_CNT) as pool:
                        pool.map(process, file_list)
            time.sleep(INTERVAL)
        except KeyboardInterrupt:
            return 0
        print_file('+ Starting another turn...', file=TIME)
        print('+ Starting another turn...')
def do_show(self, line):
    """Show director or firewall configuration.

    *line* must be 'director' or 'firewall'; anything else prints usage.
    """
    if line == "director" or line == "firewall":
        configkey = line + "_config"
        if not self.config[configkey]:
            # Lazy %-args: formatting happens only if the record is emitted.
            logger.error("'%s' not defined in configuration file!", configkey)
        else:
            lines = utils.print_file(self.config[configkey])
            utils.pager(self.config['pager'], lines)
    else:
        # BUG FIX: Python-2 print statement is a SyntaxError under Python 3
        # (the rest of this file uses f-strings); use the print() function,
        # which behaves identically here on both versions.
        print("\nsyntax: show <module>\n")
def generate_log(log_name):
    """Emit one JSON info record per extracted file found in *log_name*'s
    files.log, resolving connection endpoints from the matching conn.log.
    """
    # BUG FIX: INFO must be declared global (cf. generate(), which declares
    # 'global DATE, HTTP_LOG'). With only 'global DATE', the assignment below
    # makes INFO a local bound only on date rollover, so every call where the
    # date has NOT changed raises UnboundLocalError at the final print_file().
    global DATE, INFO
    date = time.strftime('%Y-%m-%d')
    if date != DATE:
        archive(DATE)
        DATE = date
        INFO = os.path.join(LOGS_PATH, 'info', f'{DATE}.log')

    log_stem = log_name
    log_root = os.path.join(LOGS_PATH, log_name)
    # The run folder is named <stem>-<uuid>; pull the uuid back out.
    log_uuid = re.match(
        r'.*?-(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})',
        log_stem, re.IGNORECASE).group('uuid')

    log_file = os.path.join(log_root, 'files.log')
    if not os.path.isfile(log_file):
        return
    LOG_FILE = parse(log_file)
    LOG_CONN = parse(os.path.join(log_root, 'conn.log'))
    for line in LOG_FILE.context.itertuples():
        # Only records whose payload was actually extracted are of interest.
        if is_nan(getattr(line, 'extracted', None)):
            continue
        hosts = [
            dict(tx=ipaddress.ip_address(tx), rx=ipaddress.ip_address(rx))
            for (tx, rx) in zip(line.tx_hosts, line.rx_hosts)
        ]
        conns = list()
        is_orig = line.is_orig
        for conn_uid in line.conn_uids:
            record = next(
                LOG_CONN.context[lambda df: df.uid == conn_uid].iterrows())[1]  # pylint: disable=cell-var-from-loop
            # Orient src/dst according to which side originated the transfer.
            if is_orig:
                conn = dict(
                    src_h=ipaddress.ip_address(record['id.orig_h']),
                    src_p=int(record['id.orig_p']),
                    dst_h=ipaddress.ip_address(record['id.resp_h']),
                    dst_p=int(record['id.resp_p']),
                )
            else:
                conn = dict(
                    src_h=ipaddress.ip_address(record['id.resp_h']),
                    src_p=int(record['id.resp_p']),
                    dst_h=ipaddress.ip_address(record['id.orig_h']),
                    dst_p=int(record['id.orig_p']),
                )
            conns.append(conn)

        local_name = line.extracted
        mime_type = None
        dump_path = os.path.join(DUMP_PATH, local_name)
        if os.path.exists(dump_path):
            # Best-effort MIME detection on the extracted file.
            with contextlib.suppress(Exception):
                mime_type = magic.detect_from_filename(dump_path).mime_type
            # NOTE: MIME-based renaming of the dump (rename_dump) used to run
            # here and is currently disabled.
        else:
            dump_path = None

        info = dict(timestamp=line.ts if LOG_FILE.format == 'json' else line.ts.timestamp(),
                    log_uuid=log_uuid,
                    log_path=log_root,
                    log_name=log_stem,
                    dump_path=dump_path,
                    local_name=local_name,
                    source_name=getattr(line, 'filename', None),
                    hosts=hosts,
                    conns=conns,
                    bro_mime_type=line.mime_type,
                    real_mime_type=mime_type,
                    hash=dict(
                        md5=getattr(line, 'md5', None),
                        sha1=getattr(line, 'sha1', None),
                        sha256=getattr(line, 'sha256', None),
                    ))
        print_file(json.dumps(info, cls=IPAddressJSONEncoder), file=INFO)
# Deliberately pathological demo: it repeatedly shadows the builtin `str`
# with self-calling subclasses. Do not imitate in real code.
print("hi there, only using builtins today.")
import builtins
print("whoops, nearly forgot to show you the source")
from utils import print_file
print_file(__file__)


def metastring(message):
    """Return a new class named `str` that subclasses the CURRENT global
    `str`; calling an instance prints *message* and returns an instance of
    the base class, so chained classes unwind one level per call."""
    global str  # this is necessary; try it without and see what happens

    class str(str):  # wait, what? -- subclasses whatever `str` currently is

        def __call__(self):  # this runs when an *instance* of a class is called
            print(message, end=' ')
            return type(self).__bases__[0]()  # this is an instance, not a type

    return str  # the class we just created


# Build a chain of `str` subclasses, innermost word first: iterating the
# reversed word list means the LAST class created prints the FIRST word.
str = metastring('\n')
for s in "Isn't this crazy? (more like crazy bad; I'd get killed if I did it in production)".split(
)[::-1]:
    str = metastring(s)
# Unwind the chain: class -> instance -> (prints word) -> base-class instance,
# until we reach a plain builtins.str instance, which has no __call__.
while hasattr(str, '__call__'):
    str = str()
print("\nthis wouldn't be a real demo without some exceptions, would it?")
str = type(str)  # we messed this up earlier while __call__ ing
from scanner import Scanner
from parser import Parser
from utils import read_file, print_file, get_parser
from logger import logger

# Script entry point: validate the compose file given on the command line.
if __name__ == "__main__":
    cli = get_parser()
    options = cli.parse_args()
    source_text = read_file(options.filepath)
    logger.info(f"Starting compose-validator for {options.filepath}")
    print_file(options.filepath)
    scanner = Scanner(source_text)
    Parser(scanner).parse()
def init(HTTP_LOG):
    """Write the Bro-style header preamble to *HTTP_LOG*."""
    header_lines = (
        f'#separator {hexlify(SEPARATOR)}',
        f'#set_separator{SEPARATOR}{SET_SEPARATOR}',
        f'#empty_field{SEPARATOR}{EMPTY_FIELD}',
        f'#unset_field{SEPARATOR}{UNSET_FIELD}',
        f'#path{SEPARATOR}http',
        f'#open{SEPARATOR}{time.strftime("%Y-%m-%d-%H-%M-%S")}',
        f'#fields{SEPARATOR}{SEPARATOR.join(FIELDS)}',
        f'#types{SEPARATOR}{SEPARATOR.join(TYPES)}',
    )
    for header in header_lines:
        print_file(header, file=HTTP_LOG)
def close():
    """Append the closing timestamp line to the current HTTP log."""
    timestamp = time.strftime("%Y-%m-%d-%H-%M-%S")
    print_file(f'#close{SEPARATOR}{timestamp}', file=HTTP_LOG)
def generate(log_name):
    """Append one tab-separated record per http.log entry of *log_name*
    to the daily HTTP log, rolling the log file over on date change."""
    global DATE, HTTP_LOG
    date = time.strftime('%Y-%m-%d')
    if date != DATE:
        # Date rollover: close the old daily log and start a fresh one.
        close()
        DATE = date
        HTTP_LOG = os.path.join(LOGS_PATH, 'http', f'{DATE}.log')
        init(HTTP_LOG)

    log_root = os.path.join(LOGS_PATH, log_name)
    http_log = os.path.join(log_root, 'http.log')
    if not os.path.isfile(http_log):
        return
    LOG_HTTP = parse(http_log)
    for _, line in LOG_HTTP.context.iterrows():
        raw_ts = line['ts'] if LOG_HTTP.format == 'json' else line['ts'].timestamp()
        record = (
            line['id.orig_h'],                  # srcip
            None,                               # ad
            math.floor(raw_ts * 1000),          # ts (milliseconds)
            make_url(line),                     # url
            make_b64(line.get('referrer')),     # ref
            make_b64(line.get('user_agent')),   # ua
            line['id.resp_h'],                  # dstip
            make_b64(line.get('cookies')),      # cookie
            int(line['id.orig_p']),             # src_port
            make_json(line),                    # json
            line.get('method'),                 # method
            make_b64(line.get('post_body')),    # body
        )
        data = '\t'.join(beautify(obj) for obj in record)
        print_file(data, file=HTTP_LOG)
def run(command, cwd=None, env=None, mime='example', file='unknown'):
    """Execute *command* (shell string or argv list) with retries, logging
    each attempt under API_LOGS/<mime>/<file>[_<retry>].

    Returns EXIT_SUCCESS when the command exits 0 (possibly after retries),
    EXIT_FAILURE once all MAX_RETRY attempts have failed.
    """
    # prepare log path
    logs_path = os.path.join(API_LOGS, mime)
    os.makedirs(logs_path, exist_ok=True)

    # prepare runtime
    logs = os.path.join(logs_path, file)
    with temp_env(env):
        if isinstance(command, str):
            shell = True
            args = os.path.expandvars(command)
        else:
            shell = False
            args = [os.path.expandvars(arg) for arg in command]
        # BUG FIX: env defaults to None; calling env.items() unconditionally
        # raised AttributeError for every caller using the default.
        env_line = f'{os.linesep}# '.join(f'{key}={shlex.quote(val)}'
                                          for (key, val) in (env or {}).items())

        suffix = ''
        for retry in range(MAX_RETRY):
            log = logs + suffix
            print_file(f'# open: {time.strftime("%Y-%m-%d-%H-%M-%S")}', file=log)
            print_file(f'# cwd: {cwd}', file=log)
            print_file(f'# env: {env_line}', file=log)
            print_file(f'# args: {args}', file=log)
            # NOTE(review): open(log, 'w') truncates the file the header
            # lines above were just written to -- confirm print_file's
            # target semantics (it may write elsewhere).
            try:
                with open(log, 'w') as stdout:
                    returncode = subprocess.check_call(args, shell=shell, cwd=cwd, env=env,
                                                       stdout=stdout, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as error:
                # Record the failure, switch to a suffixed log name for the
                # next attempt, back off, and retry.
                print_file(f'# exit: {error.returncode}', file=log)
                print_file(f'# close: {time.strftime("%Y-%m-%d-%H-%M-%S")}', file=log)
                print_file(error.args, file=FAIL)
                suffix = f'_{retry+1}'
                time.sleep(INTERVAL)
                continue
            print_file(f'# exit: {returncode}', file=log)
            print_file(f'# close: {time.strftime("%Y-%m-%d-%H-%M-%S")}', file=log)
            return EXIT_SUCCESS
        return EXIT_FAILURE