async def gather(self):
	"""Enumerate the target domain over LDAP and SMB via the multiplexor proxy.

	Builds `ldap+multiplexor-ntlm` / `smb+multiplexor-ntlm` connection URLs from
	the agent's SOCKS/multiplexor settings and runs a full Gatherer pass.

	Returns:
		bool: True on success, False on any failure (the error is logged).
	"""
	try:
		info = {
			'ds': self.domain_server,
			'ms': self.multiplexor_server,
			'mp': self.multiplexor_port,
			'ai': self.agent_id,
			'sh': self.socks_server_info['listen_ip'],
			'sp': self.socks_server_info['listen_port'],
		}
		ldap_url = 'ldap+multiplexor-ntlm://{ds}/?proxytype=socks5&proxyhost={sh}&proxyport={sp}&authhost={ms}&authport={mp}&authagentid={ai}'.format(**info)
		smb_url = 'smb+multiplexor-ntlm://{ds}/?proxytype=socks5&proxyhost={sh}&proxyport={sp}&authhost={ms}&authport={mp}&authagentid={ai}'.format(**info)
		self.logger.info(ldap_url)
		self.logger.info(smb_url)

		# Constructing the URL objects validates the connection strings early;
		# the instances themselves are not used further (Gatherer re-parses the URLs).
		smb_mgr = SMBConnectionURL(smb_url)
		ldap_mgr = MSLDAPURLDecoder(ldap_url)

		work_dir = './workdir'
		with multiprocessing.Pool() as mp_pool:
			gatherer = Gatherer(
				self.db_conn,
				work_dir,
				ldap_url,
				smb_url,
				ldap_worker_cnt=None,
				smb_worker_cnt=None,
				mp_pool=mp_pool,
				smb_gather_types=['all'],
				progress_queue=self.progress_queue,
				show_progress=False,
				calc_edges=True,
				dns=None
			)
			res, err = await gatherer.run()
			if err is not None:
				raise err
		return True
	except Exception:
		# was a bare `except:` — that would also swallow asyncio.CancelledError
		# and KeyboardInterrupt; keep the catch-all boundary but narrow it.
		self.logger.exception('Failed to run scan!')
		return False
async def do_gather(self, cmd):
	"""Run a full gather (LDAP + SMB + optional Kerberos) for a client command.

	Resolves 'auto' connection strings via Windows SSPI logon info (raises on
	other platforms), spawns a monitor task forwarding progress to the client,
	runs the Gatherer, then replies with OK or an error message.

	Args:
		cmd: client command carrying ldap_url / smb_url / kerberos_url / dns,
			worker counts and stream_data flag.
	"""
	# Fix: these are referenced in `finally`; if setup raised before they were
	# assigned the cleanup itself would die with NameError.
	gatheringmonitor_task = None
	progress_queue = None
	logon = None
	try:
		progress_queue = asyncio.Queue()
		gatheringmonitor_task = asyncio.create_task(self.__gathermonitor(cmd, progress_queue))

		ldap_url = cmd.ldap_url
		if ldap_url == 'auto':
			if platform.system().lower() == 'windows':
				from winacl.functions.highlevel import get_logon_info
				logon = get_logon_info()
				ldap_url = 'ldap+sspi-ntlm://%s\\%s:jackdaw@%s' % (logon['domain'], logon['username'], logon['logonserver'])
			else:
				raise Exception('ldap auto mode selected, but it is not supported on this platform')

		smb_url = cmd.smb_url
		if smb_url == 'auto':
			if platform.system().lower() == 'windows':
				from winacl.functions.highlevel import get_logon_info
				logon = get_logon_info()
				smb_url = 'smb2+sspi-ntlm://%s\\%s:jackdaw@%s' % (logon['domain'], logon['username'], logon['logonserver'])
			else:
				raise Exception('smb auto mode selected, but it is not supported on this platform')

		kerberos_url = cmd.kerberos_url
		dns = cmd.dns
		if dns == 'auto':
			if platform.system().lower() == 'windows':
				from jackdaw.gatherer.rdns.dnstest import get_correct_dns_win
				if logon is None:
					# Fix: `logon` was only populated when ldap/smb were also
					# 'auto'; fetch it here so dns auto mode works on its own.
					from winacl.functions.highlevel import get_logon_info
					logon = get_logon_info()
				srv_domain = '%s.%s' % (logon['logonserver'], logon['dnsdomainname'])
				dns = await get_correct_dns_win(srv_domain)
				# None means the DNS autodetection failed; Gatherer tolerates it.
				dns = None if dns is None else str(dns)
			else:
				raise Exception('dns auto mode selected, but it is not supported on this platform')

		print(ldap_url)
		print(smb_url)
		print(dns)

		with multiprocessing.Pool() as mp_pool:
			gatherer = Gatherer(
				self.db_url,
				self.work_dir,
				ldap_url,
				smb_url,
				kerb_url=kerberos_url,
				ldap_worker_cnt=int(cmd.ldap_workers),
				smb_worker_cnt=int(cmd.smb_worker_cnt),
				mp_pool=mp_pool,
				smb_gather_types=['all'],
				progress_queue=progress_queue,
				show_progress=self.show_progress,
				calc_edges=True,
				ad_id=None,
				dns=dns,
				stream_data=cmd.stream_data
			)
			res, err = await gatherer.run()
			if err is not None:
				print('gatherer returned error')
				await self.send_error(cmd, str(err))
				return
		# Fix: removed leftover debug delay (`await asyncio.sleep(20)` marked "testing").
		await self.send_ok(cmd)
	except Exception as e:
		logger.exception('do_gather')
		await self.send_error(cmd, str(e))
	finally:
		if gatheringmonitor_task is not None:
			gatheringmonitor_task.cancel()
		progress_queue = None
async def run(args):
	"""Main CLI dispatcher: configure logging, then execute the selected subcommand.

	Args:
		args: parsed argparse namespace; relevant attributes vary per subcommand
			(sql, command, verbose, silent, ldap_url, smb_url, workers, ...).
	"""
	try:
		# NOTE(review): banner is printed when --silent is truthy — this looks
		# inverted (and `show_progress=args.silent` below follows the same
		# pattern); confirm the intended semantics of the `silent` flag.
		if args.silent is True:
			print(__banner__)
		# Verbosity tiers: 0 = info, 1 = debug, >1 = everything (level 1).
		if args.verbose == 0:
			logging.basicConfig(level=logging.INFO)
			jdlogger.setLevel(logging.INFO)
			msldaplogger.setLevel(logging.CRITICAL)
			smblogger.setLevel(100)  # above CRITICAL(50): effectively mutes SMB logs
		elif args.verbose == 1:
			logging.basicConfig(level=logging.DEBUG)
			jdlogger.setLevel(logging.DEBUG)
			msldaplogger.setLevel(logging.WARNING)
			smblogger.setLevel(logging.CRITICAL)
		elif args.verbose > 1:
			logging.basicConfig(level=1)
			msldaplogger.setLevel(logging.DEBUG)
			jdlogger.setLevel(1)
			smblogger.setLevel(1)

		# Every command except 'auto' needs an explicit DB connection string.
		if not args.sql and args.command != 'auto':
			print(
				'SQL connection identification is missing! You need to provide the --sql parameter'
			)
			sys.exit()

		work_dir = './workdir'
		ldap_url = None
		smb_url = None
		# Not every subparser defines these attributes, hence hasattr checks.
		if hasattr(args, 'ldap_url'):
			ldap_url = args.ldap_url
		if hasattr(args, 'smb_url'):
			smb_url = args.smb_url

		db_conn = args.sql
		# JACKDAW_SQLITE flags sqlite-specific behavior for downstream modules.
		if db_conn is not None:
			os.environ['JACKDAW_SQLITE'] = '0'
			if args.sql.lower().startswith('sqlite'):
				os.environ['JACKDAW_SQLITE'] = '1'
		else:
			os.environ['JACKDAW_SQLITE'] = '1'

		if args.command == 'enum':
			# Full enumeration: LDAP + SMB + edge calculation.
			with multiprocessing.Pool() as mp_pool:
				gatherer = Gatherer(db_conn, work_dir, ldap_url, smb_url,
					kerb_url=args.kerberoast,
					ldap_worker_cnt=args.ldap_workers,
					smb_worker_cnt=args.smb_workers,
					mp_pool=mp_pool,
					smb_gather_types=['all'],
					progress_queue=None,
					show_progress=args.silent,
					calc_edges=True,
					ad_id=None,
					dns=args.dns,
					no_work_dir=args.no_work_dir)
				res, err = await gatherer.run()
				if err is not None:
					raise err
		elif args.command == 'auto':
			# Windows-only: autodetect credentials/URLs from the logon session.
			_, err = await run_auto(ldap_worker_cnt=args.ldap_workers,
				smb_worker_cnt=args.smb_workers,
				dns=args.dns,
				work_dir=work_dir,
				show_progress=args.silent,
				no_work_dir=args.no_work_dir)
			if err is not None:
				print(err)
		elif args.command == 'dbinit':
			create_db(db_conn)
		elif args.command == 'adinfo':
			# List previously-scanned domains stored in the DB.
			session = get_session(db_conn)
			from jackdaw.dbmodel.adinfo import ADInfo
			from jackdaw.utils.table import print_table
			rows = [['Ad ID', 'domain name', 'scantime']]
			for did, distinguishedName, creation in session.query(
					ADInfo).with_entities(ADInfo.id, ADInfo.distinguishedName,
					ADInfo.fetched_at).all():
				# Turn "DC=corp,DC=local" into "corp.local" for display.
				name = distinguishedName.replace('DC=', '')
				name = name.replace(',', '.')
				rows.append([str(did), name, creation.isoformat()])
			print_table(rows)
		elif args.command == 'ldap':
			# LDAP-only enumeration (no SMB workers).
			with multiprocessing.Pool() as mp_pool:
				gatherer = Gatherer(db_conn, work_dir, ldap_url, smb_url,
					ldap_worker_cnt=args.ldap_workers,
					smb_worker_cnt=None,
					mp_pool=mp_pool,
					smb_gather_types=['all'],
					progress_queue=None,
					show_progress=args.silent,
					calc_edges=args.calculate_edges,
					ad_id=args.ad_id,
					no_work_dir=args.no_work_dir)
				await gatherer.run()
		elif args.command == 'kerberoast':
			gatherer = Gatherer(db_conn, work_dir, None, None,
				kerb_url=args.kerberos_url,
				ldap_worker_cnt=None,
				smb_worker_cnt=None,
				mp_pool=None,
				smb_gather_types=[],
				progress_queue=None,
				show_progress=False,
				calc_edges=False,
				ad_id=args.ad_id)
			await gatherer.run()
			print('Kerberoast Finished!')
		elif args.command in ['shares', 'sessions', 'localgroups', 'smball']:
			# SMB-only gather of the selected data type against an existing AD id.
			if args.command == 'smball':
				args.command = 'all'
			# NOTE(review): smb_gather_types is passed a bare string here while
			# other branches pass a list — confirm Gatherer accepts both.
			gatherer = Gatherer(
				db_conn,
				work_dir,
				ldap_url,
				smb_url,
				ad_id=args.ad_id,
				ldap_worker_cnt=None,
				smb_worker_cnt=args.smb_workers,
				mp_pool=None,
				smb_gather_types=args.command,
				progress_queue=None,
				show_progress=args.silent,
				calc_edges=False,
				dns=args.dns,
			)
			await gatherer.run()
		elif args.command == 'dns':
			# Reverse-DNS lookups only.
			gatherer = Gatherer(
				db_conn,
				work_dir,
				None,
				None,
				ad_id=args.ad_id,
				ldap_worker_cnt=None,
				smb_worker_cnt=None,
				mp_pool=None,
				smb_gather_types=None,
				progress_queue=None,
				show_progress=args.silent,
				calc_edges=False,
				dns=args.dns,
			)
			await gatherer.run()
		elif args.command == 'version':
			print('Jackdaw version: %s' % jdversion)
			print('MSLDAP version : %s' % ldapversion)
			print('AIOSMB version : %s' % smbversion)
		elif args.command == 'files':
			raise Exception('not yet implemented!')
			#if args.src == 'domain':
			#	if not args.ad_id:
			#		raise Exception('ad-id parameter is mandatory in ldap mode')
			#
			#	mgr = SMBConnectionURL(args.smb_url)
			#	settings_base = SMBShareGathererSettings(args.ad_id, mgr, None, None, None)
			#	settings_base.dir_depth = args.smb_folder_depth
			#	settings_base.dir_with_sd = args.with_sid
			#	settings_base.file_with_sd = args.with_sid
			#
			#	mgr = ShareGathererManager(settings_base, db_conn = db_conn, worker_cnt = args.smb_workers)
			#	mgr.run()
		elif args.command == 'creds':
			# Import credentials from an impacket secretsdump file.
			creds = JackDawCredentials(db_conn, args.domain_id)
			creds.add_credentials_impacket(args.impacket_file)
		elif args.command == 'passwords':
			creds = JackDawCredentials(db_conn)
			creds.add_cracked_passwords(args.potfile, args.disable_usercheck,
				args.disable_passwordcheck)
		elif args.command == 'uncracked':
			creds = JackDawCredentials(db_conn, args.domain_id)
			creds.get_uncracked_hashes(args.hash_type, args.history)
		elif args.command == 'cracked':
			creds = JackDawCredentials(db_conn, args.domain_id)
			creds.get_cracked_info()
		elif args.command == 'recalc':
			# Re-run edge calculation on an existing graph, storing results.
			with multiprocessing.Pool() as mp_pool:
				gatherer = Gatherer(db_conn, work_dir, None, None,
					mp_pool=mp_pool,
					progress_queue=None,
					show_progress=args.silent,
					calc_edges=True,
					store_to_db=True,
					ad_id=None,
					graph_id=args.graphid)
				await gatherer.run()
		elif args.command == 'nest':
			# Start the HTTP UI server (blocking).
			from jackdaw.nest.wrapper import NestServer
			debug = bool(args.verbose)
			server = NestServer(
				args.sql,
				bind_ip=args.ip,
				bind_port=args.port,
				debug=debug,
				work_dir=args.work_dir,
				graph_backend=args.backend,
			)
			server.run()
		elif args.command == 'ws':
			# Start the websocket API server.
			from jackdaw.nest.ws.server import NestWebSocketServer
			server = NestWebSocketServer(args.listen_ip, args.listen_port,
				args.sql, args.work_dir, args.backend, ssl_ctx=None)
			await server.run()
		elif args.command == 'bhimport':
			# Import a BloodHound zip export (beta; errors are suppressed
			# unless verbosity is raised).
			from jackdaw.utils.bhimport import BHImport
			print(
				'DISCLAIMER! This feature is still beta! Bloodhound acquires way less data than Jackdaw therefore not all functionality will work after import. Any errors during import will be silently ignored, use "-vvv" verbosity level to see all errors.'
			)
			bh = BHImport.from_zipfile(args.bhfile)
			bh.db_conn = db_conn
			if args.verbose > 1:
				bh.set_debug(True)
			bh.run()
			print('Import complete!')
	except Exception as e:
		# Top-level CLI boundary: log with traceback instead of crashing.
		jdlogger.exception('main')
async def run_auto(ldap_worker_cnt=None, smb_worker_cnt=500, dns=None,
		work_dir='./workdir', db_conn=None, show_progress=True,
		no_work_dir=False):
	"""Fully automatic enumeration using the current Windows logon session.

	Detects domain/logonserver via SSPI (falling back to USERDOMAIN /
	LOGONSERVER env vars), verifies LDAP reachability, creates a fresh sqlite
	DB when none is given, then runs a full Gatherer pass.

	Returns:
		tuple[bool, Exception | None]: (True, None) on success,
		(False, error) on failure.
	"""
	try:
		if platform.system() != 'Windows':
			raise Exception('auto mode only works on windows!')
		smblogger.setLevel(100)  # above CRITICAL: mute SMB logging entirely

		from winacl.functions.highlevel import get_logon_info
		logon = get_logon_info()
		jdlogger.debug(str(logon))

		# SSPI occasionally returns empty fields; fall back to env vars.
		if logon['domain'] == '' or logon['logonserver'] == '':
			if logon['domain'] == '':
				logon['domain'] = os.environ['USERDOMAIN']
			if logon['logonserver'] == '':
				logon['logonserver'] = os.environ['LOGONSERVER'].replace('\\', '')
			if logon['domain'] == '' or logon['logonserver'] == '':
				return False, Exception(
					"Failed to find user's settings! Is this a domain user?")

		# Quick reachability probe: can we open a TCP connection to LDAP (389)?
		try:
			_, writer = await asyncio.wait_for(
				asyncio.open_connection(logon['logonserver'], 389), 2)
			writer.close()
		except Exception:
			# was a bare `except:` — narrowed so cancellation still propagates
			return False, Exception(
				"Failed to connect to server %s over LDAP" % (logon['logonserver']))

		if db_conn is None:
			# Fresh timestamped sqlite DB per run.
			db_loc = '%s_%s.db' % (logon['domain'],
				datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S"))
			db_conn = 'sqlite:///%s' % db_loc
			create_db(db_conn)

		ldap_url = 'ldap+sspi-ntlm://%s\\%s:jackdaw@%s' % (
			logon['domain'], logon['username'], logon['logonserver'])
		smb_url = 'smb2+sspi-kerberos://%s\\%s:jackdaw@%s' % (
			logon['domain'], logon['username'], logon['logonserver'])
		jdlogger.debug('LDAP connection: %s' % ldap_url)
		jdlogger.debug('SMB connection: %s' % smb_url)

		if dns is None:
			# Try to autodetect a working DNS server for the DC's FQDN.
			from jackdaw.gatherer.rdns.dnstest import get_correct_dns_win
			srv_domain = '%s.%s' % (logon['logonserver'], logon['dnsdomainname'])
			dns = await get_correct_dns_win(srv_domain)
			if dns is None:
				jdlogger.debug('Failed to identify DNS server!')
			else:
				dns = str(dns)
		jdlogger.debug('DNS server selected: %s' % str(dns))

		kerb_url = 'auto'
		with multiprocessing.Pool() as mp_pool:
			gatherer = Gatherer(db_conn, work_dir, ldap_url, smb_url,
				kerb_url=kerb_url,
				ldap_worker_cnt=ldap_worker_cnt,
				smb_worker_cnt=smb_worker_cnt,
				mp_pool=mp_pool,
				smb_gather_types=['all'],
				progress_queue=None,
				show_progress=show_progress,
				calc_edges=True,
				dns=dns,
				no_work_dir=no_work_dir)
			res, err = await gatherer.run()
			if err is not None:
				raise err
		return True, None
	except Exception as e:
		return False, e
async def __start_cmd(self, cmd):
	"""Handle a freshly-connected C2 agent end-to-end.

	Builds proxy-routed dns/kerberos/smb/ldap URLs for the agent, creates a
	per-agent sqlite DB and work dir, runs the Gatherer, then starts a web
	server and a websocket server over the results, announcing each stage via
	the ducky queue.

	Args:
		cmd: agent command object (agentid, domain, username, logonserver, ...).
	"""
	try:
		print('New agent connected to C2! Starting jackdaw...')
		print(cmd.to_dict())
		progress_q = asyncio.Queue()
		progress_task = asyncio.create_task(self.print_progress(cmd.agentid.hex(), progress_q))
		await self.ducky_q.put(self.duckysvc_event['AGENTCONNECTED'])
		# ws/wss variant depends on whether the C2 channel itself is TLS.
		wsproto = 'wsnetws' if self.c2_proto == 'ws' else 'wsnetwss'
		domain = cmd.domain
		username = cmd.username
		# Split "DOMAIN\\user" form if the agent sent it combined.
		if username.find('\\') != -1:
			domain, username = username.split('\\')
		# All traffic is tunneled back through the agent (proxyagentid).
		dns_url = 'dns://%s:53/?proxytype=%s&proxyhost=%s&proxyport=%s&proxyagentid=%s' % (cmd.logonserver, wsproto, self.c2_ip, self.c2_port, cmd.agentid.hex())
		kerberos_url = '%s://%s:%s/?type=sspiproxy&agentid=%s' % (self.c2_proto, self.c2_ip, self.c2_port, cmd.agentid.hex())
		params = 'authhost=%s&authport=%s&authagentid=%s&proxytype=%s&proxyhost=%s&proxyport=%s&proxyagentid=%s' % (
			self.c2_ip, self.c2_port, cmd.agentid.hex(), wsproto, self.c2_ip, self.c2_port, cmd.agentid.hex())
		# The "aa" password is a placeholder — auth is done via sspiproxy.
		smb_url = 'smb2+sspiproxy-ntlm://%s\\%s:aa@%s/?%s' % (domain, username, cmd.logonserver, params)
		ldap_url = 'ldap+sspiproxy-ntlm://%s\\%s:aa@%s/?%s' % (domain, username, cmd.logonserver, params)
		print('dns %s' % dns_url)
		print('kerberos %s' % kerberos_url)
		print('smb %s' % smb_url)
		print('ldap %s' % ldap_url)
		smb_workers = 10
		ldap_workers = 4
		# Per-agent workdir + sqlite DB, named by timestamp and agent id prefix.
		loc_base = '%s_%s' % (datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S"), cmd.agentid.hex()[:8])
		p = pathlib.Path(self.workdir).joinpath('./workdir_' + loc_base)
		p.mkdir(parents=True, exist_ok=True)
		db_loc = '%s_%s.db' % (cmd.domain, loc_base)
		db_loc = p.joinpath(db_loc)
		print(db_loc)
		db_conn = 'sqlite:///%s' % db_loc
		create_db(db_conn)
		work_dir = str(p)
		print(work_dir)
		print(db_conn)
		await self.ducky_q.put(self.duckysvc_event['JDENUMSTART'])
		with multiprocessing.Pool() as mp_pool:
			gatherer = Gatherer(
				db_conn,
				work_dir,
				ldap_url,
				smb_url,
				kerb_url=kerberos_url,
				ldap_worker_cnt=ldap_workers,
				smb_worker_cnt=smb_workers,
				mp_pool=mp_pool,
				smb_gather_types=['all'],
				progress_queue=progress_q,
				show_progress=False,
				calc_edges=True,
				ad_id=None,
				dns=dns_url,
				no_work_dir=False
			)
			_, err = await gatherer.run()
			if err is not None:
				raise err
			print('%s Jackdaw finished sucsessfully!' % cmd.agentid.hex()[:8])
			progress_task.cancel()
		await self.ducky_q.put(self.duckysvc_event['JDENUMFINISH'])
		# NOTE(review): both ports come from the same get_web_port() call —
		# confirm it cannot hand out the same port twice (web vs ws collision).
		web_port = self.get_web_port()
		ws_port = self.get_web_port()
		print('%s Starting webserver on port %s' % (cmd.agentid.hex()[:8], web_port))
		websrv = WebServerProcess(db_conn, '0.0.0.0', web_port, work_dir, graph_backend = self.graph_backend)
		websrv.start()
		#checking if server is up now...
		# Poll until the web server accepts a TCP connection; also learn our
		# own IP as seen from that connection (sockname).
		own_ip = None
		while True:
			try:
				_, writer = await asyncio.open_connection('127.0.0.1', web_port)
				own_ip = writer.get_extra_info('sockname')[0]
			except:
				print('%s Could not connect to webserver, probably not ready yet' % cmd.agentid.hex()[:8])
				await asyncio.sleep(1)
			else:
				print('%s Jackdaw server started!' % cmd.agentid.hex()[:8])
				writer.close()
				break
		jd_url = 'http://%s:%s' % (own_ip, web_port)
		try:
			# Ask the web server to preload graph data for ad id 1 into memory.
			print('%s Asking server to load graph data to memory...' % cmd.agentid.hex()[:8])
			async with aiohttp.ClientSession() as session:
				async with session.post('%s/graph?adids=1' % jd_url) as resp:
					if resp.status != 200:
						raise Exception('Loading graphid failed! Status: %s' % resp.status)
					await resp.text()
		except Exception as e:
			raise e
		# NOTE(review): this message reports ws_port but the WEB server runs
		# on web_port — looks like the wrong variable; confirm.
		print('%s WEB Service listening on port %s' % (cmd.agentid.hex()[:8], ws_port))
		await self.ducky_q.put(self.duckysvc_event['JDSERVICESTART'])
		await self.ducky_q.put(
			[
				'STRING %s WEB listening on port %s' % (cmd.agentid.hex()[:8], ws_port),
				'DELAY 1000',
				'ENTER'
			]
		)
		server = NestWebSocketServer('127.0.0.1', ws_port, db_conn, work_dir, 'igraph', ssl_ctx = None)
		ws_task = asyncio.create_task(server.run())
		await asyncio.sleep(0)  # yield so the server task can start up
		# Same readiness poll for the websocket server.
		while True:
			try:
				_, writer = await asyncio.open_connection('127.0.0.1', ws_port)
				own_ip = writer.get_extra_info('sockname')[0]
			except:
				print('%s Could not connect to ws server, probably not ready yet' % cmd.agentid.hex()[:8])
				await asyncio.sleep(1)
			else:
				print('%s Jackdaw WS server started!' % cmd.agentid.hex()[:8])
				writer.close()
				break
		print('%s WS Service listening on port %s' % (cmd.agentid.hex()[:8], ws_port))
		await self.ducky_q.put(
			[
				'STRING %s WS Service listening on port %s' % (cmd.agentid.hex()[:8], ws_port),
				'DELAY 1000',
				'ENTER'
			]
		)
		# Keep the handler (and thus the servers) alive for the session.
		await asyncio.sleep(1000)
	except Exception as e:
		print('%s Exception handling agent Reson: %s' % (cmd.agentid.hex()[:8], e))
		await self.ducky_q.put(
			[
				'STRING %s Exception handling agent Reson: %s' % (cmd.agentid.hex()[:8], e),
				'DELAY 1000',
				'ENTER'
			]
		)