def port_scan(targets, ports):
    """SYN-scan every target/port combination and return the de-duplicated
    list of hosts that answered at least one probe with SYN/ACK.

    ``ports`` is wrapped in a single-element list so the function can be
    mapped over one port at a time (see the pool.map call in main()).
    """
    ports = [ports]
    src_port = RandShort()
    # TCP flag values; only SYN/RST/SYNACK/RSTACK are used below, the rest
    # are kept for reference.
    FIN = 0x01
    SYN = 0x02
    RST = 0x04
    PSH = 0x08
    ACK = 0x10
    SYNACK = 0x12
    RSTACK = 0x14
    URG = 0x20
    ECE = 0x40
    CWR = 0x80
    alive_hosts = []
    for target in targets:
        for port in ports:
            send_syn = sr1(IP(dst=target)/TCP(sport=src_port, dport=port, flags=SYN), verbose=0, timeout=2)
            if send_syn is None:  # fixed: compare to None with `is`, not `==`
                logger.live_bad('[{}]:\tNO TCP RESPONSE: [{}]'.format(logger.RED(target), logger.RED(str(port))))
            elif send_syn.haslayer(TCP):
                flags = send_syn.getlayer(TCP).flags
                if flags == SYNACK:
                    # Tear the half-open connection down with a RST; the
                    # reply is irrelevant (previously bound to an unused var).
                    sr(IP(dst=target)/TCP(sport=src_port, dport=port, flags=RST), verbose=0, timeout=2)
                    logger.live_good('[{}]:\tRECIEVED {}: [{}]'.format(logger.GREEN(target), logger.GREEN('SYNACK'), logger.GREEN(str(port))))
                    if target not in alive_hosts:
                        alive_hosts.append(target)
                elif flags == RSTACK:
                    logger.live_bad('[{}]:\tRECIEVED {}: [{}]'.format(logger.RED(target), logger.RED('RSTACK'), logger.RED(str(port))))
                elif flags == RST:
                    logger.live_bad('[{}]:\tRECIEVED {}: [{}]'.format(logger.RED(target), logger.RED('RST'), logger.RED(str(port))))
    # Membership check above already prevents duplicates; the set() pass is
    # kept for parity with the original behavior.
    alive_hosts = list(set(alive_hosts))
    return alive_hosts
def icmp_scan(targets):
    """Ping each host and return the list of hosts that answered ICMP.

    ``targets`` is wrapped in a single-element list so the function can be
    mapped over one host at a time (see the pool.map call in main()).
    """
    targets = [targets]
    logger.verbose('Amount of targets for ICMP Scan: {}'.format(len(targets)))
    alive_hosts = []
    timeout = 2
    logger.verbose('ICMP Timeout set to: '+logger.YELLOW(str(timeout)))
    for target in targets:
        logger.live_info('[{}]:\tSENDING ICMP'.format(logger.BLUE(target)))
        resp = None  # fixed: was unbound (NameError below) when sr1() raised
        try:
            resp = sr1(IP(dst=str(target))/ICMP(), timeout=timeout, verbose=0)
        except:
            logger.live_bad('[{}]:\tGOT NO RESPONSE'.format(logger.RED(target)))
        # fixed: default these so the UP branch below cannot hit a NameError
        # when the ICMP type could not be parsed.
        icmp_type = None
        resp_parse = None
        try:
            # NOTE(review): reads .code but labels it "type" — presumably
            # intentional; confirm against icmp_response_parse().
            icmp_type = str(resp.getlayer(ICMP).code)
            resp_parse = icmp_response_parse(icmp_type)
            logger.live_info('[{}]:\tGOT ICMP TYPE: [{}] {}'.format(logger.BLUE(target), logger.BLUE(icmp_type), logger.BLUE(resp_parse)))
        except:
            logger.live_info('[{}]:\tCOULD NOT GET ICMP TYPE'.format(logger.BLUE(target)))
        if resp is None:
            logger.live_bad('[{}]:\tGOT NO RESPONSE'.format(logger.RED(target)))
            logger.live_bad('[{}]:\tDOWN'.format(logger.RED(target)))
        elif (int(resp.getlayer(ICMP).type) == 3 and int(resp.getlayer(ICMP).code) in [1, 2, 3, 9, 10, 13]):
            # type 3 = destination unreachable; these codes indicate the
            # host/network is filtered or prohibited, i.e. effectively down.
            logger.live_bad('[{}]:\tDOWN'.format(logger.RED(target)))
        else:
            logger.green('[{}]:\tUP [{}] {}'.format(logger.GREEN(target), logger.GREEN(icmp_type), logger.GREEN(resp_parse)))
            if target not in alive_hosts:
                alive_hosts.append(target)
    return alive_hosts
def icmp_scan(target):
    """Ping a single host; return the target when it answers, else None."""
    timeout = 2
    logger.verbose('ICMP Timeout set to: '+str(timeout))
    logger.blue('Pinging: {}'.format(logger.BLUE(target)))
    resp = sr1(IP(dst=str(target))/ICMP(), timeout=timeout, verbose=0)
    try:
        icmp_type = str(resp.getlayer(ICMP).code)
        resp_parse = icmp_response_parse(icmp_type)
        logger.verbose('Got ICMP Type: [{}] {}'.format(
            logger.YELLOW(icmp_type), logger.YELLOW(resp_parse)))
    except:
        logger.verbose('Could not get ICMP Type code for: ' +
                       logger.YELLOW(target))
    # No reply at all -> host is down.
    if resp is None:
        logger.verbose('Got no response from: '+logger.YELLOW(target))
        logger.red_indent('{}: Down'.format(logger.RED(target)))
        return None
    # Destination-unreachable (type 3) with a filtered/prohibited code also
    # counts as down.
    unreachable = (int(resp.getlayer(ICMP).type) == 3
                   and int(resp.getlayer(ICMP).code) in [1, 2, 3, 9, 10, 13])
    if unreachable:
        logger.red_indent('{}: Down'.format(logger.RED(target)))
        return None
    logger.green_indent('{}: Up'.format(logger.GREEN(target)))
    return target
def search(self, domain, wildcard=True):
    """Query crt.sh for certificates of *domain*.

    Returns a ('crtsh', parsed_json) tuple on success, or None on any
    request/parse failure. When ``wildcard`` is true the domain is prefixed
    with the URL-encoded '%.' wildcard.
    """
    base_url = "https://crt.sh/?q={}&output=json"
    if wildcard:
        domain = "%25.{}".format(domain)
    url = base_url.format(domain)
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1'
    try:
        response = requests.get(url, headers={'User-Agent': user_agent})
    except Exception as e:
        logger.red('Got [%s] whilst requesting %s' %
                   (logger.RED(str(e)), logger.RED(url)))
        return None
    if response.ok:
        logger.green(
            'Got [%s] from %s' % (logger.GREEN(response.status_code), logger.GREEN(url)))
        # Removed an unused `content = response.content.decode('utf-8')`
        # local; the JSON is parsed directly from response.text.
        try:
            data = json.loads(response.text)
            return ('crtsh', data)
        except Exception as e:
            logger.red('Got [%s] whilst loading data from %s' %
                       (logger.RED(str(e)), logger.RED(url)))
            return None
    else:
        # Log the failing status for consistency with the other search()
        # implementations (previously this failure was silent).
        logger.red('Got [%s] from %s' %
                   (logger.RED(response.status_code), logger.RED(url)))
        return None
def get_nullsessions(target):
    """Attempt an anonymous (null-session) rpcclient lsaquery against target.

    Returns True when the null session authenticates, False when rpcclient
    reports an error (as judged by error_handle()).
    """
    logger.live_info('[{}]:\tATTEMPTING NULL SESSIONS'.format(logger.BLUE(target)))
    rpc_command_lsaquery = 'rpcclient -U "" -N {} -c "lsaquery"'.format(target)
    result = run(rpc_command_lsaquery, stdout=PIPE, stderr=PIPE, universal_newlines=False, shell=True)
    if len(result.stdout) > 0 and len(result.stderr) == 0:
        command_output = result.stdout
    elif len(result.stderr) > 0 and len(result.stdout) == 0:
        command_output = result.stderr
    else:
        # Fixed: command_output was unbound (NameError) when both streams
        # were empty or both carried data; fall back to their concatenation.
        command_output = result.stdout + result.stderr
    decoded = command_output.decode('utf-8')
    has_error = error_handle(target, decoded)
    try:
        output = decoded.rstrip().replace('\n', ' ')
        logger.verbose('[{}]:\tOUTPUT FROM RPCCLIENT: {}'.format(logger.YELLOW(target), logger.YELLOW(str(output))))
    except:
        logger.verbose('[{}]:\tOUTPUT FROM RPCCLIENT: FAILED TO GET DATA'.format(logger.YELLOW(target)))
    # error_handle()'s return type is not visible here, so the explicit
    # True/False comparisons are kept (a non-bool would fall through to None).
    if has_error == True:
        logger.live_bad('[{}]:\tNULL SESSIONS: {}'.format(logger.RED(target), logger.RED('FAILED')))
        return False
    elif has_error == False:
        logger.green('[{}]:\tNULL SESSIONS: {}'.format(logger.GREEN(target), logger.GREEN('SUCCESS')))
        return True
def insert(domain_data):
    """Store one domain-data record in the database; errors are logged,
    never raised. Always returns None."""
    database = init()
    record = vars(domain_data)
    logger.yellow('Adding %s to %s' % (logger.YELLOW(str(record)), logger.YELLOW(db_name)))
    try:
        database.insert(record)
    except Exception as err:
        logger.red('Got [%s] whilst adding %s to %s' %
                   (logger.RED(str(err)), logger.RED(record), logger.RED(db_name)))
    # this return code wont be checked anywhere, i just dont like leaving
    # unclosed functions :)
    return None
def header(string):
    # Print the red ASCII-art banner; `string` is rendered in yellow on the
    # third row via %-formatting of the already-colorized line.
    print(logger.RED(' ██████ ▓█████▄ ▄▄▄▄ '))
    print(logger.RED('▒██ ▒ ▒██▀ ██▌▓█████▄ '))
    print(logger.RED('░ ▓██▄ ░██ █▌▒██▒ ▄██ %s') % logger.YELLOW(string))
    print(logger.RED(' ▒ ██▒░▓█▄ ▌▒██░█▀ '))
    print(logger.RED('▒██████▒▒░▒████▓ ░▓█ ▀█▓'))
    print(logger.RED('▒ ▒▓▒ ▒ ░ ▒▒▓ ▒ ░▒▓███▀▒'))
    print(logger.RED('░ ░▒ ░ ░ ░ ▒ ▒ ▒░▒ ░ '))
    print(logger.RED('░ ░ ░ ░ ░ ░ ░ ░ '))
    print(logger.RED(' ░ ░ ░ '))
def get_nullsessions(target):
    """Attempt an anonymous (null-session) rpcclient lsaquery against target.

    Returns True on successful anonymous authentication, False otherwise.
    """
    logger.blue('Testing null sessions on {}'.format(logger.BLUE(target)))
    rpc_command_lsaquery = 'rpcclient -U "" -N {} -c "lsaquery"'.format(target)
    result = run(rpc_command_lsaquery, stdout=PIPE, stderr=PIPE,
                 universal_newlines=False, shell=True)
    if len(result.stdout) > 0 and len(result.stderr) == 0:
        command_output = result.stdout
    elif len(result.stderr) > 0 and len(result.stdout) == 0:
        command_output = result.stderr
    else:
        # Fixed: command_output was unbound (NameError) when both streams
        # were empty or both carried data; fall back to their concatenation.
        command_output = result.stdout + result.stderr
    decoded = command_output.decode('utf-8')
    has_error = error_handle(decoded)
    try:
        output = decoded.rstrip().replace('\n', ' ')
        logger.verbose('Output from rpcclient: '+logger.YELLOW(str(output)))
    except:
        logger.verbose('Failed to get output from rpcclient')
    # error_handle()'s return type is not visible here; keep the original
    # truthiness split (anything non-False counts as a failure).
    if has_error != False:
        logger.red_indent(
            'Failed to authenticate with null sessions to {}'.format(logger.RED(target)))
        return False
    elif has_error == False:
        logger.green_indent(
            'Successfully authenticated with null sessions to {}'.format(logger.GREEN(target)))
        return True
def get_shares(target, domain_name, remote_name, username, password):
    """List SMB shares on *target* via pysmb.

    Returns a list of share names (possibly empty), or None when the remote
    NetBIOS name could not be resolved. Connection and enumeration failures
    are logged and swallowed, yielding an empty list.
    """
    # Arbitrary client name presented to the SMB server.
    my_name = 'WIN-2003'
    logger.verbose('Client name configured to: '+logger.YELLOW(my_name))
    logger.blue('Looking up shares on {}'.format(logger.BLUE(target)))
    server_ip = target
    if remote_name != None:
        logger.verbose('Connection status: [{} | {} | {}]'.format(logger.YELLOW(
            server_ip), logger.YELLOW(remote_name), logger.YELLOW(domain_name)))
    else:
        try:
            logger.verbose('Connection status: [{} | {} | {}]'.format(logger.YELLOW(
                server_ip), logger.YELLOW('Could not resolve name'), logger.YELLOW(domain_name)))
        except:
            pass
    open_shares = []
    if remote_name == None:
        # Without the remote NetBIOS name pysmb cannot connect.
        logger.red_indent('Could not get remote hosts name, skipping...')
        return None
    else:
        conn = SMBConnection(username, password, my_name, remote_name,
                             domain=domain_name, use_ntlm_v2=True, is_direct_tcp=True)
        logger.verbose('SMB configuration:')
        logger.verbose('\tConnecting with: {}'.format(logger.YELLOW(username)))
        # Dump the connection object's plain-value attributes for debugging,
        # skipping reprs of classes/methods/objects and empty byte strings.
        for k, v in vars(conn).items():
            attribute = str(k)
            value = str(v)
            if '<class' not in value and 'bound method' not in value and 'object' not in value and "b''" not in value:
                logger.verbose('\t'+attribute+': '+value)
        try:
            conn.connect(server_ip, 445)
            logger.green('Successfully connected to {} on {}'.format(
                logger.GREEN('smb'), logger.GREEN(server_ip)))
            try:
                shares = conn.listShares(timeout=15)
                for share in range(len(shares)):
                    share_name = str(shares[share].name)
                    logger.green_indent_list(logger.GREEN(share_name))
                    open_shares.append(share_name)
            except Exception as e:
                logger.red_indent('Got error: {}'.format(logger.RED(e)))
        except:
            logger.red_indent(
                'Failed to obtain shares from {}'.format(logger.RED(server_ip)))
        return open_shares
def check(urls):
    """GET each URL (no redirects, 5s timeout, TLS unverified) and return
    the subset that answered at all; failures are logged per-URL."""
    reachable = []
    for url in urls:
        try:
            resp = requests.get(url, allow_redirects=False,
                                timeout=5, verify=False)
        except requests.exceptions.Timeout:
            logger.red('%s [%s]' % (url, logger.RED('Timed out')))
        except requests.exceptions.TooManyRedirects:
            logger.red('%s [%s]' % (url, logger.RED('Too many redirects')))
        except requests.exceptions.RequestException as e:
            logger.red('%s [%s]' % (url, logger.RED('Connection Refused')))
        else:
            logger.green('%s [%s]' % (url, logger.GREEN(resp.status_code)))
            reachable.append(url)
    return reachable
def get_name(target, timeout=2):
    """Resolve the NetBIOS name of *target*; return it as a str, or None."""
    logger.live_info('[{}]:\tATTEMPTING NETBIOS NAME'.format(logger.BLUE(target)))
    logger.verbose('[{}]:\tNETBIOS TIMEOUT: {}'.format(logger.YELLOW(target), logger.YELLOW(str(timeout))))
    bios = NetBIOS()
    try:
        tmpnetbios_name = bios.queryIPForName(target, timeout=timeout)
        netbios_name = str(tmpnetbios_name[0])
    except Exception:
        # queryIPForName can fail or return None (unsubscriptable); either
        # way treat it as "no name". Narrowed from a bare except so that
        # KeyboardInterrupt still propagates.
        netbios_name = None
    finally:
        # Always release the socket, even if the query raised.
        bios.close()
    if netbios_name is None:  # fixed: `is None` instead of `== None`
        logger.live_bad('[{}]:\tNETBIOS NAME: {}'.format(logger.RED(target), logger.RED('FAILED')))
        return None
    logger.green('[{}]:\tNETBIOS NAME: {}'.format(logger.GREEN(target), logger.GREEN(netbios_name)))
    return netbios_name
def validate(email):
    """Validate an address against O365 ActiveSync.

    Returns True (valid), False (unknown user), [True, password] when the
    probe password actually authenticated (200/403), or None for any other
    HTTP status.
    """
    try:
        # The password here doesnt really matter as the o365 link just
        # requires anything, but its worthwhile having a common password in
        # order to check for access at the same time
        password = '******'
        url = 'https://outlook.office365.com/Microsoft-Server-ActiveSync'
        headers = {"MS-ASProtocolVersion": "14.0"}
        credentials = (email, password)
        try:
            logger.verbose('Attempting to validate %s' % logger.YELLOW(email))
            response = requests.options(url, headers=headers, auth=credentials)
            code = response.status_code
        except:
            logger.verbose('Unable to connect to [%s]' % logger.RED(url))
            quit()
        if code == 200:
            logger.green('Found credentials: %s:%s' %
                         (logger.GREEN(email), logger.GREEN(password)))
            return [True, password]
        if code == 403:
            logger.green('Found credentials: %s:%s (2FA)' %
                         (logger.GREEN(email), logger.GREEN(password)))
            return [True, password]
        if code == 401:
            logger.verbose('Successfully validated %s' % logger.GREEN(email))
            return True
        if code == 404:
            logger.verbose('Could not validate %s' % logger.RED(email))
            return False
        logger.verbose(
            'Got HTTP Status Response %s. Unexpected, skipping.' % logger.RED(str(code)))
        return None
    except KeyboardInterrupt:
        logger.yellow('Keyboard interrupt detected!')
        quit()
def cred_split(creds):
    """Split a 'username:password' string into a (username, password) tuple.

    Returns ('', '') when no credentials were supplied. Exits the program
    when the string cannot be split.
    """
    if creds is None:
        # No credentials supplied: fall back to anonymous/empty values.
        return '', ''
    try:
        # maxsplit=1 so passwords that themselves contain ':' survive intact
        # (previously everything after the second colon was silently dropped).
        username, password = creds.split(':', 1)
    except (ValueError, AttributeError):
        # Fixed: logger.RED only colorizes a string; logger.red is the call
        # that actually prints, matching usage elsewhere in this file.
        logger.red('failed to split credentials')
        quit()
    return username, password
def search(self, domain):
    """Query dns.bufferover.run for *domain*.

    Returns ('dns.bufferover.run', parsed_json) on success, None otherwise.
    """
    url = "http://dns.bufferover.run/dns?q=%s" % domain
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1'
    try:
        response = requests.get(url, headers={'User-Agent': user_agent})
    except Exception as e:
        logger.red('Got [%s] whilst requesting %s' %
                   (logger.RED(str(e)), logger.RED(url)))
        return None
    if not response.ok:
        logger.red('Got [%s] from %s' %
                   (logger.RED(response.status_code), logger.RED(url)))
        return None
    logger.green('Got [%s] from %s' %
                 (logger.GREEN(response.status_code), logger.GREEN(url)))
    content = response.content.decode('utf-8')
    try:
        data = json.loads(response.text)
    except Exception as e:
        logger.red('Got [%s] whilst loading data from %s' %
                   (logger.RED(str(e)), logger.RED(url)))
        return None
    return ('dns.bufferover.run', data)
def get_shares(target, domain_name, remote_name, username, password):
    """List SMB shares on *target* via pysmb (live-output variant).

    Returns a list of share names (possibly empty), or None when the remote
    NetBIOS name could not be resolved. Connection failures are logged and
    swallowed, yielding an empty list.
    """
    # Arbitrary client name presented to the SMB server.
    my_name = 'WIN-2003'
    logger.verbose('[{}]:\tCLIENT NAME CONFIGURED TO: {}'.format(logger.YELLOW(target),logger.YELLOW(my_name)))
    logger.live_info('[{}]:\tATTEMPTING SHARES'.format(logger.BLUE(target)))
    server_ip = target
    if remote_name != None:
        logger.verbose('[{}]:\tCONNECTION STATUS: [{} | {} | {}]'.format(logger.YELLOW(target),logger.YELLOW(server_ip), logger.YELLOW(remote_name), logger.YELLOW(domain_name)))
    else:
        logger.verbose('[{}]:\tCONNECTION STATUS: [{} | {} | {}]'.format(logger.YELLOW(target),logger.YELLOW(server_ip), logger.YELLOW('COULD NOT RESOLVE'), logger.YELLOW(domain_name)))
    open_shares = []
    if remote_name == None:
        # Without the remote NetBIOS name pysmb cannot connect.
        logger.live_bad('[{}]:\tSMB CONNECTION: {}'.format(logger.RED(server_ip),logger.RED('COULD NOT GET REMOTE HOST NAME')))
        return None
    else:
        conn = SMBConnection(username, password, my_name, remote_name,domain=domain_name, use_ntlm_v2=True, is_direct_tcp=True)
        logger.verbose('SMB configuration:')
        logger.verbose('\tConnecting with: {}'.format(logger.YELLOW(username)))
        # Dump the connection object's plain-value attributes for debugging,
        # skipping reprs of classes/methods/objects and empty byte strings.
        for k, v in vars(conn).items():
            attribute = str(k)
            value = str(v)
            if '<class' not in value and 'bound method' not in value and 'object' not in value and "b''" not in value:
                logger.verbose('\t'+attribute+': '+value)
        try:
            conn.connect(server_ip, 445)
            logger.green('[{}]:\tSMB CONNECTION: {}'.format(logger.GREEN(server_ip),logger.GREEN('SUCCESS')))
            try:
                shares = conn.listShares(timeout=15)
                for share in range(len(shares)):
                    share_name = str(shares[share].name)
                    open_shares.append(share_name)
            except Exception as e:
                logger.live_bad('Got error: {}'.format(logger.RED(e)))
        except:
            logger.live_bad('[{}]:\tSMB CONNECTION: {}'.format(logger.RED(server_ip),logger.RED('FAILED')))
        # Summary line prints even when the connection failed (empty list).
        logger.green('[{}]:\tSHARES: {}'.format(logger.GREEN(target),logger.GREEN(', '.join(open_shares))))
        return open_shares
def validate(email):
    """Validate an address against O365 ActiveSync.

    Returns True (valid), False (unknown user), [True, password] when the
    probe password actually authenticated (200/403), or None for any other
    outcome.
    """
    try:
        # Any password works for enumeration; a common one doubles as a
        # credential check at the same time.
        password = '******'
        url = 'https://outlook.office365.com/Microsoft-Server-ActiveSync'
        headers = {"MS-ASProtocolVersion": "14.0"}
        auth = (email, password)
        try:
            r = requests.options(url, headers=headers, auth=auth)
            status = r.status_code
        except:
            logger.red('Unable to connect to [%s]' % logger.RED(url))
            quit()
        if status == 401:
            logger.green('Successfully validated %s' % logger.GREEN(email))
            return True
        elif status == 404:
            if r.headers.get("X-CasErrorCode") == "emailNotFound":
                logger.red('Could not validate %s' % logger.RED(email))
                return False
            # 404 without that header falls through and returns None.
        elif status == 403:
            logger.green('Found credentials: %s:%s (2FA)' %
                         (logger.GREEN(email), logger.GREEN(password)))
            return [True, password]
        elif status == 200:
            logger.green('Found credentials: %s:%s' %
                         (logger.GREEN(email), logger.GREEN(password)))
            return [True, password]
        else:
            # Fixed: the message contained a %s but was never %-formatted,
            # so it printed a literal '%s' (and misspelt "Unexpected").
            logger.red('Got HTTP Status Response %s. Unexpected, skipping.'
                       % logger.RED(str(status)))
            return None
    except KeyboardInterrupt:
        logger.yellow('Keyboard interrupt detected!')
        quit()
def port_scan(target, ports):
    """SYN-scan *ports* on a single *target*.

    Returns the target as soon as one port answers SYN/ACK (host is alive),
    or None when every probe is closed/unanswered.
    """
    src_port = RandShort()
    # TCP flag values; only SYN/RST/SYNACK/RSTACK are used below.
    FIN = 0x01
    SYN = 0x02
    RST = 0x04
    PSH = 0x08
    ACK = 0x10
    SYNACK = 0x12
    RSTACK = 0x14
    URG = 0x20
    ECE = 0x40
    CWR = 0x80
    logger.blue('Checking TCP ports: {}'.format(logger.BLUE(target)))
    for port in ports:
        send_syn = sr1(IP(dst=target)/TCP(sport=src_port, dport=port, flags=SYN), verbose=0, timeout=2)
        if send_syn is None:  # fixed: compare to None with `is`, not `==`
            logger.verbose(
                'Recieved no TCP response from: '+logger.YELLOW(target))
            logger.red_indent('{}:{} [{}]'.format(logger.RED(
                target), logger.RED(str(port)), logger.RED('CLOSED')))
        elif send_syn.haslayer(TCP):
            flags = send_syn.getlayer(TCP).flags
            if flags == SYNACK:
                # Tear the half-open connection down with a RST; the reply
                # is irrelevant (previously bound to an unused variable).
                sr(IP(dst=target)/TCP(sport=src_port, dport=port, flags=RST), verbose=0, timeout=2)
                logger.verbose('Recieved SYNACK from {}, responding with RST'.format(
                    logger.YELLOW(target)))
                logger.green_indent('{}:{} [{}]'.format(logger.GREEN(
                    target), logger.GREEN(str(port)), logger.GREEN('OPEN')))
                logger.verbose('Found alive host: ' + logger.YELLOW(target))
                return target
            elif flags == RSTACK:
                logger.verbose('Recieved RSTACK from: ' + logger.YELLOW(target))
                logger.red_indent('{}:{} [{}]'.format(logger.RED(
                    target), logger.RED(str(port)), logger.RED('CLOSED')))
            elif flags == RST:
                logger.verbose('Recieved RST from: '+logger.YELLOW(target))
                logger.red_indent('{}:{} [{}]'.format(logger.RED(
                    target), logger.RED(str(port)), logger.RED('CLOSED')))
    return None
def search(self, domain):
    """Query certspotter's issuance API for *domain*.

    Returns ('certspotter', parsed_json) on success, None otherwise.
    """
    url = "https://api.certspotter.com/v1/issuances?domain=%s&expand=dns_names&expand=issuer&expand=cert" % domain
    user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1'
    try:
        response = requests.get(url, headers={'User-Agent': user_agent})
    except Exception as e:
        logger.red('Got [%s] whilst requesting %s' %
                   (logger.RED(str(e)), logger.RED(url)))
        return None
    if not response.ok:
        return None
    logger.green('Got [%s] from %s' %
                 (logger.GREEN(response.status_code), logger.GREEN(url)))
    content = response.content.decode('utf-8')
    try:
        data = json.loads(response.text)
    except Exception as e:
        logger.red('Got [%s] whilst loading data from %s' %
                   (logger.RED(str(e)), logger.RED(url)))
        return None
    return ('certspotter', data)
def validate(email, api_key):
    """Verify an email address via the hunter.io email-verifier API.

    Returns True when hunter scores the address above 68, False when not,
    429 when the API quota is exhausted, 401 when the key is invalid, and
    None for any other unexpected response.
    """
    url = 'https://api.hunter.io/v2/email-verifier?email=%s&api_key=%s' % (
        email, api_key)
    try:
        r = requests.get(url)
        status_code = r.status_code
    except Exception as e:
        logger.red('Unable to get %s' % url)
        quit()
    try:
        data = json.loads(r.content)
    except Exception as e:
        logger.red('Failed to load JSON from requests')
        quit()
    if status_code == 429 or status_code == 401:
        try:
            result = data['errors'][0]['details']
        except Exception as e:
            logger.red('Failed to load JSON from errors')
            quit()
        if 'exceeded' in result:
            return 429
        elif 'No user found for the API key supplied' in result:
            return 401
    elif status_code == 200:
        try:
            result = data['data']['result']
            score = data['data']['score']
        except Exception as e:
            logger.red('Unable to extract json for %s' % email)
            quit()
        percent = str(score) + '%'
        # 68 is the score threshold used by this tool for "validated".
        if score > 68:
            logger.green('Validated %s at %s' %
                         (logger.GREEN(email), logger.GREEN(percent)))
            return True
        else:
            return False
    else:
        # Fixed: the original string had no %s placeholder, so applying %
        # raised "not all arguments converted" (TypeError) at runtime.
        logger.red('Got unexpected HTTP response %s' %
                   logger.RED(str(status_code)))
def do_certspotter():
    """Fetch, validate, crunch and store certspotter data for the
    module-level domain; returns the crunched data or False."""
    api = certspotter.api()
    raw = api.search(domain)
    if not verify(raw):
        logger.red('Failed to obtain data from %s' % logger.RED('certspotter'))
        return False
    logger.green('Successfully validated %s response' %
                 logger.GREEN('certspotter'))
    crunched = crunch.get_certspotter_data(raw)
    if not verify(crunched):
        return False
    db.insert(crunched)
    return crunched
def do_bufferoverrun():
    """Fetch, validate, crunch and store bufferover.run data for the
    module-level domain; returns the crunched data or False."""
    api = bufferoverrun.api()
    raw = api.search(domain)
    if not verify(raw):
        logger.red('Failed to obtain data from %s' %
                   logger.RED('bufferover.run'))
        return False
    logger.green('Successfully validated %s response' %
                 logger.GREEN('bufferover.run'))
    crunched = crunch.get_bufferoverrun_data(raw)
    if not verify(crunched):
        return False
    db.insert(crunched)
    return crunched
def do_crtsh():
    """Fetch, validate, crunch and store crt.sh data for the module-level
    domain; returns the crunched data or False.

    An api instance isn't strictly required, but keeps the door open for
    querying multiple domains later. search() returns a (source, json)
    tuple.
    """
    api = crtsh.api()
    raw = api.search(domain)
    if not verify(raw):
        logger.red('Failed to obtain data from %s' % logger.RED('crt.sh'))
        return False
    logger.green('Successfully validated %s response' % logger.GREEN('crt.sh'))
    crunched = crunch.get_crtsh_data(raw)
    if not verify(crunched):
        return False
    db.insert(crunched)
    return crunched
def banner1():
    # Print the two-tone (yellow "SM" / red rest) ASCII-art banner.
    print(
        logger.YELLOW(' _______ __ __ //') +
        logger.RED(' _______ _______ _______ _______ ___ _______ '))
    print(
        logger.YELLOW('| || |_| | ') +
        logger.RED('| _ || || _ || || | | |'))
    print(
        logger.YELLOW('| _____|| | ') +
        logger.RED('| |_| || ___|| |_| || ___|| | | ___|'))
    print(
        logger.YELLOW('| |_____ | | ') +
        logger.RED('| || |___ | || | __ | | | |___ '))
    print(
        logger.YELLOW('|_____ || | ') +
        logger.RED('| _ | | ___|| || || || |___ | ___|'))
    print(
        logger.YELLOW(' _____| || ||_|| | ') +
        logger.RED('| |_| || |___ | _ || |_| || || |___ '))
    print(
        logger.YELLOW('|_______||_| |_| ') +
        logger.RED('|_______||_______||__| |__||_______||_______||_______|'))
    print()
def log_results(results_data, probed):
    """Write probed/subdomain/wildcard results to uid-stamped text files.

    results_data is a (subdomains, wildcards) pair; probed may be None to
    skip the probed-URLs file. Returns (subdomains, wildcards) on success,
    False as soon as any file write fails.
    """
    subdomains = results_data[0]
    wildcards = results_data[1]
    uid = get_uid()
    if probed is not None:
        try:
            filename = '%s_probed_%s.txt' % (args.domain, uid)
            with open(filename, 'w') as f:
                # Fixed: this message previously said "wildcards"
                # (copy-paste error from the block below).
                logger.green('Writing probed to %s' % logger.GREEN(filename))
                for subdomain in probed:
                    f.write(subdomain + '\n')
        except Exception as e:
            logger.red('Got [%s] whilst logging to %s' %
                       (logger.RED(str(e)), logger.RED(filename)))
            return False
    try:
        filename = '%s_subdomains_%s.txt' % (args.domain, uid)
        with open(filename, 'w') as f:
            logger.green('Writing subdomains to %s' % logger.GREEN(filename))
            for subdomain in subdomains:
                f.write(subdomain + '\n')
    except Exception as e:
        logger.red('Got [%s] whilst logging to %s' %
                   (logger.RED(str(e)), logger.RED(filename)))
        return False
    try:
        filename = '%s_wildcards_%s.txt' % (args.domain, uid)
        logger.green('Writing wildcards to %s' % logger.GREEN(filename))
        with open(filename, 'w') as f:
            for wildcard in wildcards:
                f.write(wildcard + '\n')
    except Exception as e:
        logger.red('Got [%s] whilst logging to %s' %
                   (logger.RED(str(e)), logger.RED(filename)))
        return False
    return subdomains, wildcards
def banner2():
    # Print the alternating yellow/red block-character ASCII-art banner.
    print()
    print(
        logger.YELLOW('▀█████████▄ ') + logger.RED(' ▄████████') +
        logger.YELLOW(' ▄████████ ') + logger.RED(' ▄██████▄ ') +
        logger.YELLOW(' ▄█ ') + logger.RED(' ▄████████'))
    print(
        logger.YELLOW(' ███ ███') + logger.RED(' ███ ███') +
        logger.YELLOW(' ███ ███ ') + logger.RED(' ███ ███ ') +
        logger.YELLOW('███ ') + logger.RED(' ███ ███'))
    print(
        logger.YELLOW(' ███ ███') + logger.RED(' ███ █▀ ') +
        logger.YELLOW(' ███ ███ ') + logger.RED(' ███ █▀ ') +
        logger.YELLOW('███ ') + logger.RED(' ███ █▀ '))
    print(
        logger.YELLOW(' ▄███▄▄▄██▀ ') + logger.RED(' ▄███▄▄▄ ') +
        logger.YELLOW(' ███ ███ ') + logger.RED(' ▄███ ') +
        logger.YELLOW('███ ') + logger.RED(' ▄███▄▄▄ '))
    print(
        logger.YELLOW('▀▀███▀▀▀██▄ ') + logger.RED('▀▀███▀▀▀ ') +
        logger.YELLOW('▀███████████ ') + logger.RED('▀▀███ ████▄ ') +
        logger.YELLOW('███ ') + logger.RED('▀▀███▀▀▀ '))
    print(
        logger.YELLOW(' ███ ██▄') + logger.RED(' ███ █▄ ') +
        logger.YELLOW(' ███ ███ ') + logger.RED(' ███ ███ ') +
        logger.YELLOW('███ ') + logger.RED(' ███ █▄ '))
    print(
        logger.YELLOW(' ███ ███') + logger.RED(' ███ ███') +
        logger.YELLOW(' ███ ███ ') + logger.RED(' ███ ███ ') +
        logger.YELLOW('███▌ ▄') + logger.RED(' ███ ███'))
    print(
        logger.YELLOW('▄█████████▀ ') + logger.RED(' ██████████') +
        logger.YELLOW(' ███ █▀ ') + logger.RED(' ████████▀ ') +
        logger.YELLOW('█████▄▄██') + logger.RED(' ██████████'))
    print(
        logger.YELLOW(' ') + logger.RED(' ') +
        logger.YELLOW(' ') + logger.RED(' ') +
        logger.YELLOW('▀ ') + logger.RED(' '))
quit() # The most important part... banner.banner() if args.verbose: logger.verbose_switch = True logger.debug_switch = False if args.debug: logger.debug_switch = True logger.verbose_switch = True if args.cookie == None: logger.red('Please specify a file containing the %s cookie.' % logger.RED('li_at')) quit() try: with open(args.cookie, 'r') as f: cookie = f.readline().rstrip() except: logger.red('Please add the cookie to a file') logger.debug('%s not valid' % args.cookie) quit() company_id = args.company_id domain = args.domain if args.output:
def extract_data(data,domain,email_format,validation,api_key):
    """Walk a LinkedIn Voyager search-result blob and build a dict of
    {fullname: userinfo-list}, optionally validating each generated email
    via o365 or hunter.io.

    userinfo is [profile_url, picture, firstname, middlename, surname,
    email, current_role, current_company, (validated)?].
    """
    # Normalize the domain to the '@domain' form used for email generation.
    if domain.startswith('@'):
        domain=domain  # already prefixed; no-op by design
    else:
        domain='@'+domain
    collected_data={}
    for d in data['elements'][0]['elements']:
        # Skip headless (private/hidden) profiles.
        if 'com.linkedin.voyager.search.SearchProfile' in d['hitInfo'] and d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['headless'] == False:
            try:
                industry = d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['industry']
            except:
                industry = ""
            raw_firstname = d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['firstName']
            raw_surname = d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['lastName']
            profile_url = "https://www.linkedin.com/in/%s" % d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['publicIdentifier']
            occupation = d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['occupation']
            location = d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['location']
            # Try to split "Role at Company" out of the snippet heading,
            # falling back to the occupation field. On failure
            # current_company becomes the tuple (None, 'Error'), which the
            # `current_company[0] != None` checks below rely on.
            try:
                role_data=d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['snippets'][0]['heading']['text']
                try:
                    current_role=role_data.split(' at ')[0]
                    current_company=role_data.split(' at ')[1]
                except:
                    current_company=None,'Error'
                    current_role=occupation
            except:
                try:
                    current_company=occupation.split(' at ')[1]
                    current_role=occupation.split(' at ')[0]
                except:
                    current_company=None,'Error'
                    current_role=occupation
            # names() returns [first, middle, sur, full]; emails() builds
            # the address from the configured email_format.
            name_data=[raw_firstname,raw_surname]
            name_scheme=naming_scheme.names(name_data)
            firstname=name_scheme[0]
            middlename=name_scheme[1]
            surname=name_scheme[2]
            fullname=name_scheme[3]
            name_data=[firstname,middlename,surname]
            email_scheme=naming_scheme.emails(name_data,email_format,domain)
            email = email_scheme
            # Profile picture URL = rootUrl + chosen artifact segment.
            # NOTE(review): `current_company[0] != None` tests the FIRST
            # CHARACTER when current_company is a string — truthy for any
            # non-empty company name, None only for the error tuple.
            try:
                datapoint_1=d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['picture']['com.linkedin.common.VectorImage']['rootUrl']
                datapoint_2=d['hitInfo']['com.linkedin.voyager.search.SearchProfile']['miniProfile']['picture']['com.linkedin.common.VectorImage']['artifacts'][2]['fileIdentifyingUrlPathSegment']
                picture=datapoint_1+datapoint_2
                if current_company[0] != None:
                    logger.green('Successfully obtained image for %s [%s]' % (logger.GREEN(fullname),logger.GREEN(current_company)))
                else:
                    logger.green('Successfully obtained image for %s' % (logger.GREEN(fullname)))
            except:
                if current_company[0] != None:
                    logger.red('Unable to obtain image for %s [%s]' % (logger.RED(fullname),logger.RED(current_company)))
                else:
                    logger.red('Unable to obtain image for %s' % (logger.RED(fullname)))
                picture = None
            if current_company[0] != None:
                logger.green('Found %s [%s] at %s' % (logger.GREEN(fullname),logger.GREEN(email),logger.GREEN(current_company)))
                userinfo=[profile_url,picture,firstname,middlename,surname,email,current_role,current_company]
            else:
                logger.green('Found %s [%s]' % (logger.GREEN(fullname),logger.GREEN(email)))
                userinfo=[profile_url,picture,firstname,middlename,surname,email,current_role,'Error']
            # Optional email validation; hunter may return quota/auth
            # sentinel codes (429/401) which abort the whole run.
            if validation != None:
                if validation == 'o365':
                    validated=o365_validation.validate(email)
                    userinfo.append(validated)
                elif validation == 'hunter':
                    validated=hunter_api.validate(email,api_key)
                    if validated == 429:
                        logger.red('You have exceeded your hunter API Requests.')
                        quit()
                    elif validated == 401:
                        logger.red('The API Key specified recieved an %s error.' % 'authentication')
                        quit()
                    else:
                        userinfo.append(validated)
            collected_data[fullname]=userinfo
    return collected_data
def banner3():
    # Print the all-red block-character ASCII-art banner.
    print()
    print(
        logger.RED(
            ' ▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄ ▄▄▄▄▄▄▄▄▄▄▄ '
        ))
    print(
        logger.RED(
            '░░░░░░░░░░▌ ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌ ▐░░░░░░░░░░░▌'
        ))
    print(
        logger.RED(
            '░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░▌ ▐░█▀▀▀▀▀▀▀▀▀ '
        ))
    print(
        logger.RED(
            '░▌ ▐░▌▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌ ▐░▌ '
        ))
    print(
        logger.RED(
            '░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄█░▌▐░▌ ▄▄▄▄▄▄▄▄ ▐░▌ ▐░█▄▄▄▄▄▄▄▄▄ '
        ))
    print(
        logger.RED(
            '░░░░░░░░░░▌ ▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░▌▐░░░░░░░░▌▐░▌ ▐░░░░░░░░░░░▌'
        ))
    print(
        logger.RED(
            '░█▀▀▀▀▀▀▀█░▌▐░█▀▀▀▀▀▀▀▀▀ ▐░█▀▀▀▀▀▀▀█░▌▐░▌ ▀▀▀▀▀▀█░▌▐░▌ ▐░█▀▀▀▀▀▀▀▀▀ '
        ))
    print(
        logger.RED(
            '░▌ ▐░▌▐░▌ ▐░▌ ▐░▌▐░▌ ▐░▌▐░▌ ▐░▌ '
        ))
    print(
        logger.RED(
            '░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░▌ ▐░▌▐░█▄▄▄▄▄▄▄█░▌▐░█▄▄▄▄▄▄▄▄▄ ▐░█▄▄▄▄▄▄▄▄▄ '
        ))
    print(
        logger.RED(
            '░░░░░░░░░░▌ ▐░░░░░░░░░░░▌▐░▌ ▐░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌▐░░░░░░░░░░░▌'
        ))
    print(
        logger.RED(
            '▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀ ▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀ '
        ))
def extract_data(data, domain, email_format):
    """Walk a LinkedIn Voyager search-result blob and build a dict of
    {fullname: userinfo-list} (no email validation in this variant).

    userinfo is [profile_url, picture, firstname, middlename, surname,
    email, current_role, current_company].
    """
    # Domain is always prefixed for email generation in this variant.
    domain = '@' + domain
    collected_data = {}
    for d in data['elements'][0]['elements']:
        # Skip headless (private/hidden) profiles.
        if 'com.linkedin.voyager.search.SearchProfile' in d['hitInfo'] and d[
                'hitInfo']['com.linkedin.voyager.search.SearchProfile'][
                    'headless'] == False:
            try:
                industry = d['hitInfo'][
                    'com.linkedin.voyager.search.SearchProfile']['industry']
            except:
                industry = ""
            raw_firstname = d['hitInfo'][
                'com.linkedin.voyager.search.SearchProfile']['miniProfile'][
                    'firstName']
            raw_surname = d['hitInfo'][
                'com.linkedin.voyager.search.SearchProfile']['miniProfile'][
                    'lastName']
            profile_url = "https://www.linkedin.com/in/%s" % d['hitInfo'][
                'com.linkedin.voyager.search.SearchProfile']['miniProfile'][
                    'publicIdentifier']
            occupation = d['hitInfo'][
                'com.linkedin.voyager.search.SearchProfile']['miniProfile'][
                    'occupation']
            location = d['hitInfo'][
                'com.linkedin.voyager.search.SearchProfile']['location']
            # Try to split "Role at Company" out of the snippet heading,
            # falling back to the occupation field. On failure
            # current_company becomes the tuple (None, 'Error'), which the
            # `current_company[0] != None` checks below rely on.
            try:
                role_data = d['hitInfo'][
                    'com.linkedin.voyager.search.SearchProfile']['snippets'][
                        0]['heading']['text']
                try:
                    current_role = role_data.split(' at ')[0]
                    current_company = role_data.split(' at ')[1]
                except:
                    current_company = None, 'Error'
                    current_role = occupation
            except:
                try:
                    current_company = occupation.split(' at ')[1]
                    current_role = occupation.split(' at ')[0]
                except:
                    current_company = None, 'Error'
                    current_role = occupation
            # names() returns [first, middle, sur, full]; emails() builds
            # the address from the configured email_format.
            name_data = [raw_firstname, raw_surname]
            name_scheme = naming_scheme.names(name_data)
            firstname = name_scheme[0]
            middlename = name_scheme[1]
            surname = name_scheme[2]
            fullname = name_scheme[3]
            name_data = [firstname, middlename, surname]
            email_scheme = naming_scheme.emails(name_data, email_format,
                                                domain)
            email = email_scheme
            # Profile picture URL = rootUrl + chosen artifact segment.
            # NOTE(review): `current_company[0] != None` tests the FIRST
            # CHARACTER when current_company is a string — truthy for any
            # non-empty company name, None only for the error tuple.
            try:
                datapoint_1 = d['hitInfo'][
                    'com.linkedin.voyager.search.SearchProfile'][
                        'miniProfile']['picture'][
                            'com.linkedin.common.VectorImage']['rootUrl']
                datapoint_2 = d['hitInfo'][
                    'com.linkedin.voyager.search.SearchProfile'][
                        'miniProfile']['picture'][
                            'com.linkedin.common.VectorImage']['artifacts'][2][
                                'fileIdentifyingUrlPathSegment']
                picture = datapoint_1 + datapoint_2
                if current_company[0] != None:
                    logger.green('Successfully obtained image for %s [%s]' %
                                 (logger.GREEN(fullname),
                                  logger.GREEN(current_company)))
                else:
                    logger.green('Successfully obtained image for %s' %
                                 (logger.GREEN(fullname)))
            except:
                if current_company[0] != None:
                    logger.red(
                        'Unable to obtain image for %s [%s]' %
                        (logger.RED(fullname), logger.RED(current_company)))
                else:
                    logger.red('Unable to obtain image for %s' %
                               (logger.RED(fullname)))
                picture = None
            if current_company[0] != None:
                logger.green('Found %s [%s] at %s' %
                             (logger.GREEN(fullname), logger.GREEN(email),
                              logger.GREEN(current_company)))
                userinfo = [
                    profile_url, picture, firstname, middlename, surname,
                    email, current_role, current_company
                ]
            else:
                logger.green('Found %s [%s]' %
                             (logger.GREEN(fullname), logger.GREEN(email)))
                userinfo = [
                    profile_url, picture, firstname, middlename, surname,
                    email, current_role, 'Error'
                ]
            collected_data[fullname] = userinfo
    return collected_data
def main():
    """Entry point: parse port/credential options, discover live hosts
    (ICMP, port scan, or skip), enumerate them, and write results in the
    requested output format(s)."""
    pool = ThreadPool(processes=args.threads)
    logger.VERBOSE = args.verbose
    logger.LIVE = args.live
    start_time=strftime("%H:%M:%S", gmtime())
    filetypes=['txt','csv','html','all']
    if args.format:
        if args.format.lower() not in filetypes:
            logger.red('Did not understand the format supplied: [{}]'.format(logger.RED(args.format)))
            quit()
    # Build the port list `p` from "start-end", "a,b,c", or a single port;
    # default to the common Windows/AD ports when -p was not given.
    if args.ports:
        p = []
        ports = args.ports
        if "-" in ports:
            try:
                start = int(ports.split('-')[0])
                end = int(ports.split('-')[1])
                for port in range(start, end+1):
                    p.append(port)
            except:
                print('failed to split on "-"')
                quit()
        elif "," in args.ports:
            ports = [int(n) for n in args.ports.split(",")]
            p = ports
        elif len(args.ports) > 0 and "-" not in args.ports and "," not in args.ports:
            try:
                p.append(int(args.ports))
            except ValueError:
                print('Please specify an port number')
                quit()
    else:
        p = [53, 88, 139, 445, 464]
    if args.ports:
        logger.verbose('Ports configuration: '+str(p))
    target = args.target  # to be replaced with argparse
    hosts = get_targets(target)  # all possible hosts
    scan_type=args.enumerate
    logger.blue('Target: [{}]'.format(logger.BLUE(target)))
    logger.blue('Found {} target(s)'.format(logger.BLUE(str(len(hosts)))))
    if scan_type == None:
        logger.blue('Scan type: [{}]'.format(logger.BLUE('default')))
    else:
        logger.blue('Scan type: [{}]'.format(logger.BLUE(scan_type)))
    if args.ports:
        logger.blue('Ports given: [{}]'.format(logger.BLUE(args.ports)))
        logger.blue('Port count: [{}]'.format(logger.BLUE(str(len(p)))))
    username,password=cred_split(args.credentials)
    if username and password:
        logger.blue('Username: [{}]'.format(logger.BLUE(username)))
        logger.blue('Password: [{}]'.format(logger.BLUE(password)))
    if args.domain:
        domain=args.domain
    else:
        domain='WORKGROUP'
    logger.blue('Domain: [{}]'.format(logger.BLUE(domain)))
    logger.header('SCANNING')
    logger.blue('Start time: '+logger.BLUE(start_time))
    # Host discovery: icmp/ports map the scan over the thread pool; skip
    # treats every candidate as alive.
    if args.mode != None:
        if args.mode.upper() == 'ICMP':
            logger.verbose('Discovery mode set to ICMP')
            # alive_hosts = icmp_scan(hosts)  # all hosts that respond to icmp
            alive_hosts = pool.map(icmp_scan, hosts)
        elif args.mode.upper() == 'PORTS':
            logger.verbose('Discovery mode set to ports')
            # alive_hosts = port_scan(hosts, p)
            alive_hosts = pool.map(partial(port_scan, hosts), p)
        elif args.mode.upper() == 'SKIP':
            logger.verbose('Discovery mode set to skip, scanning all {} hosts'.format(logger.YELLOW(str(len(hosts)))))
            alive_hosts = hosts
        else:
            logger.red('Unknown option for -m! Only skip, port and icmp can be used!')
            quit()
    else:
        logger.verbose('No discovery mode set, skipping')
        alive_hosts = hosts  # all hosts that respond to icmp
    # Before enumeration, this just fixes some weird errors. Somehow the
    # ports function returns a list and stores it a list. Like: [[]]. The
    # next two lines fix that and then removes any empties.
    alive_hosts=[''.join(x) for x in alive_hosts]  # join into one list
    alive_hosts=list(filter(None, alive_hosts))  # remove empties
    alive_hosts=list(set(alive_hosts))  # removes duplicates
    # create an empty list that will store all the Host objects
    enumerated_hosts = []
    # for every host, do some enum; this could probably be done with
    # multiprocessing. -e is validated up front; both known values fall
    # through to the same enumeration call.
    if args.enumerate != None:
        if args.enumerate.lower() == 'null':
            pass
        elif args.enumerate.lower() == 'shares':
            pass
        else:
            logger.red('Unknown option for -e! Only null and shares can be used!')
            quit()
    enumerated_hosts = pool.map(hosts_enumeration, alive_hosts)
    end_time=strftime("%H:%M:%S", gmtime())
    logger.blue('End time: '+logger.BLUE(end_time))
    logger.header('RESULTS')
    results_parse(results_cache, scan_type)
    # Output: honor the requested format; 'all' strips any extension and
    # writes .txt/.csv/.html side by side; no -f means plain text output.
    if args.output:
        outfile_name=args.output
        if args.format:
            outfo=args.format.lower()
            if outfo== 'txt':
                clean_output(outfile_name)
                output(results_cache,outfile_name,scan_type)
            elif outfo == 'csv':
                clean_output(outfile_name)
                csv_output(results_cache,outfile_name,scan_type)
            elif outfo == 'html':
                clean_output(outfile_name)
                html_output(results_cache,outfile_name,scan_type)
            elif outfo == 'all':
                try:
                    outfile_name=outfile_name.split('.')[0]
                except:
                    outfile_name=outfile_name
                clean_output(outfile_name)
                output(results_cache,outfile_name+'.txt',scan_type)
                csv_output(results_cache,outfile_name+'.csv',scan_type)
                html_output(results_cache,outfile_name+'.html',scan_type)
        else:
            clean_output(outfile_name)
            output(results_cache,outfile_name,scan_type)