def restore(self):
    """Restore the ARP tables of the target and the router.

    Sends correct ARP replies (op=2) in both directions so each host
    re-learns the real IP/MAC mapping that the spoof attack overwrote.
    """
    colors.info('Restoring IP tables')
    # Tell the target the router's real MAC...
    target_arp_packet = scapy.ARP(op=2, pdst=self.target_ip,
                                  hwdst=self.target_mac,
                                  psrc=self.router_ip,
                                  hwsrc=self.router_mac)
    # ...and tell the router the target's real MAC.
    router_arp_packet = scapy.ARP(op=2, pdst=self.router_ip,
                                  hwdst=self.router_mac,
                                  psrc=self.target_ip,
                                  hwsrc=self.target_mac)
    # Send 10 packets so the restore survives packet loss.
    for _ in range(10):
        scapy.send(target_arp_packet, verbose=False)
        scapy.send(router_arp_packet, verbose=False)
    colors.success('ARP Table restored')
def networkScan(self):
    """Run an ARP scan of the network and let the user pick a target.

    Prompts for a base IP, scans, then stores the chosen host's address
    in ``self.target_ip`` / ``self.target_mac``.  Re-prompts on an
    invalid IP.
    """
    print('Enter the IP address to start scanning...')
    ip = str(input()).strip()
    if not self.validateIP(ip):
        colors.error('Please enter a valid IP address...')
        self.networkScan()
        return
    try:
        colors.info('Initiating ARP Scan')
        from lib.scanner.ip_scanner import arp_scanner
        arpScanObj = arp_scanner.ARPScan(ip=ip, start_ip=None,
                                         end_ip=None, threads=100)
        total_index, result_dict = arpScanObj.threadingScan()
        index = int(input('>> Enter the index of the target IP : '))
        if 0 < index < total_index:
            self.target_ip = result_dict[index][0]
            self.target_mac = result_dict[index][1]
            colors.success('Target IP set to : {}'.format(self.target_ip))
            colors.success('Target MAC set to : {}'.format(self.target_mac))
        else:
            # BUG FIX: an out-of-range index previously failed silently.
            colors.error('Index out of range, no target selected')
    except ImportError:
        colors.error('Could not import the required module.')
    except Exception as e:
        print(e)
def check_match():
    """Report search images that contain all 12 template sections.

    Walks every file in ``images``; for each one, template-matches the
    first 12 files in ``Template images`` and prints the image name only
    when every template cleared the similarity threshold.
    """
    # os.path.join is platform-correct; replaces manual path_symbol glue.
    list_temp_images = os.listdir(os.path.join(os.getcwd(), "Template images"))
    colors.success("template image list grabbed. ")
    list_search_images = os.listdir(os.path.join(os.getcwd(), "images"))
    colors.success("search image list grabbed ")
    print(
        "\n{}----------------------------------------------------------------------{}"
        .format(colors.red, colors.green))
    print("\n\t {}:: Similar images found are :: \n".format(colors.lightgreen))
    for path in list_search_images:
        checked = []
        src_image = cv2.imread(os.path.join("images", path), 1)
        src_gray = cv2.cvtColor(src_image, cv2.COLOR_BGR2GRAY)
        # Only the first 12 templates are checked (one per section);
        # slicing also avoids an IndexError when fewer exist.
        for template_path in list_temp_images[:12]:
            template_image = cv2.imread(
                os.path.join("Template images", template_path), 0)
            result = cv2.matchTemplate(src_gray, template_image,
                                       cv2.TM_CCOEFF_NORMED)
            thresh = 0.9
            loc = np.where(result > thresh)
            # NOTE(review): comparing the string form of the row/column
            # index arrays is a fragile "no match" test — TODO confirm
            # the intended condition (empty ``loc`` arrays?).
            if str(loc[0]) == str(loc[1]):
                checked.append("False")
                break
            else:
                checked.append("True")
        if "False" not in checked:
            print("Image : {}".format(path))
def startSpoof(self):
    """Continuously poison the ARP caches of the target and the router.

    Loops until CTRL+C, sending one spoofed reply to each side per
    round and reporting the running packet count.
    """
    start_time = time.time()
    colors.info('ARP Spoofing started...')
    colors.info('Press CTRL+C to exit...')
    try:
        while True:
            # Fresh pair of spoofed replies on every round.
            packet_for_target, packet_for_router = self.generatePacket()
            scapy.send(packet_for_target, verbose=False)
            scapy.send(packet_for_router, verbose=False)
            self.no_of_packets += 1
            print('[+] Packets sent : {}'.format(self.no_of_packets),
                  end='\r')
            time.sleep(self.INTER)
    except KeyboardInterrupt:
        colors.info('Stopping ARP spoof')
    except Exception as e:
        print(e)
    finally:
        # self.restore()
        end_time = time.time()
        colors.success('ARP Spoof completed in : {}'
                       .format(end_time - start_time))
def get_router_IP(self):
    """Discover the router (gateway) IP and store it in ``self.router_ip``.

    Parses the output of ``route -n`` and offers the second IP found
    for confirmation; the user may also type a different IP.
    """
    colors.info('Finding Router IP address...')
    command_process = subprocess.Popen(['route', '-n'],
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
    output, error = command_process.communicate()
    if error:
        print(error.decode('utf-8'))
    output = output.decode('utf-8')
    ip_candidates = re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b",
                               output)
    colors.success('Router IP found is : {}'.format(ip_candidates[1]))
    val = str(input('Continue with this IP address(Y) or enter a '
                    'different IP address : ')).strip()
    if val == 'Y' or val == 'y':
        self.router_ip = ip_candidates[1]
        colors.info('Router IP set to : {}'.format(self.router_ip))
    elif self.validateIP(val):
        self.router_ip = val
        colors.info('Router IP set to : {}'.format(self.router_ip))
    else:
        colors.error('Please enter a valid Router IP address')
        # BUG FIX: was ``self.findRouterIP()`` — no such method is
        # visible; retry via this method instead. TODO confirm no
        # ``findRouterIP`` alias exists elsewhere in the class.
        self.get_router_IP()
def startScan(self):
    """
    Distributes the list of scans into multiple processor
    and starts the scan
    """
    colors.info('Hash scanning started...')
    colors.info('Press CTRL+C to stop...')
    start = time.time()
    try:
        # One worker process per scan mode.
        workers = [
            multiprocessing.Process(target=self.modeScan, args=(mode, ))
            for mode in self.list_scans
        ]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
    except KeyboardInterrupt:
        colors.error('Stopping the process...')
    except Exception as e:
        print(e)
    finally:
        end = time.time()
        colors.success('Completed in {}'.format(end - start))
    resultDict = self.parseResult()
    return resultDict
def email(args):
    """Gather email addresses from ``args.url``; optionally save to a file.

    Exits with status 1 when no URL was supplied.
    """
    if not args.url:
        colors.error('Please enter an URL for finding emails')
        LOGGER.error('[-] Please enter an URL for finding emails')
        sys.exit(1)
    try:
        from lib.others.info_gathering.finder import finding_email
        colors.info('Performing email gathering over : {}'.format(args.url))
        finder = finding_email.FindingEmails(args.url)
        found_emails = finder.parse_emails()
        if args.output:
            # Ensure the output path carries a .txt extension.
            out_file = (args.output if args.output.endswith('.txt')
                        else args.output + '.txt')
            with open(out_file, 'w') as f:
                f.write('---[!] Emails---\n\n')
                for address in found_emails:
                    f.write(str(address) + os.linesep)
            colors.success('File has been saved successfully')
    except ImportError:
        colors.error('Could not import the required module.')
        LOGGER.error('[-] Could not import the required module.')
    except Exception as e:
        LOGGER.error(e)
def comment(args):
    """Gather HTML comments from ``args.url``; optionally save to a file.

    Exits with status 1 when no URL was supplied.
    """
    if not args.url:
        colors.error('Please enter an URL for finding comments')
        LOGGER.error('[-] Please enter an URL for finding comments')
        sys.exit(1)
    try:
        from lib.others.info_gathering.finder import finding_comment
        colors.info('Performing comment gathering over : {}'.format(args.url))
        findCommnentObj = finding_comment.FindingComments(args.url)
        comment_dict = findCommnentObj.parse_comments()
        if args.output:
            # Ensure the output path carries a .txt extension.
            out_file = (args.output if args.output.endswith('.txt')
                        else args.output + '.txt')
            with open(out_file, 'w') as f:
                f.write('---[!] Comments---\n\n')
                for code, text in comment_dict.items():
                    f.write(str(code) + ' : ' + str(text) + os.linesep)
            colors.success('File has been saved successfully')
    except ImportError:
        colors.error('Could not import the required module.')
        LOGGER.error('[-] Could not import the required module.')
    except Exception as e:
        LOGGER.error(e)
def save():
    """Dump the scraped GitHub profile data to a CSV file.

    Uses the global ``args.path`` when it ends with ``.csv``; otherwise
    writes ``<data.header>.csv`` on the user's Desktop.
    """
    try:
        import data
    except ImportError:
        colors.error('Error importing data module')
        sys.exit(1)
    fields = ['Username', 'Repositories', 'Stars', 'Followers',
              'Following', 'Email']
    # One row per scraped username, columns aligned with ``fields``.
    rows = []
    for i in range(len(data.username_list)):
        rows.append([
            '@' + data.username_list[i],
            data.repo_list[i].strip(),
            data.star_list[i].strip(),
            data.followers_list[i].strip(),
            data.following_list[i].strip(),
            data.email_list[i],
        ])
    file_path = args.path
    if file_path is None or not file_path.endswith('.csv'):
        csv_file = data.header + '.csv'  # Name of csv file
        # expanduser is portable; os.environ["HOME"] is unset on Windows.
        file_path = os.path.join(os.path.expanduser('~'), 'Desktop', csv_file)
    try:
        # newline='' prevents csv.writer emitting blank rows on Windows.
        with open(file_path, 'w', newline='') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow(fields)
            csvwriter.writerows(rows)
        colors.success("Saved the data into " + file_path, True)
    except FileNotFoundError:
        colors.error("Please enter valid path.")
        sys.exit()
def whois(args):
    """Perform a Whois lookup on ``args.ip`` and print/save the result.

    Exits with status 1 when no IP was supplied.
    """
    if not args.ip:
        colors.error('Please enter an IP for Whois lookup')
        LOGGER.error('[-] Please enter an IP for Whois lookup')
        sys.exit(1)
    try:
        from lib.others.whois_lookup import lookup
        data = lookup.whois_lookup(args.ip)
        colors.success('Information after Whois lookup: \n')
        for field, value in data.items():
            print(field, ':', value)
        if args.output:
            # Ensure the output path carries a .txt extension.
            out_file = (args.output if args.output.endswith('.txt')
                        else args.output + '.txt')
            with open(out_file, 'w') as f:
                f.write('Information after Whois lookup: \n\n')
                for field, value in data.items():
                    f.write(str(field) + ' : ' + str(value) + os.linesep)
            colors.success('File has been saved successfully')
    except ImportError:
        colors.error('Could not import the required module.')
        LOGGER.error('[-] Could not import the required module.')
    except Exception as e:
        LOGGER.error(e)
def ssl(args):
    """Scan ``args.url`` via the SSL Labs API and optionally save results.

    Exits with status 1 when no URL was supplied.
    """
    if not args.url:
        colors.error('Please enter an URL for SSL scanning')
        LOGGER.error('[-] Please enter an URL for SSL scanning')
        sys.exit(1)
    try:
        from lib.scanner.ssl_scanner import ssl_scanner
        colors.info('SSL scan using SSL Labs API')
        raw_result = ssl_scanner.analyze(args.url)
        ssl_data = ssl_scanner.vulnerability_parser(raw_result)
        if args.output:
            # Ensure the output path carries a .txt extension.
            out_file = (args.output if args.output.endswith('.txt')
                        else args.output + '.txt')
            with open(out_file, 'wt') as f:
                f.write('[+] Vulnerability Scan Result : \n\n')
                for field, value in ssl_data.items():
                    f.write(str(field) + ' : ' + str(value) + os.linesep)
            colors.success('File has been saved successfully')
    except ImportError:
        colors.error('Could not import the required module.')
        LOGGER.error('[-] Could not import the required module.')
    except Exception as e:
        LOGGER.error(e)
def getMAC(self, IP, name):
    """Resolve and confirm the MAC address for ``IP``.

    Broadcasts an ARP who-has, shows the discovered MAC, and lets the
    user accept it or type a different one.  Returns the chosen MAC.
    """
    arp_packet = scapy.ARP(pdst=IP)
    broadcast = scapy.Ether(dst='ff:ff:ff:ff:ff:ff')
    arp_broadcast = broadcast/arp_packet
    broadcast = scapy.srp(arp_broadcast, timeout=1, verbose=False)[0]
    mac_addr_str = self.capture_output(broadcast)
    mac_addr = re.findall(r'\w\w:\w\w:\w\w:\w\w:\w\w:\w\w', mac_addr_str)[0]
    mac_addr = str(mac_addr).strip()
    colors.success('Found MAC address for {} : {} is : {}'
                   .format(name, IP, mac_addr))
    val = str(input('>> Enter(Y/y) to continue or enter MAC address : '))\
        .strip()
    if val == 'Y' or val == 'y':
        return mac_addr
    elif self.validateMAC(val):
        colors.info('Setting MAC address for {} : {} : {}'
                    .format(name, IP, val))
        return val
    else:
        colors.error('Please enter a valid MAC address...')
        # BUG FIX: propagate the retry's result; previously the
        # recursive call's value was dropped and None was returned.
        return self.getMAC(IP, name)
def parseResult(self, t1):
    """
    Prints the live IP with their MAC address

    :t1: Start time of the scan
    :returns: (next free index, {index: [ip, mac]})
    """
    print('-' * 36)
    print('IP'.ljust(15, ' ') + '|' + ' MAC'.ljust(19, ' ') + '|')
    print('-' * 36)
    response_dict = {}
    index = 1
    for packets in self.answ_packets:
        for answer in packets:
            row = '{}. {} : {}'.format(index, answer[1].psrc, answer[1].src)
            response_dict[index] = [answer[1].psrc, answer[1].src]
            print(row.ljust(33, ' '), '|')
            print('-' * 35)
            index += 1
    finished_at = time.time()
    colors.success('Completed in {}'.format(finished_at - t1))
    return index, response_dict
def search_vulnerabilities():
    """Fetch and print CVE summaries for the vendor from cvedetails.com."""
    feed_url = ("https://www.cvedetails.com/json-feed.php"
                "?vendor_id=6538&orderby=3")
    entries = requests.get(feed_url).json()
    colors.success("Possible vulnerabilities:")
    for entry in entries:
        print(entry["summary"], end="\n\n")
def print_data(dict_value):
    """Pretty-print the vulnerability scan result dictionary."""
    colors.success('Vulnerability Scan Result : ')
    for key, item in dict_value.items():
        # Booleans are printed raw; everything else goes through get_value.
        shown = item if isinstance(item, bool) else get_value(key, item)
        print('[+] ', key, ' : ', shown)
def parse_comments(self):
    """Run the comment finder and report every comment discovered."""
    self.find_comment()
    if not self.found_comments:
        colors.error('No comment found')
        return
    for comment_code, comment in self.found_comments.items():
        colors.success('Found for {} : {}'.format(comment_code, comment))
def parse_emails(self):
    """Run the email finder and return the list of addresses found."""
    self.find_email()
    if len(self.found_emails) > 0:
        for email in self.found_emails:
            colors.success('Found {}'.format(email))
    else:
        # BUG FIX: a redundant plain print() duplicated this message.
        colors.error('No email found')
    return self.found_emails
def is_root():
    """
    Checks if program is running as root or not
    """
    if os.geteuid() == 0:
        colors.success('Running as root')
    else:
        colors.error('Please run as root')
        sys.exit(1)
def gather_header(self):
    """Fetch the target's HTTP response headers, print and return them."""
    try:
        response = self.get_response()
        headers_dict = response.headers
        colors.info('Header Details')
        for header_name, header_value in headers_dict.items():
            colors.success(' {} : {} '.format(header_name, header_value))
        return headers_dict
    except Exception as e:
        colors.error(e)
def parse_comments(self):
    """Run the comment finder; report and return ``{code: comment}``."""
    self.find_comment()
    comment_dict = {}
    if not self.found_comments:
        colors.error('No comment found')
        return comment_dict
    for comment_code, comment in self.found_comments.items():
        colors.success('Found for {} : {}'.format(comment_code, comment))
        comment_dict[comment_code] = comment
    return comment_dict
def os_scan(self):
    """Run an nmap OS-detection scan (-O) and print any OS matches."""
    colors.info('OS Scan running on: {}'.format(self.target))
    self.nm.scan(self.target, arguments="-O")
    hosts = self.nm.all_hosts()
    if not hosts:
        # BUG FIX: an empty scan result previously raised IndexError.
        colors.info('No OS matches for host')
        return
    host = hosts[0]  # hoisted: was looked up three times per call
    osmatches = self.nm[host]["osmatch"]
    if len(osmatches) != 0:
        colors.success('OS Scan results of: {}'.format(host))
        for osmatch in osmatches:
            print("[+] Name: {}".format(osmatch["name"]))
            print("[+] Accuracy: {}".format(osmatch["accuracy"]))
    else:
        colors.info('No OS matches for host')
def start_engine(self):
    """Fetch the page, score every CMS payload, and report the winner."""
    self.response = requests.get(self.url)
    self.soup_obj = BeautifulSoup(self.response.text, "html.parser")
    colors.info("Starting CMS Detect engine...")
    colors.info("Reading payloads...")
    self.read_payloads()
    colors.info("Detecting CMS...")
    self.detect_cms()
    # Framework with the highest payload-match score wins.
    best_framework = max(self.scores.items(), key=operator.itemgetter(1))[0]
    colors.success("Detected framework: {}".format(best_framework))
def process_display(verbose, type, message):
    """Route ``message`` to the matching colors printer.

    Messages are shown only when verbosity resolves to 0; the global
    ``args.v`` flag forces that when it is False.
    """
    if args.v is False:
        verbose = 0
    if verbose != 0:
        return
    # Dispatch table: message type -> printer.
    printers = {
        0: colors.success,
        1: colors.error,
        2: colors.process,
        3: colors.info,
    }
    printer = printers.get(type)
    if printer is not None:
        printer(message)
def scan(self):
    """Probe the target URL for Linux LFI vulnerabilities.

    Every linuxPrefix/payload combination from the payload file is
    requested; when nothing hits, the same combinations are retried
    with a trailing %00 (NULL byte) to defeat extension filtering.
    """
    null_byte = "%00"
    url = self.url
    if not self.check_url(url):
        colors.error('An error occured, make sure provided URL is valid '
                     'and accessible.')
        return
    urls = [url + _prefix for _prefix in self.payload_data["linuxPrefix"]]
    # First pass: plain payloads.  (Message templates are passed in to
    # keep the exact original output of each pass.)
    success_count = self._probe(urls, "",
                                "LFI Detected!: {}",
                                "Syntax Parse Error: {}")
    if success_count == 0:
        # Second pass: NULL-byte suffix.
        colors.info("Now creating payloads with one NULL BYTE suffix.")
        success_count = self._probe(urls, null_byte,
                                    "LFI Detected! : {}",
                                    "Syntax Parse Error:{}")
    if success_count == 0:
        colors.error('No LFI Detected')

def _probe(self, urls, suffix, hit_msg, syntax_msg):
    """Request every url+payload(+suffix); return the number of LFI hits."""
    _matches = self.payload_data["linux"]
    _payloads = self.payload_data["linux"].keys()
    hits = 0
    for base_url in urls:
        for _payload in _payloads:
            scan_url = base_url + _payload + suffix
            res = requests.get(scan_url, headers=self.scan_headers)
            # A response containing any known marker means the payload
            # read the target file.
            for _match in _matches[_payload]:
                if _match in res.text:
                    colors.success(hit_msg.format(scan_url))
                    hits += 1
            if "syntax error" in res.text:
                colors.error(syntax_msg.format(scan_url))
    return hits
def analyze(url):
    """Start an SSL Labs scan of ``url`` and poll until it finishes.

    Returns the final API response (status READY or ERROR).
    """
    global analyze_payload
    colors.success('Scanning')
    analyze_payload['host'] = url
    resp = request_api(API_URL, analyze_payload)
    # Drop startNew so the polling requests attach to the running scan
    # instead of restarting it.  BUG FIX: the default makes repeated
    # analyze() calls safe — a bare pop raised KeyError the second time.
    analyze_payload.pop('startNew', None)
    while resp['status'] != 'READY' and resp['status'] != 'ERROR':
        time.sleep(30)
        resp = request_api(API_URL, analyze_payload)
    return resp
def pingScan(self, end):
    """Ping one host (``self.ip`` + '.' + ``end``) and report its state."""
    ping_ip = self.ip + '.' + str(end)
    # checkOS returns the platform-appropriate ping command list.
    command = self.checkOS()
    command.append(ping_ip)
    process = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               shell=False)
    stdout, stderr = process.communicate()
    reachable = self.checkStatus(stdout.decode('utf-8'))
    if reachable:
        colors.success('Open : {}'.format(ping_ip))
    else:
        colors.error('Closed : {}'.format(ping_ip))
def main():
    """Entry point: parse CLI options (or prompt interactively), slice the
    source image into 12 template sections, then match them against every
    target image.
    """
    # Directory names are module-level globals shared with check_match().
    global template_image_dir_name
    global target_images_dir_name
    source_path = None
    logo.banner()
    print("\n")
    try:
        import argparse
        import sys
    except ImportError:
        print("[-] Error importing argparse or sys module")
        exit(1)
    parser = argparse.ArgumentParser(description='A program which given a source image and a set of target images '
                                     'will match the source image to the target images to find its matches')
    parser.add_argument('-p', '--path', help=' Path of source image')
    parser.add_argument('-v', '--version', action='version',
                        version='%(prog)s 1.0.0(beta)', help='Prints the version '
                        'of Photoroid')
    parser.add_argument('-t', '--target', help=' Path of target images directory',
                        default=target_images_dir_name_default)
    parser.add_argument('-o', '--output', help='Path of template images directory',
                        default=template_image_dir_name_default)
    # Only parse when arguments were actually given; otherwise fall back
    # to the interactive prompt below.
    if len(sys.argv) > 1:
        args = parser.parse_args()
        source_path = args.path
        template_image_dir_name = args.output
        target_images_dir_name = args.target
    if source_path is None:
        source_path = str(
            input("[ {}!{} ] Enter path of source image: {}".format(colors.white, colors.end, colors.lightgreen)))
    print("\n")  # Some serious end of line, for UI purpose LOL ...
    # Getting the image to be searched
    source = cv2.imread(source_path, cv2.IMREAD_COLOR)
    colors.process("Creating template sections of source image.")
    start_dir = os.getcwd()  # Saving the start directory
    # Creating template sections of source image.
    # NOTE(review): template_images() appears to change the working
    # directory — the chdir below restores it. TODO confirm.
    template_images(source)
    colors.success("12 template sections of source image created.")
    os.chdir(start_dir)
    colors.process("Setting 'Core' as current directory.")
    check_match()
    print("{}\nThank you for using my tool\n".format(colors.blue))
def check_match():
    """Print the names of search images that contain every one of the 12
    template sections of the source image."""
    try:
        import numpy as np
    except ImportError:
        print("[-] Error importing numpy module.")
        exit(1)
    cwd = os.getcwd()
    list_temp_images = os.listdir(os.path.join(cwd, template_image_dir_name))
    colors.success("Template image list grabbed.")
    list_search_images = os.listdir(os.path.join(cwd, target_images_dir_name))
    colors.success("Search image list grabbed ")
    print(
        "\n{}----------------------------------------------------------------------{}"
        .format(colors.red, colors.green))
    print("\n\t {}:: Similar images found are :: \n".format(colors.lightgreen))
    for image_name in list_search_images:
        verdicts = []
        # Candidate image, converted to grayscale for template matching.
        candidate = cv2.imread(
            os.path.join(target_images_dir_name, image_name),
            cv2.IMREAD_COLOR)
        candidate_gray = cv2.cvtColor(candidate, cv2.COLOR_BGR2GRAY)
        # Check the first 12 templates against the candidate.
        for pos in range(12):
            template = cv2.imread(
                os.path.join(template_image_dir_name, list_temp_images[pos]),
                cv2.IMREAD_GRAYSCALE)
            similarity = cv2.matchTemplate(candidate_gray, template,
                                           cv2.TM_CCOEFF_NORMED)
            thresh = 0.9
            hits = np.where(similarity > thresh)
            if str(hits[0]) == str(hits[1]):
                verdicts.append("False")
                break
            else:
                verdicts.append("True")
        if "False" not in verdicts:
            print("Image : {}".format(image_name))
def startAttack(self):
    """Launch the ping-of-death attack; CTRL+C stops it and prints timing."""
    # BUG FIX: start the clock before the try block so the
    # KeyboardInterrupt handler can always compute the elapsed time —
    # previously t1 could be unbound if the interrupt arrived before
    # its assignment inside the try.
    t1 = time.time()
    try:
        colors.info('Ping of death attack on: {}'
                    .format(self.target_ip))
        colors.success('Ping of death attack started, press CTRL+C to '
                       'stop...')
        self.attack()
    except KeyboardInterrupt:
        t2 = time.time()
        colors.success('Completed in time: {}'.format(t2-t1))
def restore(self):
    """
    Restore the network services
    """
    colors.info('[!] Restoring the network services...')
    # Stop monitor mode, then bring the normal networking stack back.
    commands = (
        'airmon-ng stop {}'.format(self.monFace),
        'service networking restart',
        'service network-manager restart',
    )
    for command in commands:
        self.quickExecute(command)
    colors.success('Restored')
def collect(self):
    """Gather the results of every partition of this RDD.

    Missing partitions are scheduled on the job server and the
    partition-discover service is polled until every result arrives.
    (Python 2 source: note the print statements below.)
    """
    # 1. create partitions from rdds (partition_num = len(workers))
    # 2. for every target_partition in partitions, find in partition_discover:
    #    - if exists, fetch result from corresponding worker
    partition_discover = self.context.partition_discover
    results = [partition_discover.get_partition(partition.uuid)
               for partition in self.partitions]
    missing_index = [None if result is not None else i
                     for i, result in enumerate(results)]
    # NOTE(review): Python 2 — filter() returns a list here; under
    # Python 3 this would need list(...) for the len() call below.
    missing_index = filter(lambda m: m is not None, missing_index)
    print info("collect", missing_index)
    # add to job server if missing
    job_server = self.context.job_server
    for i in range(self.partition_num):
        # - if doesn't exist, or previous try failed
        if results[i] is None:
            # - broadcast a `job` with partition uuid
            job_server.add(self.partitions[i])
    # 3. keep discovering rdds until found the target_rdd
    while True:
        missing_index = [None if result is not None else i
                         for i, result in enumerate(results)]
        missing_index = filter(lambda m: m is not None, missing_index)
        # NOTE(review): `is 0` relies on CPython small-int caching;
        # `== 0` is the safe comparison — TODO confirm before changing.
        if len(missing_index) is 0:
            break
        # print 'keep discovering', missing_index
        for i in missing_index:
            partition = self.partitions[i]
            # try to fetch again
            results[i] = partition_discover.get_partition(partition.uuid)
            # if success this time
            if results[i] is not None:
                # 4. stop broadcast the `job`
                print success("Got " + str(i) + ":" + str(results[i]))
                job_server.remove(partition)
        gevent.sleep(0.1)
    # 5. retrieve result of the rdd: concatenate all partition results
    result = []
    for element in results:
        result += element
    return result
def typecheck(p):
    """Type-check a parsed Declaration and return the populated symbol
    table ``init``."""
    if type(p) == Declaration:
        # Reset the bookkeeping entries before walking the tree.
        init["__children__"] = []
        init["__len__"] = 0
        init.pop("it", None)
        p.checkType(init)
        print(colors.success("Type Check finished! The Symbol Table is:"))
        pprint.pprint(init)
        return init
def add_tag(self, tag):
    """Append ``tag`` to every fetched user story's tag list.

    Returns ``(succeed, failed)``: lists of colored status messages.
    """
    self.check_user_stories()
    succeed = []
    failed = []
    for user_story in self.user_stories['Items']:
        existing = user_story['Tags']
        # BUG FIX: avoid producing ", tag" when the story has no tags yet.
        new_tags = existing + ', ' + tag if existing else tag
        response = self.tp.post('UserStories', {
            'Id': user_story['Id'],
            'Tags': new_tags
        })
        if response.status_code == 200:
            succeed.append(colors.success(
                "User story %d add tag '%s' success"
                % (user_story['Id'], tag)))
        else:
            failed.append(colors.error(
                "User story %d add tag '%s' failed"
                % (user_story['Id'], tag)))
    return succeed, failed
def move_user_stories(self, to_state):
    """Move every fetched user story to ``to_state``.

    Returns ``(succeed, failed)``: lists of colored status messages.
    """
    self.check_user_stories()
    succeed = []
    failed = []
    for user_story in self.user_stories['Items']:
        response = self.tp.post('UserStories', {
            'Id': user_story['Id'],
            'EntityState': {'Id': self.tp.getStateCode(to_state)}
        })
        ok = response.status_code == 200
        message = "User story {} \"{}\" -> \"{}\" {}".format(
            user_story['Id'],
            user_story['EntityState']['Name'],
            to_state,
            'success' if ok else 'failed',
        )
        if ok:
            succeed.append(colors.success(message))
        else:
            failed.append(colors.error(message))
    return succeed, failed
__all__ = ["colors"]

# Self-test: exercise every public printer when run directly.
if __name__ == "__main__":
    import colors as console

    demo_calls = [
        (console.warning, "Warning example"),
        (console.error, "Error example"),
        (console.log, "Log example"),
        (console.success, "Success example"),
        (console.header, "Header example"),
        (console.title, "Title example"),
    ]
    for printer, text in demo_calls:
        printer(text)
# 3. run the target_partition try: # - if narrow_dependent: # - do it right away result = partition.get() except DependencyMissing: # - if wide_dependent: # - try search dep_partitions in rdds # - if exists, fetch result from corresponding worker not_exists_in_discover = lambda p: partition_discover.get_partition(p.uuid) is None missing_partitions = filter(not_exists_in_discover, partition.parent_list) assert len(missing_partitions) > 0 # otherwise there won't be exception # - if doesn't exist, or previous try failed # 1. for every partition of dep_rdd: for missing in missing_partitions: # 1. append to job server # 2. broadcast a `job` with partition uuid job_server.add(missing) # 2. append current job back to jobs job_discover.suspend_job(next_job) print warn('Wide dependency missing. Suspend the job.') # 3. DO NOT sleep(NETWORK_LATENCY * 2). it's better to it locally to avoid network transfer # 3. continue to next job continue print success('got result:'+partition.uuid) # 4. add result to the partition server partition_server.add(uuid=partition.uuid, result=result)