def init_port(port_choice):
    """Parse the port argument and populate the module-level ports_pool.

    Accepts a file path (one port spec per line), the "all ports" keyword,
    or a comma-separated mix of single ports and "lo-hi" ranges.
    Exits with an error message on any invalid specification.
    """
    # file: delegate line-by-line parsing to port_use_file
    if os.path.isfile(port_choice):
        port_use_file(port_choice)
    # all ports
    elif port_choice == cli_all_port:
        set_port_string("0-65535")
        for port in range(0, 65536):
            ports_pool.append(str(port))
    # comma-separated single ports and/or ranges
    else:
        specify_ports = port_choice.split(',')
        for portchunk in specify_ports:
            portchunk = portchunk.strip()
            # range, e.g. "80-90"
            if "-" in portchunk:
                port_split = portchunk.split("-")
                # FIX: also reject range endpoints outside 0-65535; the
                # original only validated digits and ordering, so e.g.
                # "70000-70001" was silently accepted.
                if not (len(port_split) == 2
                        and port_split[0].isdigit()
                        and port_split[1].isdigit()
                        and int(port_split[0]) <= int(port_split[1])
                        and 0 <= int(port_split[0]) <= 65535
                        and 0 <= int(port_split[1]) <= 65535):
                    exit(cool.red("[-] Port range error\n"))
                else:
                    for port in range(int(port_split[0]), int(port_split[1]) + 1):
                        ports_pool.append(str(port))
            # single port
            elif str(portchunk).isdigit() and 0 <= int(portchunk) <= 65535:
                ports_pool.append(portchunk)
            else:
                exit(cool.red("[-] Specify port error\n"))
        set_port_string(port_choice.strip())
    if len(ports_pool) != 0:
        print(cool.orange("[+] Load: {0} ports".format(len(ports_pool))))
    else:
        exit(cool.red("[-] Cannot find port"))
def init_target(target_choice):
    """Parse the target argument and fill targets_pool / domains_pool.

    Accepts a file path, a directory (all files inside are loaded), or a
    comma-separated list of targets; a target may be a URL, host:port,
    plain IP/domain, or a dotted IP with per-octet ranges like "10.0.0.1-20".
    Exits on invalid input; prints a summary of loaded targets at the end.
    """
    # target choice
    # file: delegate to line-based loader
    if os.path.isfile(target_choice):
        target_use_file(target_choice)
    # dir: load every file found under the directory
    elif os.path.exists(target_choice):
        for _ in get_all_file(target_choice):
            target_use_file(_)
    # other: comma-separated target specs
    else:
        specify_targets = target_choice.split(',')
        for targetschunk in specify_targets:
            # strip scheme and port: "http://host:8080/x" -> "host"
            if targetschunk.startswith("http"):
                targetschunk = ((targetschunk.strip()).split("/")[2]).split(":")[0]
            else:
                targetschunk = (targetschunk.strip()).split(":")[0]
            # one list of candidate values per octet position (1..4)
            t_dic = {1: [], 2: [], 3: [], 4: []}
            # range, e.g. "192.168.1.1-20"
            if "-" in targetschunk:
                sc_chunk = targetschunk.split(".")
                if len(sc_chunk) != 4:
                    exit(cool.red("[-] Targets Error\n"))
                else:
                    for r in range(0, 4):
                        if "-" in sc_chunk[r]:
                            sc_chunk_split = sc_chunk[r].split("-")
                            # endpoints must be digits, ordered, and within 0-255
                            if not (len(sc_chunk_split) == 2 and sc_chunk_split[0].isdigit() and sc_chunk_split[1].isdigit() and int(sc_chunk_split[0]) <= int(sc_chunk_split[1]) and 0 <= int(sc_chunk_split[0]) <= 255 and 0 <= int(sc_chunk_split[1]) <= 255):
                                exit(cool.red("[-] Targets range error\n"))
                            else:
                                # NOTE(review): this branch is unreachable — the
                                # guard above already requires len(sc_chunk_split) == 2
                                if len(sc_chunk_split) == 1:
                                    t_dic[r + 1].append(sc_chunk[0])
                                # expand the inclusive octet range (appends ints;
                                # str() below normalizes when formatting)
                                for _ in range(int(sc_chunk_split[0]), int(sc_chunk_split[1]) + 1):
                                    t_dic[r + 1].append(_)
                        else:
                            if not (sc_chunk[r].isdigit() and 0 <= int(sc_chunk[r]) <= 255):
                                exit(cool.red("[-] Specify error ip address\n"))
                            t_dic[r + 1].append(sc_chunk[r])
                    # cartesian product of the four octet lists -> concrete IPs
                    for item in itertools.product(t_dic[1], t_dic[2], t_dic[3], t_dic[4]):
                        targets_pool.append(
                            "{0}.{1}.{2}.{3}".format(str(item[0]), str(item[1]), str(item[2]), str(item[3])))
            # single or multi: classify as domain or IP by regex
            else:
                if domain_pattern.findall(targetschunk):
                    domains_pool.append(targetschunk)
                    # domains need DNS resolution later
                    set_global_parse_flag(True)
                elif ip_pattern.findall(targetschunk):
                    targets_pool.append(targetschunk)
                else:
                    print(cool.fuchsia("[!] Invalid target %s" % targetschunk))
    if len(targets_pool) != 0 or len(domains_pool) != 0:
        print(cool.white("[+] Load {0} ip and {1} domain".format(len(targets_pool), len(domains_pool))))
    else:
        exit(cool.red("[-] Cannot find target\n"))
def check_cmd_status():
    """Verify the configured scan command works by probing port 0 on localhost.

    Runs `scan_command + '0 127.0.0.1'` and checks that the combined
    stdout/stderr mentions "0/tcp"; exits with an error otherwise.
    """
    try:
        test_cmd = scan_command + '0 127.0.0.1'
        cmd = subprocess.Popen(test_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if not ("0/tcp" in str(cmd.communicate())):
            exit(cool.red("[-] command %s error!\n" % test_cmd))
        else:
            # FIX: "sucess" -> "success" (message typo)
            print(cool.white("[+] Test command success"))
    # FIX: was a bare `except:`, which also caught the SystemExit raised by
    # the exit() above and replaced its specific message with this generic one.
    except Exception:
        exit(cool.red("[-] Test command failed, Please ensure config.py nmap command is correct\n"))
def counter_enter(encodeflag, head, tail, *args): args = args[0] # counter lack file argument if len(args) == 2 and args[0] == tool_range[2] and args[ 1] in counter_cmd_range: exit(CRLF + cool.red("[-] {0} need specify the file path".format( tool_range[2]))) # counter error "vf" argument elif len(args) >= 2 and args[0] == tool_range[2] and args[ 1] not in counter_cmd_range: exit(CRLF + cool.red( "[-] Need {0}'s options, choose from '{1}' or '{2}' or '{3}'". format(tool_range[2], counter_cmd_range[0], counter_cmd_range[1], counter_cmd_range[2]))) # counter elif len(args) >= 3 and args[0] == tool_range[2] and args[ 1] in counter_cmd_range: if os.path.isfile(args[2]): # counter f file if len(args) == 3 and args[1] == just_save_counter: counter_operator(args[2], True, False, encodeflag, head, tail) # counter v file elif len(args) == 3 and args[1] == just_view_counter: counter_operator(args[2], False, True, encodeflag, head, tail) # counter fv file elif len(args) == 3 and args[1] == save_and_view: counter_operator(args[2], False, False, encodeflag, head, tail) # counter v file 100 elif len(args) == 4 and args[1] == just_view_counter and str( args[3]).isdigit(): counter_operator(args[2], False, True, encodeflag, head, tail, view_count=int(args[3])) # counter fv file 100 elif len(args) == 4 and args[1] == save_and_view and str( args[3]).isdigit(): counter_operator(args[2], False, False, encodeflag, head, tail, view_count=int(args[3])) else: exit(CRLF + cool.red("[-] Some unexpected input"))
def create_output_file():
    """Create the output directory (module-level output_path) if it is missing.

    Exits with an error message when the directory cannot be created.
    """
    if not os.path.exists(output_path):
        try:
            os.mkdir(output_path)
            print(cool.white("[+] Create output path success"))
        # FIX: was a bare `except:`; os.mkdir failures raise OSError, and the
        # bare form would also swallow SystemExit/KeyboardInterrupt.
        except OSError:
            exit(cool.red("[-] Create output path: %s failed\n" % output_path))
def checkurl(urlike):
    """Validate that urlike looks like a URL/URI (domain or dotted IP form).

    Returns True when the pattern matches with a non-empty host group;
    exits with an error message otherwise.
    """
    # FIX: the pattern was built from non-raw strings, so every '\b' was the
    # literal backspace character (0x08) instead of a regex word boundary —
    # the dotted-IP alternative could never match real input.  Raw strings
    # restore the intended boundaries (and avoid invalid-escape warnings).
    site_pattern = re.compile(
        r'^(?:(?:ht|f)tp(?:s?)\:\/\/|~\/|\/)?(?:\w+:\w+@)?((?:(?:[-\w\d{1-3}]+\.)+(?:com|org|net|'
        r'gov|mil|biz|info|mobi|name|aero|jobs|edu|co\.uk|ac\.uk|it|fr|tv|museum|asia|local|travel'
        r'|[a-z]{2}))|((\b25[0-5]\b|\b[2][0-4][0-9]\b|\b[0-1]?[0-9]?[0-9]\b)(\.(\b25[0-5]\b|\b[2]'
        r'[0-4][0-9]\b|\b[0-1]?[0-9]?[0-9]\b)){3}))(?::[\d]{1,5})?(?:(?:(?:\/(?:[-\w~!$+|.,=]|%'
        r'[a-f\d]{2})+)+|\/)+|\?|#)?(?:(?:\?(?:[-\w~!$+|.,*:]|%[a-f\d{2}])+=?(?:[-\w~!$+|.,*:=]|%'
        r'[a-f\d]{2})*)(?:&(?:[-\w~!$+|.,*:]|%[a-f\d{2}])+=?(?:[-\w~!$+|.,*:=]|%[a-f\d]{2})*)*)*'
        r'(?:#(?:[-\w~!$ |\/.,*:;=]|%[a-f\d]{2})*)?$', re.IGNORECASE)
    try:
        # group 1 of the first match is the host part; empty means no match
        if len(site_pattern.findall(str(urlike))[0][0]) > 0:
            return True
        else:
            exit("[-] Incorrect url/uri: {0}".format(cool.red(urlike)))
    except:
        # IndexError when findall returned nothing at all
        exit("[-] Incorrect url/uri: {0}".format(cool.red(urlike)))
def get_extend_dic(rawlist, encodeflag='none', need_passcratch=False):
    """Generate an extend-based wordlist from rawlist and append it to disk.

    The output filename uses PASSCRAPER_prefix when need_passcratch is set,
    otherwise EXTEND_prefix; each word is passed through the encoder selected
    by encodeflag. Prints the result summary via finishprinter.
    """
    prefix = EXTEND_prefix
    if rawlist == []:
        exit(CRLF + cool.red("[-] raw extend_enter file cannot be empty"))
    if need_passcratch:
        prefix = PASSCRAPER_prefix
    storepath = os.path.join(
        get_result_store_path(),
        "%s_%s_%s%s" % (prefix, get_buildtime(), encodeflag, filextension))
    # "a": results from repeated runs accumulate in the same store file
    with open(storepath, "a") as f:
        try:
            for _ in extend_enter(rawlist):
                f.write(operator.get(encodeflag)(str(_) + CRLF))
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; Exception keeps the diagnostic behavior intact.
        except Exception:
            traceback.print_exc()
            exit(CRLF + cool.red("[-] Some error"))
    finishprinter(finishcounter(storepath), storepath)
def checkurl(urlike):
    """Normalize a target string into a base URL.

    Inputs already starting with 'http' are returned untouched; anything
    else is reduced to its host part and prefixed with 'http://'.
    Exits with an error message if the value cannot be processed.
    """
    try:
        if str(urlike).startswith('http'):
            return urlike
        host = urlike.split('/')[0]
        return 'http://' + host
    except:
        exit("[-] Incorrect url/uri: {0}".format(cool.red(urlike)))
def getExtendDic(rawlist, encodeflag=""):
    """Build the extend wordlist for rawlist and write it to the result store.

    An empty encodeflag writes words verbatim; otherwise each line is run
    through the matching encoder from the operator table.
    """
    if rawlist == []:
        exit(CRLF + cool.red("[-] raw extend file cannot be empty"))
    filename = "%s_%s_%s%s" % (EXTEND_prefix, get_buildtime(), encodeflag, filextension)
    storepath = os.path.join(get_result_store_path(), filename)
    with open(storepath, 'w') as outfile:
        try:
            for word in extend(rawlist):
                line = str(word) + CRLF
                if encodeflag == "":
                    outfile.write(line)
                else:
                    outfile.write(operator.get(encodeflag)(line))
        except:
            exit(CRLF + cool.red("[-] File's character encoding maybe error"))
    finishprinter(finishcounter(storepath), storepath)
def confmatcher(confpath):
    """Parse a configuration file into a flat list of rule fields.

    Each non-comment line may contain patterns of the shape
    text[...]{...}<...>tail; every captured field is appended in order,
    so each rule contributes five entries. Exits when nothing matches
    or when more than 10 rules are present.
    """
    configures = []
    with open(confpath) as conf_file:
        for rawline in conf_file.readlines():
            confline = rawline.strip()
            # skip lines that start with the annotation (comment) character
            if len(confline) >= 1 and confline[0] == conf_annotator:
                continue
            for match in re.findall('(.*?)\[(.*?)\]\{(.*?)\}\<(.*?)\>([^[]*)', confline):
                configures.extend(match)
    if not configures:
        exit(CRLF + cool.red('[-] Match configuration file for nothing'))
    if len(configures) // 5 > 10:
        exit(CRLF + cool.red('[-] Max support 10 parser'))
    return configures
def cleaner(*args):
    """Shred the path formed by joining *args.

    The joined value may name a directory, a single file, or a result-file
    prefix (matched case-insensitively against the first 8 characters of
    files in the result store). Exits on anything else.
    """
    target = "".join(args)
    if target and os.path.isdir(target):
        shreder_dir(target)
    elif target and os.path.isfile(target):
        shreder_file(target)
    elif target and target.upper() in prefix_range:
        wanted = target.upper()
        for filename in os.listdir(get_result_store_path()):
            if wanted in str(filename[0:8]).upper():
                shreder_file(os.path.join(get_result_store_path(), filename))
    else:
        exit(CRLF + cool.red("[-] invalid shredder path_or_dir arguments"))
def get_conf_dic(minlength, maxlength, objflag, encodeflag, head, tail):
    """Generate every head+combination+tail word of lengths min..max.

    Combinations are the cartesian product of objflag characters, encoded
    with the operator-table entry selected by encodeflag. The total count
    is validated via countchecker before the list is returned.
    """
    # FIX: validate encodeflag once up front instead of re-checking it on
    # every generated item; this also reports a wrong encode type even when
    # the length range produces no items (the original stayed silent then).
    if encodeflag not in operator.keys():
        exit(CRLF + cool.red('[-] wrong encode type'))
    encoder = operator.get(encodeflag)
    diclist = []
    for length in range_compatible(minlength, maxlength + 1):
        for item in itertools.product(objflag, repeat=length):
            diclist.append(encoder(head + "".join(item) + tail))
    # items count check
    countchecker(-1, len(diclist))
    return diclist
def combiner_enter(directory=os.path.abspath(sys.argv[0]), need_uniqifer=False):
    """Concatenate every plain-text file under directory into one result file.

    Walks the tree, keeps files whose guessed MIME type is text/plain,
    appends their contents to a COMBINER-prefixed store file, then either
    prints the result summary or hands off to uniqifer_enter.
    """
    if not os.path.isdir(os.path.abspath(directory)):
        exit(CRLF + cool.red("[-] path: {} don't exists".format(directory)))
    filepaths = []
    combine_list = []
    storepath = os.path.join(get_result_store_path(),
                             "%s_%s%s" % (COMBINER_prefix, get_buildtime(), filextension))
    for rootpath, subdirsname, filenames in os.walk(directory):
        filepaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in filenames])
    if len(filepaths) > 0:
        # keep only files that look like plain text
        for _ in filepaths:
            if mimetypes.guess_type(_)[0] == 'text/plain':
                combine_list.append(_)
    try:
        with open(storepath, "a") as f:
            for onefile in combine_list:
                with open(onefile, 'r') as tf:
                    f.write(tf.read())
        if not need_uniqifer:
            finishprinter(finishcounter(storepath), storepath)
        else:
            uniqifer_enter(storepath, from_combiner=True)
    except Exception as ex:
        print(CRLF + cool.red("[-] Combine file failed, Looking: "))
        # FIX: traceback.print_exc() returns None, so the original
        # `exit(CRLF + traceback.print_exc())` raised a TypeError instead of
        # showing the traceback; format_exc() returns the traceback text.
        exit(CRLF + traceback.format_exc())
def shreder_dir(directory, rewritecounts=dir_rewrite_count):
    """Securely delete a directory tree.

    Renames the root to a random name, overwrites every file rewritecounts
    times, truncates and renames the files, renames the subdirectories, and
    finally removes the tree. Destructive and irreversible.
    """
    filepaths = []
    dirpaths = []
    print(CRLF + "[+] Shredding '%s' ..." % cool.orange(directory))
    try:
        # rename the root directory to a random 5-letter name first, so the
        # original directory name itself is not recoverable
        newdirectoryname = os.path.join(
            os.path.dirname(directory), "".join(
                chr(random.randint(97, 122)) for _ in range_compatible(1, 6)))
        os.rename(directory, newdirectoryname)
        directory = newdirectoryname
    except:
        traceback.print_exc()
        exit(CRLF + cool.red(
            "[-] Error: cannot rename root directory name, Please check permissions"
        ))
    for rootpath, subdirsname, filenames in os.walk(directory):
        # get all directories
        dirpaths.extend(
            [os.path.abspath(os.path.join(rootpath, _)) for _ in subdirsname])
        # get all absolute file path
        filepaths.extend(
            [os.path.abspath(os.path.join(rootpath, _)) for _ in filenames])
    # make every file readable/writable so the overwrite passes can proceed;
    # failures are deliberately ignored (best-effort)
    for filepath in filepaths:
        try:
            os.chmod(filepath, stat.S_IREAD | stat.S_IWRITE)
        except:
            pass
    # overwrite passes: rewrite() each file rewritecounts times
    for _ in range_compatible(0, rewritecounts):
        print("[+] Rewrite count: %d" % (_ + 1))
        for filepath in filepaths:
            rewrite(filepath)
    # zero the lengths, then obscure the file names, then the directory names
    for filepath in filepaths:
        truncating(filepath)
    for filepath in filepaths:
        renamefile(filepath)
    renamedir(dirpaths)
    # step out of the tree before removing it
    os.chdir(os.path.join(directory, ".."))
    try:
        shutil.rmtree(directory)
    except OSError as ex:
        # non-fatal: report and continue so "Completely!" still prints
        print(
            cool.fuchsia("[!] Error: Cannot removing directory: '%s' " %
                         directory))
        traceback.print_exc()
    print(cool.orange("[+] Completely!"))
def shredder_enter(*args):
    """Entry point for the shredder tool.

    Joins *args into one path-like value and shreds it: a directory, a
    single file, or every result-store file whose first 10 characters
    contain the given prefix (case-insensitive). Exits with a notice when
    a prefix matches nothing, or with an error for unrecognized input.
    """
    matched = 0
    target = "".join(args)
    if target and os.path.isdir(target):
        shreder_dir(target)
    elif target and os.path.isfile(target):
        shreder_file(target)
    elif target and target.upper() in prefix_range:
        for filename in os.listdir(get_result_store_path()):
            if target.upper() in str(filename[0:10]).upper():
                matched += 1
                shreder_file(os.path.join(get_result_store_path(), filename))
        if matched == 0:
            exit(CRLF + cool.orange("[+] prefix %s files has been clean" % target.upper()))
    else:
        exit(CRLF + cool.red("[-] invalid shredder path_or_dir arguments"))
def scratchword(siteList):
    """Scrape candidate password words from a list of web sites.

    Fetches each site, strips HTML and punctuation, and collects tokens of
    length 5-8 (not in the whitelist) plus, conditionally, tokens of length
    9-25; duplicates and pure digits are filtered out of the result.
    """
    scrabbler = "scrabbler"
    resluts = []
    # Collected candidate tokens across all sites (pre-dedup).
    y_arr = []
    for site in siteList:
        try:
            site = site.strip()
            response = urlopen(site)
            response.addheaders = \
                [('User-agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0')]
            # decode('utf-8') is required for the code to work on both Python 2 and Python 3
            x = stripHTMLTags(response.read().decode('utf-8') + site)
            # Replace junk found in our response
            x = x.replace('\n', ' ')
            x = x.replace(',', ' ')
            x = x.replace('.', ' ')
            x = x.replace('/', ' ')
            x = re.sub('[^A-Za-z0-9]+', ' ', x)
            x_arr = x.split(' ')
            for y in x_arr:
                y = y.strip()
                if y and (len(y) >= 5):
                    # strip URL-encoding residue prefixes: 2F (/), 23 (#), 3F (?), 3D (=)
                    if ((y[0] == '2') and (y[1] == 'F')) or (
                            (y[0] == '2') and (y[1] == '3')) or ((y[0] == '3') and (y[1] == 'F')) or (
                            (y[0] == '3') and (y[1] == 'D')):
                        y = y[2:]
                    if len(y) <= 8 and y.lower() not in passcratch_white_list:
                        y_arr.append(y)
                    # NOTE(review): due to conditional-expression precedence this
                    # parses as `(9 <= len(y) <= 25 and True) if (scrabbler in
                    # passcratch_white_list and y not in scrabbler) else False`,
                    # and `y not in scrabbler` is a substring test against the
                    # literal "scrabbler" — possibly not what was intended; confirm.
                    elif 9 <= len(
                            y
                    ) <= 25 and True if scrabbler in passcratch_white_list and y not in scrabbler else False:
                        y_arr.append(y)
        except Exception:
            print(CRLF + cool.red("[-] process abort, Looking: ") + CRLF)
            traceback.print_exc()
            exit(CRLF)
    # dedup while preserving first-seen order
    y_arr_unique = OrderedDict.fromkeys(y_arr).keys()
    for yy in y_arr_unique:
        # drop tokens that are nothing but digits
        if yy.strip().isdigit():
            pass
        else:
            resluts.append(yy.strip())
    return resluts
from plugins.idcard import get_idcard_post from plugins.extend import get_extend_dic from plugins.passcraper import get_passcratch_dic from lib.text import pydictor_ascii_text as pydictor_art_text from lib.data import set_result_store_path, get_result_store_path, set_conf_path, get_conf_path, tool_range, plug_range,\ CRLF, scrabble_site_path, startime if __name__ == '__main__': print("{}".format(cool.green(pydictor_art_text))) args = parse_args() try: if not os.path.exists(get_result_store_path()): os.mkdir(get_result_store_path()) except IOError: exit(CRLF + cool.red("[-] Cannot create %s " % get_result_store_path())) if args.output: if os.path.exists(args.output): tmppath = os.path.abspath(args.output) else: try: os.mkdir(args.output) tmppath = os.path.abspath(args.output) except IOError: tmppath = "" print(CRLF + cool.red("[-] Cannot create %s, default %s" % (args.output, get_result_store_path()))) if os.path.isdir(tmppath): set_result_store_path(tmppath) if args.base: get_base_dic(args.len[0], args.len[1], args.base, args.encode,
'abc123', '123456789', '11111', '1234567', 'iloveyou', 'adobe123', '123123', 'admin', '1234567890', 'letmein', 'photoshop', '1234', 'monkey', 'shadow', 'sunshine', '12345', 'password1', 'princess', 'azerty', 'trustno1', '000000' ) # IT youth weak password ITpass = ( '123456789', '12345678', '11111111', 'dearbook', '00000000', '123123123', '1234567890', '88888888', '111111111', '147258369', '987654321', 'aaaaaaaa', '1111111111', '66666666', 'a123456789', '11223344', '1qaz2wsx', 'xiazhili', '789456123', 'qqqqqqqq', '000000000', '3.1415926', '3_1415926' ) weakpass101_list = [] weakpass101 = os.path.join(build_in_dict_path, "Weakpass101") if os.path.isfile(weakpass101): try: with open(weakpass101) as f: for _ in f.readlines(): weakpass101_list.append(_) except IOError as ex: exit(cool.red("[-] Weakpass101.txt operator error") + CRLF + ex.message) joinall = top_20_kb + other_char_kb + usual_weak_pass + worst_25_pass + ITpass + tuple(weakpass101_list) weak_pass_set = set(joinall)
import traceback from collections import OrderedDict from lib.fun import cool from lib.fun import py_ver_egt_3 from plugins.extend import get_extend_dic from lib.data import scrabble_site_path, passcratch_white_list, CRLF, annotator # in python3: urllib + urilib2 -> urllib, and # urllib2.urlopen() -> urllib.request.urlopen(), urllib2.Request() -> urllib.request.Request() try: if py_ver_egt_3(): from urllib.request import urlopen else: from urllib2 import urlopen except ImportError: print(cool.red('[-] can not import urllib or urllib2 module:') + CRLF) exit(traceback.print_exc()) def stripHTMLTags(html): text = html rules = [ { r'>\s+': '>' }, # Remove spaces after a tag opens or closes. { r'\s+': ' ' }, # Replace consecutive spaces. { r'\s*<br\s*/?>\s*': '\n' }, # Newline after a <br>.
from core.CONF import build_conf_dic from core.SEDB import SEDB from plugins.idcard import getIDCardPost from plugins.extend import getExtendDic from lib.fun import cool from lib.tool import uniqify_enter, cleaner if __name__ == '__main__': print("{}".format(cool.green(pydictor_art_text))) args = parse_args() try: if not os.path.exists(get_result_store_path()): os.mkdir(get_result_store_path()) except IOError: exit(CRLF + cool.red("[-] Cannot create %s " % get_result_store_path())) if args.output: if os.path.exists(args.output): tmppath = os.path.abspath(args.output) else: try: os.mkdir(args.output) tmppath = os.path.abspath(args.output) except IOError: tmppath = "" print(CRLF + cool.red("[-] Cannot create %s, default %s" % (args.output, get_result_store_path()))) if os.path.isdir(tmppath): set_result_store_path(tmppath) if args.type:
result_handler(arg.analyse, result_file_path) if os.path.getsize(result_file_path) > 0: print( cool.orange( "\n[+] Store in: {0}".format(result_file_path))) else: try: os.remove(result_file_path) except: pass print(cool.fuchsia("\n[!] analyse output nothing")) print( cool.orange("[+] Cost : {0:.6} seconds".format(time.time() - start_time))) else: exit(cool.red("[-] Directory:%s don't exists" % arg.analyse)) elif arg.select[0] != '': if os.path.isfile(arg.select[0]): if str(arg.select[1]).isdigit(): select_by_port(arg.select[0], arg.select[1]) else: select_by_service(arg.select[0], arg.select[1]) else: exit(cool.red("[-] File:%s don't exists" % arg.select[0])) else: if arg.rmself != 'default': remself = 'true' else: remself = 'false' try: init(target_choice=arg.target,