def main():
    """Scanner entry point.

    Resolves the project's runtime directories into PATHS, patches the
    runtime, then starts the task scheduler plus the node-registration
    and target-feeding worker threads.
    """
    PATHS.ROOT_PATH = module_path()  # was moudle_path() — typo fixed (sibling main() uses module_path)
    print(PATHS.ROOT_PATH)
    PATHS.PLUGIN_PATH = os.path.join(PATHS.ROOT_PATH, "pocs")
    print(PATHS.PLUGIN_PATH)
    PATHS.OUTPUT_PATH = os.path.join(PATHS.ROOT_PATH, "output")
    print(PATHS.OUTPUT_PATH)
    PATHS.DATA_PATH = os.path.join(PATHS.ROOT_PATH, "data")
    print(PATHS.DATA_PATH)
    patch_all()
    logger.info("Hello W12SCAN !")
    # Normalize domains (uniform format: no trailing '/' whether a domain or a
    # second-level directory), recognize ip/cidr, tidy up ips.
    # Targets are fetched from redis.
    # NOTE(review): `schedular` is presumably a module-level Schedular
    # instance — confirm; a sibling version of main() creates it locally.
    schedular.start()  # start the task-dispatch scheduler
    if DEBUG:
        func_target = debug_get
    else:
        func_target = redis_get
    # Communication thread with the WEB node
    node = threading.Thread(target=node_register)
    node.start()
    # Queue-dispatch thread
    t = threading.Thread(target=func_target, name='LoopThread')
    t.start()
    try:
        schedular.run()
    except KeyboardInterrupt:
        logger.info("User exit")
def portScan():
    """Run the exploit commands mapped to each discovered open port.

    paths.PORT_PATH holds one file per open port, named after the port
    number; each file lists target IPs.  Known ports map to command
    templates in portExploits.PORTS.
    """
    notScanned = []
    for _file in os.listdir(paths.PORT_PATH):
        try:
            _port = int(_file)
            if _port in portExploits.PORTS:  # dict.has_key() is Python-2-only
                commands = portExploits.PORTS[_port]
                for command in commands:
                    if "nmap" in command:
                        # nmap accepts the whole target file in one run
                        command = command.replace(
                            "$TARGET",
                            "-iL " + os.path.join(paths.PORT_PATH, _file))
                        logger.info(command)
                        os.system(command)
                    elif "msfconsole" in command:
                        # msf is run once per target IP; close the file
                        # handle deterministically
                        with open(os.path.join(paths.PORT_PATH, _file)) as f:
                            ips = f.readlines()
                        for ip in ips:
                            # BUG FIX: substitute into a fresh copy each
                            # iteration — the old code overwrote `command`,
                            # so $TARGET was gone after the first IP
                            cmd = (
                                command.replace("$TARGET", ip.strip())
                                .replace("$USER", paths.USR_LIST)
                                .replace("$PASS", paths.PWD_LIST)
                            )
                            logger.info(cmd)
                            os.system(cmd)
                    # TODO verify whether other commands are usable; to be added
                    else:
                        pass
            else:
                notScanned.append(_port)  # TODO
        except Exception as e:  # `except Exception, e` is Python-2-only
            print(e)
            continue
def load_remote_poc():
    """Return the remote POC index, refreshing the local cache every 10 days.

    The timestamp of the last refresh is kept in data/api.lock; the index
    itself is cached in data/api.json.
    """
    filename = os.path.join(PATHS.DATA_PATH, "api.json")
    api_lock = os.path.join(PATHS.DATA_PATH, "api.lock")
    # Refresh the api at most every 10 days
    if not os.path.exists(api_lock):
        with open(api_lock, "w") as f:
            f.write(str(time.time()))
    with open(api_lock) as f:
        last_time = float(f.read())
    logger.debug("api last time:{}".format(last_time))
    if time.time() - last_time > 60 * 60 * 24 * 10:
        with open(api_lock, "w") as f:
            f.write(str(time.time()))
        logger.info("update airbug api...")
        _middle = "/master"
        _suffix = "/API.json"
        _prefix = WEB_REPOSITORY.replace("github.com", "raw.githubusercontent.com")
        _api = _prefix + _middle + _suffix
        r = requests.get(_api)
        # json.loads(..., encoding=...) was ignored for str input and the
        # parameter was removed entirely in Python 3.9
        datas = json.loads(r.text)
        for data in datas:
            data["webfile"] = _prefix + _middle + data["filepath"]
        with open(filename, "w") as f:
            json.dump(datas, f)
    with open(filename) as f:
        datas = json.load(f)
    return datas
def _init_plugins():
    """Load every plugin below PATH['plugins'] and register its W13SCAN class in KB.

    Honors INCLUDE_PLUGINS (whitelist; the single entry 'all' disables it,
    and loader.py is always force-included) and EXCLUDE_PLUGINS (blacklist).
    """
    # Loop-invariant: whether the whitelist is disabled.  Appending to
    # INCLUDE_PLUGINS below cannot turn it into the single-'all' form.
    load_all = len(INCLUDE_PLUGINS) == 1 and INCLUDE_PLUGINS[0] == 'all'
    for root, dirs, files in os.walk(PATH['plugins']):
        files = filter(lambda x: not x.startswith("__") and x.endswith(".py"), files)
        for _ in files:
            if not load_all:
                # the loader plugin must always be part of the whitelist
                if "loader.py" not in INCLUDE_PLUGINS:
                    INCLUDE_PLUGINS.append("loader.py")
                if _ not in INCLUDE_PLUGINS:
                    continue
            if _ in EXCLUDE_PLUGINS:
                continue
            filename = os.path.join(root, _)
            mod = load_file_to_module(filename)
            try:
                mod = mod.W13SCAN()
                getattr(mod, 'name', 'unknown plugin')
                plugin = os.path.splitext(_)[0]
                plugin_type = os.path.split(root)[1]
                # default the plugin type to the name of its directory
                if getattr(mod, 'type', None) is None:
                    setattr(mod, 'type', plugin_type)
                KB["registered"][plugin] = mod
            except AttributeError:
                logger.error('Filename:{} not class "{}"'.format(_, 'W13SCAN'))
    logger.info('Load plugin:{}'.format(len(KB["registered"])))
def login(username, password):
    """Attempt a two-step (username first, then password) web-shell login."""
    _, u_field, _ = getFormField()
    u_field.clear()
    u_field.send_keys(username)
    u_field.submit()
    fields = getFormField()
    if fields is not None and isWebShellLoginPage(fields):
        # Username accepted: a fresh login page now asks for the password.
        TARGET.PAGE.insert(0, getPage(browser.current_url))
        pwd_field = fields[-1]
        pwd_field.clear()
        pwd_field.send_keys(password)
        pwd_field.submit()
        TARGET.PASSWORD_TESTED = True
        status = verifyAccount()
        logger.info("Account => %s : %s (%s)" % (username, password, status))
        if status == STATUS.OK:
            TARGET.CREDENTIALS.append((username, password))
        # Drop the intermediate password page pushed above.
        TARGET.PAGE.pop(0)
    # Reset browser state for the next attempt.
    browser.delete_all_cookies()
    browser.get(TARGET.URL)
def portScan():
    """Run the exploit commands mapped to each discovered open port.

    Duplicate of the double-quoted variant: one file per open port in
    paths.PORT_PATH, command templates in portExploits.PORTS.
    """
    notScanned = []
    for _file in os.listdir(paths.PORT_PATH):
        try:
            _port = int(_file)
            if _port in portExploits.PORTS:  # dict.has_key() is Python-2-only
                commands = portExploits.PORTS[_port]
                for command in commands:
                    if 'nmap' in command:
                        # nmap accepts the whole target file at once
                        command = command.replace(
                            '$TARGET',
                            '-iL ' + os.path.join(paths.PORT_PATH, _file))
                        logger.info(command)
                        os.system(command)
                    elif 'msfconsole' in command:
                        # msf runs once per target IP; close the handle
                        with open(os.path.join(paths.PORT_PATH, _file)) as f:
                            ips = f.readlines()
                        for ip in ips:
                            # BUG FIX: keep the template intact — the old
                            # code rebound `command`, so $TARGET was already
                            # substituted away for every IP after the first
                            cmd = command.replace('$TARGET', ip.strip()) \
                                .replace('$USER', paths.USR_LIST) \
                                .replace('$PASS', paths.PWD_LIST)
                            logger.info(cmd)
                            os.system(cmd)
                    # TODO verify whether other commands are usable; to be added
                    else:
                        pass
            else:
                notScanned.append(_port)  # TODO
        except Exception as e:  # `except Exception, e` is Python-2-only
            print(e)
            continue
def method_a():
    """First recovery strategy: clone the repository directly with git."""
    logger.info("Try to Clone straightly")
    git_dir = os.path.join(paths.GITHACK_DIST_TARGET_PATH, ".git")
    if not os.path.exists(git_dir):
        return clone()
    # A repository already exists on disk: just verify it is usable.
    logger.warning("[Skip][First Try] %s already exists." % (git_dir))
    return valid_git_repo()
def clone_from_cache():
    """Rebuild the .git directory from cached / downloadable metadata files."""
    logger.info("Cache files")
    refresh_files()
    # Plain metadata files that need no post-processing.
    for name in ("COMMIT_EDITMSG", "ORIG_HEAD", "description", "info/exclude",
                 "FETCH_HEAD", "refs/heads/master", "refs/remote/master"):
        readorwget(name)
    # HEAD looks like "ref: refs/heads/<branch>\n" — keep only the ref path.
    refs = readorwget("HEAD")[5:-1]
    readorwget("index")
    readorwget("logs/HEAD", True)
    HEAD_HASH = readorwget(refs)
    readorwget("logs/refs/heads/%s" % (refs.split("/")[-1]))
    if HEAD_HASH:
        cache_commits(HEAD_HASH.replace("\n", ""))
    readorwget("logs/refs/remote/master")
    readorwget("logs/refs/stash")
    # Download the stash, if any.
    STASH_HASH = readorwget("refs/stash")
    if STASH_HASH:
        cache_commits(STASH_HASH.replace("\n", ""))
    cache_objects()
def main():
    """W12SCAN entry point: resolve runtime paths, then start the scheduler
    plus the node-registration and target-feeding worker threads."""
    PATHS.ROOT_PATH = module_path()
    PATHS.PLUGIN_PATH = os.path.join(PATHS.ROOT_PATH, "pocs")
    PATHS.OUTPUT_PATH = os.path.join(PATHS.ROOT_PATH, "output")
    PATHS.DATA_PATH = os.path.join(PATHS.ROOT_PATH, "data")
    patch_all()
    logger.info("Hello W12SCAN !")

    # Normalize domains (uniform format: no trailing '/' whether a domain or
    # a second-level directory), recognize ip/cidr, tidy up ips.
    # Targets are fetched from redis.
    def redis_get():
        # Block on the redis list forever, feeding each target to the scheduler.
        list_name = "w12scan_scanned"
        while 1:
            target = redis_con.blpop(list_name)[1]
            scheduler.put_target(target)

    # redis_get()
    def debug_get():
        # Single hard-coded target used when DEBUG is enabled.
        target = "http://stun.tuniu.com"
        scheduler.put_target(target)

    def node_register():
        # Register this scan node in redis once, then keep refreshing its
        # heartbeat timestamp every 250 seconds.
        first_blood = True
        while 1:
            if first_blood:
                dd = {
                    "last_time": time.time(),
                    "tasks": 0,
                    "running": 0,
                    "finished": 0
                }
                redis_con.hmset(NODE_NAME, dd)
                first_blood = False
            else:
                redis_con.hset(NODE_NAME, "last_time", time.time())
            time.sleep(50 * 5)

    scheduler = Schedular(threadnum=THREAD_NUM)
    scheduler.start()  # start the task-dispatch scheduler
    if DEBUG:
        func_target = debug_get
    else:
        func_target = redis_get
    # Communication thread with the WEB side
    node = threading.Thread(target=node_register)
    node.start()
    # Queue-dispatch thread
    t = threading.Thread(target=func_target, name='LoopThread')
    t.start()
    try:
        scheduler.run()
    except KeyboardInterrupt:
        logger.info("User exit")
def decorated(*args):
    # Run the wrapped brute-force attempt under the standard error manager,
    # then record how many credentials exist after the call.
    wrapped = errormanager(func)
    wrapped(*args)
    INFO_ACCOUNT.append(len(TARGET.CREDENTIALS))
    if len(INFO_ACCOUNT) == 2:
        # Two samples collected: compare before/after counts.
        if INFO_ACCOUNT[1] > INFO_ACCOUNT[0]:
            # A new credential was found since the previous attempt.
            if SETTING.SHOW_PROMPT:
                infoMsg = "[?] Account valid? (Y/n)> "
                jawaban = raw_input(infoMsg).lower()  # Python-2 input prompt
                if jawaban.startswith("n"):
                    # remove the credential (user rejected it)
                    TARGET.CREDENTIALS.pop(-1)
        # Keep a sliding window of the last two counts.
        INFO_ACCOUNT.pop(0)
    if SETTING.MAX_CREDENTIAL is not None:
        # One positional arg means a password-only attack.
        credType = "account" if len(args) != 1 else "password"
        if len(TARGET.CREDENTIALS) == SETTING.MAX_CREDENTIAL:
            infoMsg = "The '--max-cred' option is used. "
            infoMsg += "the process of finding an %s has reached the limit. " % repr(
                credType)
            infoMsg += "try with a value greater than '%d' (e.g. %d)"
            infoMsg %= (SETTING.MAX_CREDENTIAL, SETTING.MAX_CREDENTIAL * 2)
            logger.info(infoMsg)
            # Stop the whole brute-force run once the cap is hit.
            raise BrutemapStopBruteForceException
def __init__(self, threadnum=1):
    """Set up scheduler state: work queue, lock and target buffer pools."""
    self.threadNum = threadnum
    self.lock = threading.Lock()
    self.queue = Queue()
    self.cache_ips = []      # buffer pool of pending IP targets
    self.cache_domains = []  # buffer pool of pending domain targets
    logger.info("Start number of threading {}".format(self.threadNum))
def init():
    """Initialise an empty git repository in the target dist directory."""
    logger.info("Initialize Git")
    # Argument-list form (no shell) avoids quoting problems when the target
    # path contains spaces or shell metacharacters.
    process = subprocess.Popen(
        ["git", "init", paths.GITHACK_DIST_TARGET_PATH],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if stderr:
        logger.error("Initialize Git Error: %s" % (stderr))
def method_c():
    """Third recovery strategy: rebuild the repository from cached .git files."""
    logger.info("Try to clone with Cache")
    if not os.path.exists(os.path.join(paths.GITHACK_DIST_TARGET_PATH, ".git")):
        # No repository yet — create an empty one to populate.
        init()
    clone_from_cache()
    if not valid_git_repo():
        logger.warning("Clone With Cache end. But missed some files.")
    return True
def run_threads(num_threads, thread_function, args: tuple = ()):
    """Spawn `num_threads` daemon worker threads running `thread_function`
    (via exception_handled_function) and wait until they all finish.

    Initialises the shared KB bookkeeping before starting, and tolerates
    Ctrl+C by flipping KB['continue'] and waiting for workers to drain.
    """
    threads = []
    KB["continue"] = True
    KB["console_width"] = getTerminalSize()
    KB['start_time'] = time.time()
    KB['finished'] = 0
    KB["lock"] = threading.Lock()
    KB["result"] = 0
    KB["running"] = 0
    try:
        info_msg = "Staring {0} threads".format(num_threads)
        logger.info(info_msg)
        # Start the threads.
        # BUG FIX: the loop previously reused `num_threads` as its loop
        # variable, clobbering the parameter that the KeyboardInterrupt
        # branch reads again below.
        for thread_id in range(num_threads):
            thread = threading.Thread(target=exception_handled_function,
                                      name=str(thread_id),
                                      args=(thread_function, args))
            thread.daemon = True  # Thread.setDaemon() is deprecated
            try:
                thread.start()
            except Exception as ex:
                err_msg = "error occurred while starting new thread ('{0}')".format(
                    str(ex))
                logger.critical(err_msg)
                break
            threads.append(thread)
        # And wait for them to all finish.
        alive = True
        while alive:
            alive = False
            for thread in threads:
                if thread.is_alive():  # isAlive() was removed in Python 3.9
                    alive = True
                    time.sleep(0.1)
    except KeyboardInterrupt as ex:
        KB['continue'] = False
        if num_threads > 1:
            logger.info("waiting for threads to finish{0}".format(
                " (Ctrl+C was pressed)" if isinstance(ex, KeyboardInterrupt
                                                      ) else ""))
        try:
            while threading.active_count() > 1:
                pass
        except KeyboardInterrupt:
            raise
    except Exception as ex:
        logger.error("thread {0}: {1}".format(
            threading.current_thread().name, str(ex)))
        traceback.print_exc()
    finally:
        Share.dataToStdout('\n')
def clone_pack():
    """Download every pack file advertised in objects/info/packs."""
    logger.info("Clone pack data.")
    packdata = readorwget("objects/info/packs")
    if packdata:
        # Raw string + escaped dot: the old pattern's bare '.' matched any
        # character, so e.g. "...Xpack" would have been accepted too.
        packs = re.findall(r'P pack-([a-z0-9]{40})\.pack\n', packdata)
        for pack in packs:
            readorwget("objects/pack/pack-%s.idx" % (pack))
            readorwget("objects/pack/pack-%s.pack" % (pack))
    logger.info("Clone pack data end.")
def readorwget(filename, refresh=False):
    """Return the content of a cached .git file, downloading it first when
    absent or when `refresh` forces a re-download; None if unobtainable."""
    filepath = os.path.join(paths.GITHACK_DIST_TARGET_GIT_PATH, filename)
    if refresh or not os.path.exists(filepath):
        wget(filename)
    elif DEBUG:
        logger.info("[Skip] File %s already exists. " % filename)
    # The download may have failed — only read what actually exists.
    if not os.path.exists(filepath):
        return None
    return readFile(filepath)
def valid_git_repo():
    """Check that the recovered directory is a usable git repository by
    running `git reset` inside it; returns True on success."""
    logger.info("Valid Repository")
    # cwd= replaces the old "cd %s && git reset" shell pipeline, which broke
    # on paths containing spaces or shell metacharacters.
    process = subprocess.Popen(
        ["git", "reset"],
        cwd=paths.GITHACK_DIST_TARGET_PATH,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if stderr:
        logger.info("Valid Repository Fail")
        return False
    logger.success("Valid Repository Success")
    return True
def checkdepends():
    """Verify git is installed; abort the program with DEPENDS help text if not."""
    logger.info("Check Depends")
    proc = subprocess.Popen("git --version",
                            shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if stderr:
        # git missing or broken — print install instructions and bail out.
        logger.error(DEPENDS)
        sys.exit(1)
    logger.success("Check depends end")
def __init__(self, server_addr=('', 8788), request_handler_class=ProxyHandle, bind_and_activate=True, https=True):
    """Start the intercepting proxy server on `server_addr`."""
    HTTPServer.__init__(self, server_addr, request_handler_class, bind_and_activate)
    host, port = server_addr[0], server_addr[1]
    logger.info('HTTPServer is running at address( %s , %d )......' % (host, port))
    self.req_plugs = []  # request-phase plugin list
    self.https = https
    # CA used to mint per-host certificates for HTTPS interception.
    self.ca = CAAuth(ca_file="ca.pem", cert_file='ca.crt')
def clone():
    """Clone the exposed repository straight from its URL.

    Returns True on success; on failure makes sure the dist directory
    exists so later strategies have somewhere to work.
    """
    logger.info("Clone")
    # Argument-list form (no shell): URL/path with odd characters are passed
    # verbatim instead of being re-parsed by the shell as in the old
    # os.system("git clone %s %s" ...) call.
    ret = subprocess.call(
        ["git", "clone", target.TARGET_GIT_URL, paths.GITHACK_DIST_TARGET_PATH])
    if ret:
        mkdir_p(paths.GITHACK_DIST_TARGET_PATH)
        logger.warning("Clone Error")
        return False
    return True
def printStatus(start=True):
    """
    Print a status banner for brutemap starting up or shutting down.
    """
    banner = colored(HOMEPAGE, "green", attrs=["bold", "underline"])
    status = "starting" if start else "die"
    logger.info("( %s ) %s at %s" % (banner, status, time.strftime("%X")))
    if not start:
        # Shutting down — release the webdriver as well.
        tryCloseWebDriver()
def __init__(self):
    '''
    Initialise the crawl controller.

    self.url      -- root URL to crawl
    self.deep     -- crawl depth
    self.db       -- database access object
    self._thread  -- worker thread pool
    '''
    logger.info('init control class')
    self.db = operate['db']
    self.url = conf['url']
    self.deep = conf['deep']
    self._thread = ThreadPool(conf['thread'], self.get_html)
def creab_table(self):
    '''
    Create the fixed `spider` table.
    Fixed columns: id (int, PK autoincrement), html (text), url (text),
    deep (int), keyword (text).
    NOTE(review): the method name looks like a typo for create_table —
    kept as-is because callers use this name.
    '''
    cb_sql = "CREATE TABLE spider (id INTEGER PRIMARY KEY autoincrement, \
             html text, " \
             " url text, " \
             " deep INTEGER, " \
             " keyword text)"
    self.cur.execute(cb_sql)
    self.conn.commit()
    logger.info('Create spider table suc!')
def login(password):
    """Try one password against a password-only login form; on success store
    it and stop the brute force."""
    _, _, p_field = getFormField()
    p_field.clear()
    p_field.send_keys(password)
    p_field.submit()
    status = verifyAccount()
    logger.info("Password => %s (%s)" % (password, status))
    if status == STATUS.OK:
        TARGET.CREDENTIALS.append((password, ))
        browser.delete_all_cookies()
        raise BrutemapStopBruteForceException
def clone_from_cache():
    """Rebuild the .git directory from cached metadata (reduced file set)."""
    logger.info("Cache files")
    refresh_files()
    readorwget("COMMIT_EDITMSG")
    readorwget("info/exclude")
    readorwget("FETCH_HEAD")
    # BUG FIX: no leading "/" — os.path.join discards everything before an
    # absolute component, so "/refs/heads/master" escaped the .git directory.
    readorwget("refs/heads/master")
    # HEAD looks like "ref: refs/heads/<branch>\n" — keep only the ref path.
    refs = readorwget("HEAD")[5:-1]
    readorwget("index")
    readorwget("logs/HEAD", True)
    HEAD_HASH = readorwget(refs)
    readorwget("logs/refs/heads/%s" % (refs.split("/")[-1]))
    # Guard like the sibling variant: readorwget returns None when missing,
    # and None.replace() crashed here.
    if HEAD_HASH:
        cache_commits(HEAD_HASH.replace("\n", ""))
    cache_objects()
def login(username, password):
    """Test one username/password pair against HTTP authentication."""
    handler = SETTING.HTTP_AUTH_HANDLER(username, password)
    wrapped = errormanager(requests.get, False)
    response = wrapped(TARGET.URL, auth=handler)
    # Only a 200 counts as a successful authentication.
    status = STATUS.OK if response.status_code == 200 else STATUS.NO
    logger.info("Account => %s : %s (%s)" % (username, password, status))
    if status == STATUS.OK:
        TARGET.CREDENTIALS.append((username, password))
        raise BrutemapStopBruteForceException
def setPaths(url):
    """Derive all GitHack working paths from the target URL.

    Normalises the URL to end with '/', builds a filesystem-safe dist name
    from its netloc, and fills in the `paths` globals.
    """
    logger.info("Set Paths")
    # endswith() also copes with an empty url (url[-1] raised IndexError)
    target.TARGET_GIT_URL = url if url.endswith("/") else url + "/"
    # ':' is not a safe directory-name character on all platforms
    target.TARGET_DIST = urlparse.urlparse(
        target.TARGET_GIT_URL).netloc.replace(':', '_')
    logger.info("Target Url: %s" % (target.TARGET_GIT_URL))
    paths.GITHACK_DIST_ROOT_PATH = os.path.join(paths.GITHACK_ROOT_PATH,
                                                "dist")
    paths.GITHACK_DATA_PATH = os.path.join(paths.GITHACK_ROOT_PATH, "data")
    paths.USER_AGENTS = os.path.join(paths.GITHACK_DATA_PATH,
                                     "user-agents.txt")
    paths.GITHACK_DIST_TARGET_PATH = os.path.join(paths.GITHACK_DIST_ROOT_PATH,
                                                  target.TARGET_DIST)
    paths.GITHACK_DIST_TARGET_GIT_PATH = os.path.join(
        paths.GITHACK_DIST_TARGET_PATH, ".git")
def login(username, password):
    """Submit one username/password pair to the single login form."""
    form, user_field, pass_field = getFormField()
    for field, value in ((user_field, username), (pass_field, password)):
        field.clear()
        field.send_keys(value)
    form.submit()
    status = verifyAccount()
    logger.info("Account => %s : %s (%s)" % (username, password, status))
    if status == STATUS.OK:
        TARGET.CREDENTIALS.append((username, password))
    # Reset browser state for the next attempt.
    browser.delete_all_cookies()
    browser.get(TARGET.URL)
def checkTarget(url):
    """
    Check that the target URL is reachable; raise
    BrutemapSkipTargetException (skipping the target) when the request fails.
    """
    infoMsg = "Checking target..."
    logger.info(infoMsg)
    response = None
    try:
        wrapped = errormanager(requests.get)
        response = wrapped(url)
    except Exception as e:  # `except Exception, e` is Python-2-only syntax
        logger.exception(e)
        raise BrutemapSkipTargetException
def isdirlist():
    """Return True when the target git URL serves a directory listing."""
    # Markers emitted by common web servers' autoindex pages.
    keywords = [
        "To Parent Directory",
        "Index of /",
        "Directory Listing For /",
        "[转到父目录]",
        "objects/",
    ]
    data = request_data(target.TARGET_GIT_URL)
    if data and any(key in data for key in keywords):
        logger.info("%s is support Directory Listing" % target.TARGET_GIT_URL)
        return True
    logger.info("%s is not support Directory Listing" % target.TARGET_GIT_URL)
    return False
def _init_plugins():
    """Load every plugin below PATH['plugins'] and register its W13SCAN
    class in KB["registered"], honoring EXCLUDE_PLUGINS."""
    for root, dirs, files in os.walk(PATH['plugins']):
        files = filter(lambda x: not x.startswith("__") and x.endswith(".py"), files)
        for _ in files:
            if _ in EXCLUDE_PLUGINS:
                continue
            # BUG FIX: join with `root` (the directory being walked), not the
            # top-level plugins dir — files in subdirectories resolved to
            # nonexistent paths.  The sibling variant already does this.
            filename = os.path.join(root, _)
            mod = load_file_to_module(filename)
            try:
                mod = mod.W13SCAN()
                KB["registered"][_] = mod
            except AttributeError:
                logger.error('Filename:{} not class "{}"'.format(_, 'W13SCAN'))
    logger.info('Load plugin:{}'.format(len(KB["registered"])))
def get_html(self, url):
    '''
    Fetch one URL with a browser-like User-Agent.

    :return: {'type': content type of the response} — urls usually appear in
             js and html pages; js is not handled for now.  On request
             failure the default {'type': None} is returned.
    '''
    header = {
        "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:46.0) Gecko/20100101 Firefox/46.0"
    }
    result = {"type": None}
    logger.info("request a url: %s" %url)
    try:
        req = requests.get(url, headers=header, timeout=4)
    except Exception, e:
        # Logging may itself fail on undecodable URLs — fall back to print.
        try:
            logger.error("%s @@ requests fail and the info is %s" %(url.encode('utf-8'), e))
        except:
            print url
            print isinstance(url, unicode)
        return result
def DNSzoneTransfer():
    """Probe conf.TARGET for a DNS zone-transfer vulnerability; results are
    written to output/DNS-zoneTransfer.txt when vulnerable."""
    path = os.path.join(paths.OUTPUT_PATH, "DNS-zoneTransfer.txt")
    logger.info("Target domain: " + conf.TARGET)
    if not zonetransfer_poc(conf.TARGET, path):
        logger.info("Not vulnerable.")
        return
    logger.warning("Vulnerable!")
    logger.info("Save results to %s" % path)
def run(self):
    '''
    Main control method: crawl to the configured depth, then shut down
    the thread pool.
    :return: None
    '''
    logger.info("start spider, and the spider deep is " + str(self.deep))
    self.url_group = []
    self.r_group = []
    # Depth-by-depth recursive crawl starting from the root URL.
    self.recursion_deep()
    logger.info("The spider page total number is : " + str(len(self.url_group)))
    self._thread._del()
    logger.info("Spider OVER!!")
def recursion_deep(self):
    '''
    Crawl recursively according to the depth value.
    operate['db'].deep -- current depth
    self.deep          -- target depth to crawl to
    :return:
    '''
    if operate['db'].deep == 0:
        # Depth 0: fetch the root URL itself.
        logger.info("spidering deep == 0 page")
        r = self.get_html(self.url)
        try:
            html = r['html']
        except:
            print "url input error!"
            logger.error("url error(%s)" %(self.url))
            return
        operate['db'].insert(html, self.url)
        self.r_group.append(r)
        operate['db'].deep += 1
        self.recursion_deep()
    elif operate['db'].deep > self.deep:
        logger.info('spider deep over!')
        return
    else:
        logger.info("spidering deep = %s" %operate['db'].deep)
        tmp = []
        url_group = []
        # Extract urls from the pages crawled at the previous depth.
        for x in self.r_group:
            html = x['html']
            url_group.extend(self.find_url(html))
            logger.debug("from %s page find %s url" %(x['url'], len(url_group)))
        # When no url was matched on any page, stop and return.
        if url_group == []:
            return
        # Hand the extracted urls to the thread pool.
        result_list = self._thread.my_map(url_group)
        for y in xrange(len(result_list)):
            if result_list[y]['type'] == 'html':
                tmp.append(result_list[y])
            else:
                logger.debug("delete the not html page (%s)" % url_group[y])
        self.r_group = tmp
        operate['db'].deep += 1
        self.recursion_deep()