def ipInfo(self, datas):
    """Pretty-print FOFA API result rows and collect the web URLs.

    Each row of *datas* is a sequence in the field order requested from
    the API: [ip, title, port, domain, protocol, host].  Rows whose
    protocol is http/https are turned into ``proto://host/`` URLs and
    appended to the shared ``Urls.url`` list for later probing.

    :param datas: list of result rows from the FOFA search API.
    """
    print(
        mkPut.fuchsia('[{0}]'.format(
            time.strftime("%H:%M:%S", time.localtime()))),
        mkPut.green('[INFO]'), 'Success')
    tb = pt.PrettyTable()
    tb.field_names = ['IP', 'Title', 'Port', 'Domain', 'Protocol', 'Host']
    print(
        mkPut.fuchsia('[{0}]'.format(
            time.strftime("%H:%M:%S", time.localtime()))),
        mkPut.green('[INFO]'), 'Url信息:')
    for data in datas:
        tb.add_row(data)
        # BUG FIX: the original scanned every cell of the row for the
        # literal "http"/"https", so a title or domain equal to "http"
        # would wrongly trigger a URL append even when the protocol
        # column held something else.  Test the protocol column
        # (index 4) directly instead.
        if data[4] in ("http", "https"):
            Urls.url.append("{0}://{1}/".format(data[4], data[5]))
            print(
                mkPut.fuchsia('[{0}]'.format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.green('[INFO]'),
                "{0}://{1}/".format(data[4], data[5]))
    print(
        mkPut.fuchsia('[{0}]'.format(
            time.strftime("%H:%M:%S", time.localtime()))),
        mkPut.green('[INFO]'), '全部信息:')
    print(tb)
    print()
def main(self):
    """Run every fingerprint rule, then report the hosts left unmatched.

    Rules are dispatched to the matcher for their location (headers,
    cookie, or body); matchers pop identified hosts out of ``WebInfos``,
    so whatever remains afterwards is recorded with cms "None".
    """
    for rule in ruleDatas:
        cms, location, pattern = rule[0], rule[1], rule[2]
        if location == 'headers':
            self.heads(pattern, cms)
        elif location == 'cookie':
            self.cookieInfo(pattern, cms)
        else:
            self.bodys(pattern, cms)
    webCms = "None"  # every host still present matched no rule
    for key in WebInfos:
        headers, body, status = WebInfos[key][0], WebInfos[key][1], WebInfos[key][2]
        webServer = headers['server'] if 'server' in headers else "None"
        titleHits = re.findall(self.rex, body)
        webTitle = titleHits[0] if titleHits else "None"
        OutInfos[key] = webCms, webServer, status, webTitle
        logger.success("{} {} {} {}".format(
            mkPut.green(webServer), mkPut.yellow(status),
            key, mkPut.blue(webTitle)))
def heads(self, rulesRegex, cms):
    """Match *rulesRegex* against every stored response-header value.

    On the first matching header the target is printed as *cms*,
    recorded in ``OutInfos`` and removed from ``WebInfos`` so later
    rules will not re-classify it.
    """
    for key in list(WebInfos):  # snapshot: entries are popped mid-iteration
        headers = WebInfos[key][0]
        body = WebInfos[key][1]
        status = WebInfos[key][2]
        webServer = headers['server'] if 'server' in headers else "None"
        titleHits = re.findall(self.rex, body)
        webTitle = titleHits[0] if titleHits else "None"
        for headName in headers:
            if not re.findall(rulesRegex, headers[headName]):
                continue
            print(
                mkPut.fuchsia("[{0}]".format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.red(cms), mkPut.green(webServer),
                mkPut.yellow(status), key, mkPut.blue(webTitle))
            OutInfos[key] = cms, webServer, status, webTitle
            WebInfos.pop(key)
            break  # host classified; stop scanning its headers
def main(self):
    """Apply all fingerprint rules, then print every unidentified host.

    Hosts identified by a rule are popped from ``WebInfos`` by the
    matchers; the second loop prints and records whatever is left.
    """
    for rule in ruleDatas:
        cms, location, pattern = rule[0], rule[1], rule[2]
        if location == 'headers':
            self.heads(pattern, cms)
        elif location == 'cookie':
            self.cookieInfo(pattern, cms)
        else:
            self.bodys(pattern, cms)
    for key in WebInfos:
        info = WebInfos[key]
        webServer = info[0]['server'] if 'server' in info[0] else "None"
        titleHits = re.findall(self.rex, info[1])
        webTitle = titleHits[0] if titleHits else "None"
        print(
            mkPut.fuchsia("[{0}]".format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.green(webServer), mkPut.yellow(info[2]),
            key, mkPut.blue(webTitle))
        OutInfos[key] = webServer, info[2], webTitle
def run(self):
    """Fetch ``self.target`` once and cache (headers, body, status) in WebInfos.

    Network failures are deliberately best-effort: the target is simply
    skipped.  The worker semaphore is always released so the spawner in
    ``mwebs`` can keep scheduling threads.
    """
    s = requests.Session()
    s.keep_alive = False
    s.headers = self.headers
    s.verify = False
    # The 'rememberMe' cookie is sent to elicit the Shiro fingerprint
    # in the response (presumably for later deserialization checks —
    # TODO confirm against the caller).
    s.cookies.update({'rememberMe': '1'})
    try:
        req = s.get(self.target, timeout=5)
        # BUG FIX: the original paired lock.acquire()/lock.release()
        # manually; any exception raised in between (e.g. Ctrl-C during
        # the print) left the lock held forever and deadlocked every
        # other worker.  "with lock" always releases it.
        with lock:
            WebInfos[self.target] = req.headers, req.text, req.status_code
            req.close()
            print(mkPut.fuchsia("[{0}]".format(time.strftime(
                "%H:%M:%S", time.localtime()))), mkPut.green("[INFO]"),
                "命中{0}个链接".format(len(WebInfos)), end='\r', flush=True)
    except requests.exceptions.ReadTimeout:
        pass
    except requests.exceptions.ConnectionError:
        pass
    except requests.exceptions.ChunkedEncodingError:
        pass
    except KeyboardInterrupt:
        pass
    finally:
        # BUG FIX: release the session sockets and the semaphore even if
        # an unanticipated exception escapes; otherwise mwebs would
        # eventually block forever on sem.acquire().
        s.close()
        self.sem.release()
def mwebs():
    """Spawn one webInfo worker thread per collected URL, bounded by threadNum.

    Each worker receives the semaphore and releases it on completion,
    so at most ``threadNum`` requests run concurrently.
    """
    threads = []
    print(mkPut.fuchsia("[{0}]".format(time.strftime("%H:%M:%S", time.localtime()))),
          mkPut.green("[INFO]"), "共采集{0}个web链接".format(len(Urls.url)))
    print(mkPut.fuchsia("[{0}]".format(time.strftime(
        "%H:%M:%S", time.localtime()))), mkPut.green("[INFO]"), "获取网页信息中")
    sem = threading.Semaphore(threadNum)  # caps concurrent workers
    try:
        for url in Urls.url:
            sem.acquire()  # blocks when threadNum workers are in flight
            t = webInfo(url, sem)
            # FIX: Thread.setDaemon() is deprecated (removed in modern
            # Python); assign the daemon attribute instead.
            t.daemon = True
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
    except KeyboardInterrupt:
        pass
    print()
    print()
def run(self):
    """Query the FOFA search API for ``self.ip`` and print/collect results.

    The keyword is base64-encoded as the API requires; the response is
    parsed as JSON and handed to ``self.ipInfo``.  Console output is
    serialized under the shared lock.
    """
    keywordsBs = base64.b64encode(self.ip.encode('utf-8')).decode('utf-8')
    url = ("https://fofa.so/api/v1/search/all?email={0}&key={1}&qbase64={2}"
           "&full=false&fields=ip,title,port,domain,protocol,host&size={3}").format(
        self.email, self.key, keywordsBs, fofaSize)
    try:
        req = requests.Session()
        req.keep_alive = False
        req.headers = self.headers
        req.mount("https://", HTTPAdapter(max_retries=10))
        target = req.get(url, timeout=10)
        # BUG FIX: the original paired lock.acquire()/lock.release()
        # manually and only re-released it in the JSONDecodeError
        # handler — any other exception raised inside the critical
        # section deadlocked every other worker.  "with lock" always
        # releases it exactly once.
        with lock:
            print(
                mkPut.fuchsia('\n[{0}]'.format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.green('[INFO]'), '正在检测IP:', self.ip)
            print(
                mkPut.fuchsia('[{0}]'.format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.green('[INFO]'), '正在通过API获取信息...')
            datas = json.loads(target.text)
            self.ipInfo(datas['results'])
            req.close()
    except requests.exceptions.ReadTimeout:
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.red('[ERROR]'), '请求超时')
    except requests.exceptions.ConnectionError:
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.red('[ERROR]'), '网络超时')
    except json.decoder.JSONDecodeError:
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.red('[ERROR]'), '获取失败,请重试')
    finally:
        # BUG FIX: release the scheduler semaphore unconditionally so an
        # unexpected exception cannot starve the thread pool.
        self.sem.release()
def bodys(self, rulesRegex, cms):
    """Match *rulesRegex* against each stored response body.

    A matching target is logged as *cms*, recorded in ``OutInfos`` and
    removed from ``WebInfos`` so later rules will not re-classify it.
    """
    for key in list(WebInfos):  # snapshot: matched keys are popped below
        info = WebInfos[key]
        webServer = info[0]['server'] if 'server' in info[0] else "None"
        titleHits = re.findall(self.rex, info[1])
        webTitle = titleHits[0] if titleHits else "None"
        if re.findall(rulesRegex, info[1]):
            logger.success("{} {} {} {} {}".format(
                mkPut.red(cms), mkPut.green(webServer),
                mkPut.yellow(info[2]), key, mkPut.blue(webTitle)))
            OutInfos[key] = cms, webServer, info[2], webTitle
            WebInfos.pop(key)
import logging from config.colors import mkPut from colorama import init class LoggingLevel: SUCCESS = 9 SYSINFO = 8 ERROR = 7 WARNING = 6 init(autoreset=True) logging.addLevelName(LoggingLevel.SUCCESS, mkPut.cyan("[+]")) logging.addLevelName(LoggingLevel.SYSINFO, mkPut.green("[INFO]")) logging.addLevelName(LoggingLevel.ERROR, mkPut.red("[ERROR]")) logging.addLevelName(LoggingLevel.WARNING, mkPut.yellow("[WARNING]")) LOGGER = logging.getLogger("GlassLog") formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s", datefmt=mkPut.fuchsia("[%H:%M:%S]")) LOGGER_HANDLER = logging.StreamHandler(sys.stdout) LOGGER_HANDLER.setFormatter(formatter) LOGGER.addHandler(LOGGER_HANDLER) LOGGER.setLevel(LoggingLevel.WARNING) class MY_LOGGER: def info(msg):