def todaySay():
    """Return the daily-quote banner, refreshing the cached file once per day.

    Compares the mtime of ``<config>/today.json`` against today's date; when
    the cache is stale (or missing) it fetches a fresh quote from the Shanbay
    API and rewrites the cache. Network timeouts only print a warning and the
    previously cached quote is used instead.
    """
    today_path = Paths.config[0] + 'today.json'
    # A missing cache file counts as stale so the very first run fetches it
    # (the old code crashed in os.stat() in that case).
    if os.path.exists(today_path):
        file_time = time.strftime(
            "%Y%m%d", time.localtime(os.stat(today_path).st_mtime))
    else:
        file_time = ""
    os_time = time.strftime("%Y%m%d", time.localtime())
    if file_time != os_time:
        try:
            req = requests.get(
                "https://rest.shanbay.com/api/v2/quote/quotes/today/",
                timeout=3)
            with open(today_path, 'w', encoding="utf-8") as f:
                f.write(req.text)
        # Both timeout flavours get the same warning; fall through to the
        # cached quote (the two previous handlers were byte-identical).
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout):
            print(mkPut.fuchsia("[{0}]".format(time.strftime(
                "%H:%M:%S", time.localtime()))), mkPut.yellow("[warning]"),
                "更新每日一说超时")
    with open(today_path, 'r', encoding="utf-8") as f:
        today = json.load(f)
    content = today['data']['content']
    translation = today['data']['translation']
    author = "--- {0}".format(today['data']['author'])
    todaySays = '''
    {0}
    {1}
\t\t\t\t\t\t{2}
    '''.format(content, translation, author)
    return todaySays
def ipInfo(self, datas):
    """Render fofa result rows as a table and collect web (http/https) URLs.

    Each row is expected as [ip, title, port, domain, protocol, host];
    rows whose protocol field is http/https are appended to Urls.url.
    """
    def stamp():
        # Colourised [HH:MM:SS] prefix used on every console line.
        return mkPut.fuchsia('[{0}]'.format(
            time.strftime("%H:%M:%S", time.localtime())))

    print(stamp(), mkPut.green('[INFO]'), 'Success')
    table = pt.PrettyTable()
    table.field_names = ['IP', 'Title', 'Port', 'Domain', 'Protocol', 'Host']
    print(stamp(), mkPut.green('[INFO]'), 'Url信息:')
    for row in datas:
        table.add_row(row)
        for field in row:
            if field in ("http", "https"):
                link = "{0}://{1}/".format(row[4], row[5])
                Urls.url.append(link)
                print(stamp(), mkPut.green('[INFO]'), link)
    print(stamp(), mkPut.green('[INFO]'), '全部信息:')
    print(table)
    print()
def heads(self, rulesRegex, cms):
    """Fingerprint targets whose response headers match rulesRegex.

    Scans every entry of WebInfos (a snapshot is iterated so matched
    targets can be popped safely); on the first matching header value the
    target is reported as `cms`, recorded in OutInfos and removed from
    WebInfos.
    """
    for target in list(WebInfos):
        headers = WebInfos[target][0]
        body = WebInfos[target][1]
        status = WebInfos[target][2]
        # Server banner and <title> default to the literal string "None".
        server = headers['server'] if 'server' in headers else "None"
        titles = re.findall(self.rex, body)
        title = titles[0] if titles else "None"
        for name in headers:
            if re.findall(rulesRegex, headers[name]):
                print(
                    mkPut.fuchsia("[{0}]".format(
                        time.strftime("%H:%M:%S", time.localtime()))),
                    mkPut.red(cms), mkPut.green(server),
                    mkPut.yellow(status), target, mkPut.blue(title))
                OutInfos[target] = cms, server, status, title
                # Identified: drop it so later rules skip this target.
                WebInfos.pop(target)
                break
def main(self):
    """Run every fingerprint rule, then report targets no rule identified."""
    for rule in ruleDatas:
        cms = rule[0]
        where = rule[1]
        pattern = rule[2]
        # Dispatch on the rule's match location.
        if where == 'headers':
            self.heads(pattern, cms)
        elif where == 'cookie':
            self.cookieInfo(pattern, cms)
        else:
            self.bodys(pattern, cms)
    # Whatever is still in WebInfos matched no rule: print it without a
    # CMS tag and record server / status / title only.
    for target in WebInfos:
        headers = WebInfos[target][0]
        server = headers['server'] if 'server' in headers else "None"
        titles = re.findall(self.rex, WebInfos[target][1])
        title = titles[0] if titles else "None"
        print(
            mkPut.fuchsia("[{0}]".format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.green(server), mkPut.yellow(WebInfos[target][2]),
            target, mkPut.blue(title))
        OutInfos[target] = server, WebInfos[target][2], title
def fmain(ips):
    """Query fofa for every target in ips with a bounded thread pool.

    Exits with a configuration hint when the fofa API credentials are not
    filled in. `threadNum` bounds concurrent Fofa workers via a semaphore
    (each worker releases its slot when done).
    """
    # Guard clause replaces the old empty-if/else: both fields must be set.
    if not (fofaApi['email'] and fofaApi['key']):
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.yellow('[warning]'),
            '请修改配置文件{0}中fofaApi为您的API地址'.format(Paths.config[0] +
                                             'config.py'))
        exit(0)
    threads = []
    sem = threading.Semaphore(threadNum)
    try:
        for ip in ips:
            # CIDR ranges must be wrapped in fofa's ip="" query syntax.
            if '/' in ip:
                ip = 'ip="{0}"'.format(ip)
            sem.acquire()
            worker = Fofa(ip, sem)
            # .daemon attribute: setDaemon() is deprecated since Python 3.10.
            worker.daemon = True
            threads.append(worker)
            worker.start()
        for worker in threads:
            worker.join()
    except KeyboardInterrupt:
        # Ctrl-C: stop spawning/joining; daemon workers die with the process.
        pass
def run(self):
    """Fetch self.target once and cache (headers, body, status) in WebInfos.

    Runs as a worker thread: unreachable or slow hosts are skipped
    silently, and the semaphore slot is always released on exit.
    """
    session = requests.Session()
    session.keep_alive = False
    session.headers = self.headers
    session.verify = False
    # rememberMe=1 tags responses for the later shiro cookie checks.
    session.cookies.update({'rememberMe': '1'})
    try:
        resp = session.get(self.target, timeout=5)
        # `with lock` fixes the old acquire()/release() pair, which leaked
        # the lock forever if anything between the two calls raised.
        with lock:
            WebInfos[self.target] = resp.headers, resp.text, resp.status_code
            resp.close()
            print(mkPut.fuchsia("[{0}]".format(time.strftime(
                "%H:%M:%S", time.localtime()))), mkPut.green("[INFO]"),
                "命中{0}个链接".format(len(WebInfos)), end='\r', flush=True)
    # The three network failures were handled identically: skip the host.
    except (requests.exceptions.ReadTimeout,
            requests.exceptions.ConnectionError,
            requests.exceptions.ChunkedEncodingError):
        pass
    except KeyboardInterrupt:
        pass
    finally:
        # Always free the pool slot so the spawner can never deadlock.
        self.sem.release()
def mwebs():
    """Collect page info for every gathered URL using a bounded thread pool."""
    workers = []
    print(mkPut.fuchsia("[{0}]".format(
        time.strftime("%H:%M:%S", time.localtime()))),
        mkPut.green("[INFO]"), "共采集{0}个web链接".format(len(Urls.url)))
    print(mkPut.fuchsia("[{0}]".format(
        time.strftime("%H:%M:%S", time.localtime()))),
        mkPut.green("[INFO]"), "获取网页信息中")
    # threadNum bounds how many webInfo workers run at once.
    gate = threading.Semaphore(threadNum)
    try:
        for link in Urls.url:
            gate.acquire()
            worker = webInfo(link, gate)
            worker.setDaemon(True)
            workers.append(worker)
            worker.start()
        for worker in workers:
            worker.join()
    except KeyboardInterrupt:
        pass
    print()
    print()
def run(self):
    """Query the fofa search API for self.ip and report the results.

    Base64-encodes the query, performs one HTTPS request (with retries),
    then — while holding the global print lock — parses the JSON reply and
    hands the rows to self.ipInfo. Every failure mode prints an error
    instead of raising, and the semaphore slot is always released.
    """
    keywordsBs = base64.b64encode(self.ip.encode('utf-8')).decode('utf-8')
    url = ("https://fofa.so/api/v1/search/all?email={0}&key={1}&qbase64={2}"
           "&full=false&fields=ip,title,port,domain,protocol,host"
           "&size={3}").format(self.email, self.key, keywordsBs, fofaSize)
    try:
        session = requests.Session()
        session.keep_alive = False
        session.headers = self.headers
        session.mount("https://", HTTPAdapter(max_retries=10))
        target = session.get(url, timeout=10)
        # `with lock` keeps the whole report atomic AND fixes the old leak:
        # previously only the JSONDecodeError path released the lock, so a
        # failure inside ipInfo() left it held forever.
        with lock:
            print(
                mkPut.fuchsia('\n[{0}]'.format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.green('[INFO]'), '正在检测IP:', self.ip)
            print(
                mkPut.fuchsia('[{0}]'.format(
                    time.strftime("%H:%M:%S", time.localtime()))),
                mkPut.green('[INFO]'), '正在通过API获取信息...')
            try:
                datas = json.loads(target.text)
                self.ipInfo(datas['results'])
            except json.decoder.JSONDecodeError:
                print(
                    mkPut.fuchsia('[{0}]'.format(
                        time.strftime("%H:%M:%S", time.localtime()))),
                    mkPut.red('[ERROR]'), '获取失败,请重试')
            session.close()
    except requests.exceptions.ReadTimeout:
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.red('[ERROR]'), '请求超时')
    except requests.exceptions.ConnectionError:
        print(
            mkPut.fuchsia('[{0}]'.format(
                time.strftime("%H:%M:%S", time.localtime()))),
            mkPut.red('[ERROR]'), '网络超时')
    finally:
        # Always free the pool slot, whatever happened above.
        self.sem.release()
    # Tail of the LoggingLevel class — its header and SUCCESS attribute are
    # outside this view. These are the numeric levels registered below.
    SYSINFO = 8
    ERROR = 7
    WARNING = 6


# colorama: reset colours after every print so they don't bleed between lines.
init(autoreset=True)
# Register coloured bracket tags as the display names for each custom level.
logging.addLevelName(LoggingLevel.SUCCESS, mkPut.cyan("[+]"))
logging.addLevelName(LoggingLevel.SYSINFO, mkPut.green("[INFO]"))
logging.addLevelName(LoggingLevel.ERROR, mkPut.red("[ERROR]"))
logging.addLevelName(LoggingLevel.WARNING, mkPut.yellow("[WARNING]"))

# Single shared logger writing "[HH:MM:SS] <tag> <message>" to stdout.
LOGGER = logging.getLogger("GlassLog")
formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s",
                              datefmt=mkPut.fuchsia("[%H:%M:%S]"))
LOGGER_HANDLER = logging.StreamHandler(sys.stdout)
LOGGER_HANDLER.setFormatter(formatter)
LOGGER.addHandler(LOGGER_HANDLER)
# Threshold WARNING (6): SYSINFO/ERROR/WARNING all pass, lower levels don't.
LOGGER.setLevel(LoggingLevel.WARNING)


class MY_LOGGER:
    """Thin class-level facade over LOGGER for the three custom levels.

    NOTE(review): the methods take no ``self`` and are not ``@staticmethod``,
    so they only work when called on the class itself, e.g.
    ``MY_LOGGER.info("msg")`` — confirm callers never instantiate this class.
    """

    def info(msg):
        # Log at the custom SYSINFO level (renders as green "[INFO]").
        return LOGGER.log(LoggingLevel.SYSINFO, msg)

    def error(msg):
        # Log at the custom ERROR level (renders as red "[ERROR]").
        return LOGGER.log(LoggingLevel.ERROR, msg)

    def warning(msg):
        # Log at the custom WARNING level (renders as yellow "[WARNING]").
        return LOGGER.log(LoggingLevel.WARNING, msg)