def init_conf(path):
    """Write a default tentacle configuration file to *path*.

    Creates sections for basic/proxy settings and for each supported
    search-engine API (google, zoomeye, fofa, shodan, github, ceye)
    with placeholder credentials, then saves them via configparser.
    """
    logger.sysinfo("Init tentacle config...")
    defaults = {
        "basic": {"timeout": "5", "max_retries": "0"},
        "proxy": {"proxy": False, "proxy_url": "socks5://127.0.0.1:1080"},
        "google_api": {"developer_key": "developer_key", "search_enging": "developer_key"},
        "zoomeye_api": {"username": "******", "password": "******"},
        "fofa_api": {"email": "*****@*****.**", "token": "tentacle_123456"},
        "shodan_api": {"token": "token@tentacle"},
        "github_api": {"token": "token@tentacle"},
        "ceye_api": {"identifier": "test.ceye.io", "token": "66ca15b9e782b5127d846af76bbe2aa1"},
    }
    parser = configparser.ConfigParser()
    for section, options in defaults.items():
        parser[section] = options
    with open(path, 'w+') as configfile:
        parser.write(configfile)
def _fofa_api_today_poc(page):
    """Scrape fofa's client page for today's POC list and resolve each
    POC's fofa search expression into concrete targets.

    Returns a list of (target, server) tuples; server is the lower-cased
    keyword extracted from the POC name, or None when no keyword matches.
    """
    target_list = []
    url = "https://fofa.so/about_client"
    res = mycurl('get', url)
    if res is not None:
        poc_soup = BeautifulSoup(res.content, 'lxml')
        poc_result_name = poc_soup.select('body > div.fdo > div:nth-of-type(3) > div > div > ul > li:nth-of-type(1)')
        poc_result_raw = poc_soup.select('body > div.fdo > div:nth-of-type(3) > div > div > ul > li:nth-of-type(4) > a')
        for i in range(len(poc_result_name)):
            # Strip the surrounding HTML tags from the scraped fragments.
            result_name = str(poc_result_name[i])[11:-5]
            result_raw = str(poc_result_raw[i])[str(poc_result_raw[i]).find(';">'):-4]
            result_raw = result_raw.replace(';">', '')
            logger.sysinfo("Search fofa api %s: %s" % (result_name, result_raw))
            matchObj = re.search(r'[a-zA-Z0-9]+', result_name)
            if matchObj:
                server = matchObj.group().lower()
                for z in _fofa_api(result_raw, page, False):
                    target_list.append((z, server))
            else:
                for z in _fofa_api(result_raw, page, False):
                    # BUG FIX: list.append takes a single argument; the
                    # original append(z, None) raised TypeError.
                    target_list.append((z, None))
    return target_list
def _fofa_api(search, page, flag=True):
    '''
    https://fofa.so/api#auth
    '''
    url_login = '******'
    result = []
    try:
        email = conf['config']['fofa_api']['email']
        key = conf['config']['fofa_api']['token']
    except KeyError:
        sys.exit(
            logger.error(
                "Load tentacle config error: zfofa_api, please check the config in tentacle.conf."
            ))
    if flag:
        logger.sysinfo("Using fofa api...")
    # fofa expects the query base64-encoded.
    search = str(base64encode(bytes(search, 'utf-8')), 'utf-8')
    for current in range(1, page + 1):
        logger.debug("Find fofa url of %d page..." % int(current))
        query = '?email={0}&key={1}&page={2}&qbase64={3}'.format(email, key, current, search)
        res = mycurl('post', url_login + query)
        if res is None:
            continue
        if int(res.status_code) == 401:
            sys.exit(
                logger.error(
                    "Error fofa api access, maybe you should pay fofa coin and enjoy service."
                ))
        res_json = json.loads(res.text)
        if res_json["error"] is None:
            if len(res_json.get('results')) == 0:
                break
            for item in res_json.get('results'):
                logger.debug("Fofa Found: %s" % item[0])
                result.append(item[0])
    return result
def _shodan_api(search, page):
    '''
    Please input your Shodan API Key (https://account.shodan.io/).
    '''
    try:
        token = conf['config']['shodan_api']['token']
    except KeyError:
        sys.exit(
            logger.error(
                "Load tentacle config error: shodan_api, please check the config in tentacle.conf."
            ))
    logger.sysinfo("Using shodan api...")
    anslist = []
    for page_no in range(1, page + 1):
        logger.debug("Find shodan url of %d page..." % int(page_no))
        _proxies = None
        try:
            api = shodan.Shodan(token, proxies=_proxies)
            result = api.search(query=search, page=page_no)
        except shodan.APIError:
            logger.error(
                "Error shodan api access, maybe you should pay $49 and enjoy service."
            )
            return anslist
        if result.get('total') == 0:
            logger.error("Found 0 target.")
            return anslist
        for match in result.get('matches'):
            # Targets are reported as "ip:port".
            target = match.get('ip_str') + ':' + str(match.get('port'))
            logger.debug("Shodan Found: %s" % target)
            anslist.append(target)
    return anslist
def run(self):
    """Run the scan task: fill the work queue, spawn worker threads and
    keep the queue topped up until all work is done.

    Progress is printed at start, then at most once a minute while
    workers are active, and once more when the task finishes.
    """
    logger.sysinfo('任务 开始: %s', self.name)
    # _put_queue() is a generator; each next() enqueues one batch of work.
    pool = self._put_queue()
    next(pool)
    self.print_progress()
    for i in range(0, self.thread_num):
        t = threading.Thread(target=self._work, name=str(i))
        self.set_thread_daemon(t)
        t.start()
    logger.debug("Wait for thread...")
    while True:
        if self.thread_count > 0 and self.is_continue:
            now_time = time.time()
            # Report progress at most once every 60 seconds.
            if now_time - self.current_time >= 60:
                self.current_time = now_time
                self.print_progress()
            # Top up the queue when it drains below the pool threshold.
            if self.put_queue_flag and self.queue.qsize() < self.queue_pool_total:
                try:
                    next(pool)
                    logger.debug("Add queue pool for engine.")
                except StopIteration:
                    # Generator exhausted: no more work to enqueue.
                    self.put_queue_flag = False
            time.sleep(0.01)
        else:
            # All workers finished (or the task was stopped).
            self.print_progress()
            break
    logger.sysinfo('Task Finished: %s', self.name)
def load_modules(self):
    """Load the modules named in conf['modules_name'] and validate that
    the requested function exists.

    With a single module and func 'show'/'help', prints the module's
    built-in help and exits.
    """
    modules_name = conf['modules_name']
    func_name = conf['func_name']
    # BUG FIX: len() can never be negative; the original `< 0` test made
    # the "no modules" error unreachable.
    if len(modules_name) == 0:
        msg = 'Can\'t find any modules. Please check you input.'
        sys.exit(logger.error(msg))
    elif len(modules_name) == 1:
        logger.sysinfo('Loading modual: %s.' % (modules_name[0]))
        module = self._load_module(modules_name[0])
        if func_name.lower() in ['show', 'help'] and module:
            sys.exit(help(module))
    else:
        logger.sysinfo('Loading moduals...')
        for module_name in conf['modules_name']:
            module = self._load_module(module_name)
    if len(self.modules) > 1 and func_name.lower() in ['show', 'help']:
        sys.exit(logger.error('Can\'t show so many modules.'))
    elif func_name not in dir(module):
        logger.error('Can\'t find function: %s:%s(), please make sure the function is in the module.' % (module.__name__, func_name))
def output_excal(datalines, file, taskname=None):
    """Export a list of row dicts to an .xlsx workbook under OUTPUT_PATH.

    Column titles are collected lazily from the keys seen in *datalines*;
    values are coerced to something openpyxl can store (bytes decoded as
    utf-8, lists/dicts stringified, unsupported types flagged).
    """
    filename = os.path.join(paths.OUTPUT_PATH, file + '.xlsx')
    if taskname:
        logger.info('Task export to %s: %s' % (filename, taskname))
    else:
        logger.info('Export to %s...' % (filename))
    book = Workbook()
    ws = book.active
    i = 1  # row cursor; row 1 is reserved for the headers
    titleList = []
    for line in datalines:
        i = i + 1
        for key in line:
            if key not in titleList:
                # First time we see this key: add its header cell.
                titleList.append(key)
                ws.cell(row=1, column=len(titleList)).value = key
            column = titleList.index(key) + 1
            value = line[key]
            try:
                if value is None or value == '':
                    ws.cell(row=i, column=column).value = ""
                elif isinstance(value, (int, str)):
                    ws.cell(row=i, column=column).value = value
                elif isinstance(value, bytes):
                    ws.cell(row=i, column=column).value = str(value, 'utf-8')
                elif isinstance(value, (list, dict)):
                    ws.cell(row=i, column=column).value = str(value)
                else:
                    ws.cell(row=i, column=column).value = "Types of printing are not supported."
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt/SystemExit
                # are not swallowed.
                ws.cell(row=i, column=column).value = "Some error."
    book.save(filename)
    if taskname:
        logger.sysinfo('Task exported to %s successful: %s' % (filename, taskname))
    else:
        logger.sysinfo('Exported to %s successful!' % (filename))
async def _fofa_api(search, page, flag=True):
    '''
    Query the fofa search API page by page, yielding each matched target.

    https://fofa.so/api#auth
    '''
    url_login = '******'
    try:
        email = conf['fofa_api']['email']
        key = conf['fofa_api']['token']
    except KeyError:
        sys.exit(logger.error("Load tentacle config error: zfofa_api, please check the config in tentacle.conf."))
    if flag:
        logger.sysinfo("Using fofa api...")
    # fofa expects the query base64-encoded.
    # NOTE(review): the sync variant passes bytes(search, 'utf-8') to
    # base64encode; confirm the helper accepts a plain str here.
    search = str(base64encode(search))
    async with ClientSession() as session:
        for p in range(1, page + 1):
            logger.debug("Find fofa url of %d page..." % int(p))
            async with session.post(url=url_login + '?email={0}&key={1}&page={2}&qbase64={3}'.format(email, key, p, search)) as response:
                if response != None:
                    if int(response.status) == 401:
                        sys.exit(logger.error("Error fofa api access, maybe you should pay fofa coin and enjoy service."))
                    else:
                        res = await response.text()
                        if res != None:
                            res_json = json.loads(res)
                            if res_json["error"] is None:
                                # An empty results page means everything was consumed.
                                if len(res_json.get('results')) == 0:
                                    break
                                for item in res_json.get('results'):
                                    logger.debug("Fofa Found: %s" % item[0])
                                    yield item[0]
                            elif 'errmsg' in res_json:
                                # Out of fofa coins: report and stop paging.
                                if 'FOFA coin is not enough!' in res_json["errmsg"]:
                                    logger.sysinfo(res_json["errmsg"])
                                    break
def _parameter_register(self, input_parameter):
    """Parse an '&'-separated key=value string into self.parameter.

    Values from conf['parameter'] (if present) serve as a base and are
    overridden by the parsed pairs. Returns the resulting dict; an empty
    dict when no input was given.
    """
    if not input_parameter:
        self.parameter = {}
        return self.parameter
    self.parameter = {}
    if input_parameter != None:
        if 'parameter' in conf.keys():
            self.parameter = conf['parameter']
        try:
            for pair in input_parameter.split('&'):
                name, value = pair.split('=')
                self.parameter[name] = value
        except:
            msg = 'The parameter input error, please check your input e.g. -p "userlist=user.txt", and you should make sure the module\'s function need the parameter. '
            sys.exit(logger.error(msg))
    else:
        self.parameter = {}
    logger.sysinfo("Set parameter: %s" % str(input_parameter))
    return self.parameter
def update_program():
    """Self-update tentacle by running 'git checkout . && git pull' in the
    repository root; falls back to platform-specific advice on failure.
    """
    success = False
    if not os.path.exists(os.path.join(paths.ROOT_PATH, ".git")):
        msg = "Have not a git repository. Please checkout the 'tentacle' repository "
        msg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/orleven/tentacle.git tentacle')"
        logger.error(msg)
    else:
        msg = "Updating tentacle to the latest version from the gitHub repository."
        logger.sysinfo(msg)
        msg = "Tentacle will try to update itself using 'git' command."
        logger.sysinfo(msg)
        data_to_stdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))
        try:
            # cwd is encoded to the locale encoding to dodge unicode issues; see
            # http://blog.stastnarodina.com/honza-en/spot/python-unicodeencodeerror/
            process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY,
                                       shell=True,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       cwd=paths.ROOT_PATH.encode(locale.getpreferredencoding()))
            poll_process(process, True)
            stdout, stderr = process.communicate()
            # returncode 0 means the pull succeeded.
            success = not process.returncode
        except (IOError, OSError) as ex:
            success = False
            stderr = get_safe_ex_string(ex)
        if success:
            logger.success("The latest revision '%s'" % (get_revision_number()))
        else:
            # NOTE(review): communicate() returns bytes here (pipes are not in
            # text mode), so the substring test below presumably expects str —
            # confirm poll_process/get_safe_ex_string normalize stderr.
            if "Not a git repository" in stderr:
                msg = "Not a valid git repository. Please checkout the 'orleven/tentacle' repository "
                msg += "from GitHub (e.g. 'git clone --depth 1 https://github.com/orleven/tentacle.git tentacle')"
                logger.error(msg)
            else:
                logger.error("Update could not be completed ('%s')" % re.sub(r"\W+", " ", stderr).strip())
    if not success:
        # Give platform-specific recovery advice.
        if sys.platform == 'win32':
            msg = "for Windows platform it's recommended "
            msg += "to use a GitHub for Windows client for updating "
            msg += "purposes (http://windows.github.com/) or just "
            msg += "download the latest snapshot from "
            msg += "https://github.com/orleven/tentacle"
        else:
            msg = "For Linux platform it's required "
            msg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')"
        logger.sysinfo(msg)
def show(self):
    """Print the table of available module paths and exit."""
    prefix_len = len(paths.ROOT_PATH) + 1
    parts = ['There are available modules as follows: \r\n']
    parts.append('-----------------------------------------------------------\r\n')
    parts.append('| {: <55} |\r\n'.format('Module path, you can load module by -m module_path,'))
    parts.append('| {: <55} |\r\n'.format('and you can see module\' description for -f show'))
    parts.append('-----------------------------------------------------------\r\n')
    for parent, dirnames, filenames in os.walk(paths.SCRIPT_PATH, followlinks=True):
        for each in filenames:
            if '__init__' in each:
                continue
            file_path = os.path.join(parent, each)
            # Show the path relative to the project root, minus '.py'.
            parts.append('| {: <55} |\r\n'.format(file_path[prefix_len:-3]))
    parts.append('-----------------------------------------------------------\r\n')
    sys.exit(logger.sysinfo(''.join(parts)))
def info_register(args):
    """Record the engine info option from the parsed CLI args into conf."""
    if not args.info:
        return
    conf['info'] = args.info
    logger.sysinfo("载入引擎: %s" % (conf['info']))
def _github_api(search, page):
    '''
    Search GitHub code via the API and harvest html_urls of matching files.

    https://github.com/settings/tokens  Generate new token
    '''
    per_page_limit = 50
    github_timeout = 20
    # BUG FIX: headers was commented out in the original, so the
    # headers["Authorization"] assignment below raised NameError.
    headers = {}
    url_api = "https://api.github.com/search/code?sort=updated&order=desc&per_page=%s&q=" % per_page_limit
    try:
        token = conf['config']['github_api']['token']
    except KeyError:
        sys.exit(
            logger.error(
                "Load tentacle config error: github_api, please check the config in tentacle.conf."
            ))
    headers["Authorization"] = "token " + token
    resp = mycurl('get', url_api + search, headers=headers, timeout=github_timeout)
    if resp != None and resp.status_code == 200:
        logger.sysinfo("Using github api...")
        res_json = json.loads(resp.content)
        total = res_json["total_count"]
        logger.sysinfo("Found github url: %d" % int(total))
        # Cap the number of pages at the caller's request (min of the two).
        page_num = (total // per_page_limit) + 1
        page_num = page_num if page_num < page else page
        git_urls = []
        for p in range(1, page_num + 1):
            # Search url
            _url_api = "https://api.github.com/search/code?sort=updated&order=desc&page=%d&per_page=%s&q=" % (p, per_page_limit)
            _resp = mycurl('get', _url_api + search, headers=headers, timeout=github_timeout)
            if _resp != None and _resp.status_code == 200:
                logger.debug("Find github url of %d page..." % int(p))
                try:
                    _res_json = json.loads(_resp.content)
                    for i in range(len(_res_json['items'])):
                        git_urls.append(_res_json['items'][i]["html_url"])
                except Exception:
                    pass
            elif _resp != None and int(_resp.status_code) == 422:
                logger.error("Warning: github api access rate limit 20/minute, 5000/hour, 1000 search results.")
                logger.error("Error github api token. Wait for a minute.")
                # Rate limited: use the wait to inspect the urls gathered so far.
                logger.sysinfo("So, this program will access target url and wait for rate limit. ")
                _github_extract(git_urls, search)
                git_urls = []
            elif _resp != None and int(_resp.status_code) == 403:
                # NOTE(review): reassigning the loop variable does not retry
                # the page — the for loop overwrites p on the next iteration.
                p = p - 1
                logger.error("Too many times for access. So we should wait for ten minute.")
                time.sleep(60 * 10)
            else:
                p = p - 1
                # BUG FIX: _resp can be None in this branch; guard before
                # dereferencing it.
                if _resp != None:
                    logger.error(_resp.text)
                    logger.error(_resp.status_code)
                time.sleep(60)
        # Final flush of whatever is left.
        # BUG FIX: argument order now matches the in-loop call
        # (_github_extract(git_urls, search)); the original flipped them here.
        _github_extract(git_urls, search)
        git_urls = []
    # BUG FIX: guard resp before reading status_code (mycurl may return None).
    elif resp != None and int(resp.status_code) == 422:
        sys.exit(logger.error("Error github api token."))
    return []
def module_register(args):
    """Resolve the -m/--show command line options into conf['modules_name'].

    Supports: --show (print the module table and exit), '-m *' (all
    scripts), '-m @dir' (every script in a subdirectory), and plain
    comma-separated names/paths. Also records the --noportscan flag.
    """
    # Length of the project-root prefix, used to relativize file paths.
    _len = len(paths.ROOT_PATH) + 1
    if args.show:
        msg = 'There are available modules as follows: \r\n'
        msg += '-----------------------------------------------------------\r\n'
        msg += '| {: <55} |\r\n'.format('Module path, you can load module by -m module_path,')
        msg += '| {: <55} |\r\n'.format('and you can see module\' description for -f show')
        msg += '-----------------------------------------------------------\r\n'
        for parent, dirnames, filenames in os.walk(paths.SCRIPT_PATH, followlinks=True):
            for each in filenames:
                if '__init__' in each:
                    continue
                file_path = os.path.join(parent, each)
                # Path relative to root, minus the '.py' suffix.
                msg += '| {: <55} |\r\n'.format(file_path[_len:-3])
        msg += '-----------------------------------------------------------\r\n'
        sys.exit(logger.sysinfo(msg))
    input_module = args.module
    if not input_module:
        msg = 'Use -m to load module. Example: [-m test] or [-m ./script/test.py] or [-m @thinkphp], and you can see all module name by --show.'
        sys.exit(logger.error(msg))
    modules = []
    # -m * : load every script under SCRIPT_PATH.
    if input_module == '*':
        for parent, dirnames, filenames in os.walk(paths.SCRIPT_PATH, followlinks=True):
            if len(filenames) == 0:
                msg = 'Module [%s] is null.' % paths.SCRIPT_PATH
                logger.error(msg)
            for each in filenames:
                if '__init__' in each:
                    continue
                file_path = os.path.join(parent, each)
                # Convert the file path into a dotted module name.
                modules.append('.'.join(re.split('[\\\\/]', file_path[_len:-3])))
    else:
        # -m test,./script/test.py,@www
        for _module in input_module.split(','):
            # @www : every script in the named subdirectory.
            if _module.startswith("@"):
                if _module[1:] == 'special':
                    _path = os.path.join(paths.SPECIAL_SCRIPT_PATH, _module[1:], '*.py')
                else:
                    _path = os.path.join(paths.SCRIPT_PATH, _module[1:], '*.py')
                module_name_list = glob.glob(_path)
                if len(module_name_list) == 0:
                    msg = 'Module is not exist: %s (%s)' % (_module, _path)
                    logger.error(msg)
                else:
                    for each in module_name_list:
                        if '__init__' in each:
                            continue
                        modules.append('.'.join(re.split('[\\\\/]', each[_len:-3])))
            else:
                if not _module.endswith('.py'):
                    _module += '.py'
                # handle input: "-m ./script/test.py"
                if os.path.split(_module)[0]:
                    _path = os.path.abspath(os.path.join(paths.ROOT_PATH, _module))
                # handle input: "-m test" "-m test.py"
                else:
                    _path = os.path.abspath(os.path.join(paths.SCRIPT_PATH, _module))
                if os.path.isfile(_path):
                    modules.append('.'.join(re.split('[\\\\/]', _path[_len:-3])))
                else:
                    msg = 'Module is\'t exist: %s (%s)' % (_module, _path)
                    logger.error(msg)
    conf['modules_name'] = list(set(modules))
    logger.debug("Set module: %s" % input_module)
    conf['noportscan'] = args.noportscan
    logger.debug("Set port scan: %s" % args.noportscan)
def load(self):
    """Load the configured POC modules (plus the mandatory port_scan
    module), optionally printing a single module's POC info for func
    'show'/'help', and sort the loaded modules by their POC priority.
    """
    # BUG FIX: len() is never negative; the original `< 0` made this
    # branch unreachable.
    if len(self.modules_name) == 0:
        msg = 'Can\'t find any modules. Please check you input.'
        sys.exit(logger.error(msg))
    elif len(self.modules_name) == 1:
        logger.sysinfo('Loading modual: %s' % (self.modules_name[0]))
        module = self._load_module(self.modules_name[0])
        if module == None:
            logger.error(
                "Invalid POC script, Please check the script: %s" % self.modules_name[0])
            sys.exit()
        if self.func_name.lower() in ['show', 'help'] and module:
            # Print the POC's metadata, falling back per attribute.
            poc = module.POC()
            msg = "Show POC's Infomation:"
            msg += "\r\n ------------------------------- "
            msg += "\r\n| Name: " + str(poc.name if 'name' in poc.__dict__ else 'unknown')
            msg += "\r\n| Keyword: " + str(poc.keyword if 'keyword' in poc.__dict__ else ['unknown'])
            msg += "\r\n| Infomation: " + str(poc.info if 'info' in poc.__dict__ else 'Unknown POC, please set the infomation for me.')
            msg += "\r\n| Level: " + str(poc.level if 'level' in poc.__dict__ else 'unknown')
            msg += "\r\n| Refer: " + str(poc.refer if 'refer' in poc.__dict__ else None)
            msg += "\r\n| Type: " + str(poc.type if 'type' in poc.__dict__ else 'unknown')
            msg += "\r\n| Repaire: " + str(poc.repaire if 'repaire' in poc.__dict__ else 'unknown')
            msg += "\r\n| Default Port: " + str(poc.service_type if 'service_type' in poc.__dict__ else 'unknown')
            msg += "\r\n ------------------------------- "
            logger.sysinfo(msg)
            sys.exit()
        self.modules.append(module)
        # The port scanner always participates; append it if missing.
        if 'script.info.port_scan' not in self.modules_name:
            module = self._load_module('script.info.port_scan')
            self.modules_name.append('script.info.port_scan')
            self.modules.append(module)
            if module == None:
                logger.error(
                    "Invalid POC script, Please check the script: %s" % self.modules_name[0])
                sys.exit()
    else:
        modules = []
        logger.sysinfo('Loading moduals...')
        if 'script.info.port_scan' not in self.modules_name:
            self.modules_name.append('script.info.port_scan')
        self.modules_name = list(set(self.modules_name))
        for module_name in self.modules_name:
            module = self._load_module(module_name)
            if module == None:
                logger.error(
                    "Invalid POC script, Please check the script: %s" % module_name)
                continue
            modules.append(module)
        if len(self.modules) > 1 and self.func_name.lower() in ['show', 'help']:
            sys.exit(logger.error('Can\'t show so many modules.'))
        # sort by each POC's declared priority
        self.modules = sorted(modules, key=lambda m: m.POC().priority)
def load_targets(self):
    """Populate self.targets from whichever target source is configured.

    Exactly one 'target_*' key in conf is honored (simple string, file,
    nmap xml, network range, previous task db, or a search-engine / API
    backend). Exits when no source is configured or nothing was loaded.
    """
    if 'target_simple' in conf.keys():
        self._load_target(conf['target_simple'])
        logger.sysinfo("Loading target: %s" % (conf['target_simple']))
    elif 'target_file' in conf.keys():
        for _line in open(conf['target_file'], 'r'):
            line = _line.strip()
            if line:
                self._load_target(line)
        logger.sysinfo("Loading target: %s" % (conf['target_file']))
    elif 'target_nmap_xml' in conf.keys():
        import xml.etree.ElementTree as ET
        tree = ET.parse(conf['target_nmap_xml'])
        root = tree.getroot()
        for host in root.findall('host'):
            host_id = host.find('address').get('addr')
            for port in host.iter('port'):
                port_id = port.attrib.get('portid')
                port_protocol = port.attrib.get('protocol')
                port_state = port.find('state').attrib.get('state')
                try:
                    port_service = port.find('service').attrib.get('name')
                except Exception:
                    port_service = "None"
                # Only ports that are not closed/filtered become targets.
                if port_state.lower() not in ['closed', 'filtered']:
                    self._load_target(host_id + ":" + port_id, port_service)
        logger.sysinfo("Loading target: %s" % (conf['target_nmap_xml']))
    elif 'target_network' in conf.keys():
        self._load_target(conf['target_network'])
        logger.sysinfo("Loading target: %s" % (conf['target_network']))
    elif 'target_task' in conf.keys():
        hashdb = HashDB(os.path.join(paths.DATA_PATH, conf['target_task']))
        hashdb.connect()
        for _row in hashdb.select_all():
            # Prefer the recorded url (column 4); fall back to host:port.
            if _row[4] != None and _row[4] != '':
                self._load_target(_row[4])
            else:
                self._load_target(_row[2] + ":" + _row[3])
        logger.sysinfo("Loading target: %s" % (conf['target_task']))
    elif 'target_search_engine' in conf.keys():
        logger.sysinfo("Loading target by baidu/bing/360so: %s" % (conf['target_search_engine']))
        urls = search_engine(conf['target_search_engine'])
        for _url in urls:
            if _url:
                self._load_target(_url)
    elif 'target_zoomeye' in conf.keys():
        logger.sysinfo("Loading target by zoomeye: %s" % (conf['target_zoomeye']))
        urls = search_api(conf['target_zoomeye'])
        for _url in urls:
            if _url:
                self._load_target(_url)
    elif 'target_shodan' in conf.keys():
        logger.sysinfo("Loading target by shadon: %s" % (conf['target_shodan']))
        urls = search_api(conf['target_shodan'])
        for _url in urls:
            if _url:
                self._load_target(_url)
    elif 'target_fofa' in conf.keys():
        logger.sysinfo("Loading target by fofa: %s" % (conf['target_fofa']))
        urls = search_api(conf['target_fofa'])
        for _url in urls:
            if _url:
                self._load_target(_url)
    elif 'target_fofa_today_poc' in conf.keys():
        logger.sysinfo("Loading target by fofa today poc: %s" % (conf['target_fofa_today_poc']))
        obj = search_api(conf['target_fofa_today_poc'])
        for _url, _server in obj:
            if _url:
                self._load_target(_url, _server)
    elif 'target_google' in conf.keys():
        logger.sysinfo("Loading target by google: %s" % (conf['target_google']))
        urls = search_api(conf['target_google'])
        for _url in urls:
            if _url:
                self._load_target(_url)
    elif 'target_github' in conf.keys():
        logger.sysinfo("Loading target by github: %s" % (conf['target_github']))
        urls = search_api(conf['target_github'])
        # BUG FIX: the original fetched the github urls but never loaded
        # them, so this branch produced zero targets.
        for _url in urls:
            if _url:
                self._load_target(_url)
    else:
        sys.exit(
            logger.error("Can't load any targets! Please check input."))
    if len(self.targets) == 0:
        sys.exit(
            logger.error("Can't load any targets! Please check input."))
def _port_register(self, input_target):
    """Parse the limit_port_scan option into self.limit_port.

    Accepts comma-separated items: single ports, 'start-end' ranges,
    'top10/top50/top100/top150/top1000' presets, and 'all'/'*'.
    Exits on any illegal item; the final list is de-duplicated.
    """
    if not input_target.limit_port_scan:
        return
    self.limit_port = []
    logger.sysinfo("Set port: %s" % (input_target.limit_port_scan))
    for _port_scope in input_target.limit_port_scan.lower().split(','):
        if '-' in _port_scope:
            # Numeric range, e.g. "8000-9000".
            try:
                match = re.compile(r'(\d+)-(\d+)').match(_port_scope)
                if match:
                    start_port = int(match.group(1))
                    end_port = int(match.group(2))
                    if 0 < start_port < 65536 and 0 < end_port < 65536:
                        self.limit_port += [x for x in range(start_port, end_port + 1)]
                    else:
                        sys.exit(logger.error("Illegal input: %s" % _port_scope))
                else:
                    sys.exit(logger.error("Illegal input: %s" % _port_scope))
            except Exception:
                sys.exit(logger.error("Illegal input: %s" % _port_scope))
        elif 'top' in _port_scope or 'all' == _port_scope or '*' == _port_scope:
            # The presets are mutually exclusive, so chain them; the final
            # else rejects unknown 'top...' spellings without punishing the
            # branches that did match.
            if 'top10' == _port_scope:
                self.limit_port += [x for x in SERVICE_PORT_MAP.TOP10[1]]
            elif 'top50' == _port_scope:
                self.limit_port += [x for x in SERVICE_PORT_MAP.TOP50[1]]
            elif 'top100' == _port_scope:
                self.limit_port += [x for x in SERVICE_PORT_MAP.TOP100[1]]
            elif 'top150' == _port_scope:
                self.limit_port += [x for x in SERVICE_PORT_MAP.TOP150[1]]
            elif 'top1000' == _port_scope:
                # BUG FIX: the original added TOP100 here (copy-paste slip).
                # TODO confirm SERVICE_PORT_MAP defines TOP1000.
                self.limit_port += [x for x in SERVICE_PORT_MAP.TOP1000[1]]
            elif 'all' == _port_scope or '*' == _port_scope:
                self.limit_port += [x for x in range(1, 65536)]
            else:
                sys.exit(logger.error("Illegal input: %s" % _port_scope))
        else:
            # Single port number.
            try:
                start_port = int(_port_scope)
                if 0 < start_port < 65536:
                    self.limit_port.append(start_port)
                else:
                    sys.exit(logger.error("Illegal input: %s" % _port_scope))
            except Exception:
                sys.exit(logger.error("Illegal input: %s" % _port_scope))
    self.limit_port = list(set(self.limit_port))
def print_progress(self):
    """Log a one-line progress summary for this task."""
    self.total = len(self.targets) * len(self.modules) - self.exclude
    elapsed = time.time() - self.start_time
    template = ('[%s] %s found | %s error | %s remaining | %s scanning | '
                '%s scanned in %.2f seconds.(total %s)')
    msg = template % (self.name, self.found_count, self.error_count,
                      self.queue.qsize(), self.scanning_count,
                      self.scan_count, elapsed, self.total)
    logger.sysinfo(msg)
def thread_register(args):
    """Validate the requested thread count (1-500) and store it in conf."""
    if not 0 < args.thread < 501:
        sys.exit(logger.error('输入的线程数量错误 [-t] , 范围: 1 - 500.'))
    conf['thread_num'] = args.thread
    logger.sysinfo("设置线程数量: %s" % str(conf['thread_num']))
def __del__(self):
    """Close the task's hash database and log completion on teardown."""
    self.hashdb.disconnect()
    logger.sysinfo("Task over: %s" % self.name)
def _github_api(search, page):
    '''
    Search GitHub code for *search*, fetch each hit and grep it for
    leaked information (mails, domains, passwords, ips, ...).

    https://github.com/settings/tokens  Generate new token
    '''
    per_page_limit = 50
    github_timeout = 20
    # BUG FIX: this dict was commented out in the original but is used in
    # the rate-limit branch below (NameError at runtime).
    InformationRegex = {
        "mail": r"([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)",
        "domain": r"(http[s]*://[^<|\"|?]*)",
        "pass1": r"(pass[^<|?]{30})",
        "pass2": r"(password[^<|?]{30})",
        "pass3": r"(pwd[^<|?]{30})",
        "root": r"(root[^<|?]{0,30})",
        "title": r"<title>(.*)<\/title>",
        "ip": r"([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}:*[0-9]{0,5})",
    }
    headers = {}
    url_api = "https://api.github.com/search/code?sort=updated&order=desc&per_page=%s&q=" % per_page_limit
    try:
        token = conf['config']['github_api']['token']
    except KeyError:
        sys.exit(logger.error("Load tentacle config error: github_api, please check the config in tentacle.conf."))
    headers["Authorization"] = "token " + token
    resp = mycurl('get', url_api + search, headers=headers, timeout=github_timeout)
    if resp != None and resp.status_code == 200:
        logger.sysinfo("Using github api...")
        res_json = json.loads(resp.content)
        total = res_json["total_count"]
        logger.sysinfo("Found github url: %d" % int(total))
        page_num = (total // per_page_limit) + 1
        # BUG FIX: cap at the requested page count; the original condition
        # (page < page_num) selected the LARGER of the two values.
        page_num = page_num if page_num < page else page
        git_urls = []
        for p in range(1, page_num + 1):
            # Search url
            _url_api = "https://api.github.com/search/code?sort=updated&order=desc&page=%d&per_page=%s&q=" % (p, per_page_limit)
            _resp = mycurl('get', _url_api + search, headers=headers, timeout=github_timeout)
            if _resp != None and _resp.status_code == 200:
                logger.debug("Find github url of %d page..." % int(p))
                try:
                    _res_json = json.loads(_resp.content)
                    for i in range(len(_res_json['items'])):
                        git_urls.append(_res_json['items'][i]["html_url"])
                except Exception:
                    pass
            elif _resp != None and int(_resp.status_code) == 422:
                logger.error("Warning: github api access rate limit 20/minute, 5000/hour, 1000 search results.")
                logger.error("Error github api token. Wait for a minute.")
                # Rate limited; use the wait to inspect the urls gathered so far.
                logger.sysinfo("So, this program will access target url and wait for rate limit. ")
                git_urls = list(set(git_urls))
                for url in git_urls:
                    try:
                        _resp = mycurl('get', url, timeout=github_timeout)
                    except Exception:
                        _resp = None
                    if _resp and _resp.status_code == 200:
                        # Hoisted: normalize the page text once, not per-regex.
                        # BUG FIX: restore HTML-entity unescaping; the original
                        # had identity replaces ('"' -> '"', '&' -> '&', ...).
                        _text = _resp.text.lower()
                        _text = _text.replace('&quot;', '"')
                        _text = _text.replace('&amp;', '&')
                        _text = _text.replace('&lt;', '<')
                        _text = _text.replace('&gt;', '>')
                        _text = _text.replace('&nbsp;', ' ')
                        for name in InformationRegex:
                            res = re.findall(InformationRegex[name], _text)
                            for _re in res:
                                if 'github' not in _re and search in _re:
                                    # BUG FIX: compare the regex KEY, not the
                                    # pattern text (InformationRegex[i] could
                                    # never equal 'mail'/'domain').
                                    if name == 'mail':
                                        logger.sysinfo("Found info: %s [%s]" % (url, _re))
                                    elif name == 'domain':
                                        logger.sysinfo("Found info: %s [%s]" % (url, _re))
                                    elif 'pass' in name:
                                        logger.sysinfo("Found info: %s [%s]" % (url, _re))
                    elif _resp and _resp.status_code == 404:
                        pass
                    else:
                        # BUG FIX: _resp can be None here; guard before use.
                        if _resp != None:
                            logger.error(_resp.text)
                            logger.error(_resp.status_code)
                        time.sleep(60)
                git_urls = []
            elif _resp != None and int(_resp.status_code) == 403:
                # NOTE(review): reassigning the loop variable does not retry
                # the page — the for loop overwrites p on the next iteration.
                p = p - 1
                logger.error("Too many times for access. So we should wait for ten minute.")
                time.sleep(60 * 10)
            else:
                p = p - 1
                # BUG FIX: _resp can be None in this branch; guard before use.
                if _resp != None:
                    logger.error(_resp.text)
                    logger.error(_resp.status_code)
                time.sleep(60)
    # BUG FIX: guard resp before reading status_code (mycurl may return None).
    elif resp != None and int(resp.status_code) == 422:
        sys.exit(logger.error("Error github api token."))
    return []
def load_function(self):
    """Cache the requested function name from conf onto this loader."""
    func = conf['func_name']
    self.func_name = func
    logger.sysinfo("Loading function: %s" % (conf['func_name']))
async def load(self):
    """Async generator yielding scan targets from every configured source.

    Sources (checked in order; all that are set are consumed): a single
    target string, a target file, an nmap XML report, a network range, a
    previous task database, and the search-engine / API backends
    (baidu/bing/360so, zoomeye, shodan, fofa, google).
    """
    # Running target counter, passed to _load_target for numbering.
    no = 0
    if self.target_simple != None:
        logger.sysinfo("Loading target: %s" % (self.target_simple))
        for target in self._load_target(no + 1, self.target_simple):
            yield target
    if self.target_file != None:
        with open(self.target_file, 'r') as f:
            logger.sysinfo("Loading target: %s" % (self.target_file))
            for _line in f.readlines():
                no += 1
                line = _line.replace("\r", "").replace("\n", "").strip()
                if line and line != '':
                    for li in self._load_target(no, line):
                        yield li
    if self.target_nmap_xml != None:
        logger.sysinfo("Loading target: %s" % (self.target_nmap_xml))
        tree = ET.parse(self.target_nmap_xml)
        root = tree.getroot()
        for host in root.findall('host'):
            host_id = host.find('address').get('addr')
            for port in host.iter('port'):
                no += 1
                port_id = port.attrib.get('portid')
                port_protocol = port.attrib.get('protocol')
                port_state = port.find('state').attrib.get('state')
                try:
                    port_service = port.find('service').attrib.get('name')
                except:
                    port_service = "None"
                # Only ports that are not closed/filtered become targets.
                if port_state.lower() not in ['closed', 'filtered']:
                    # NOTE(review): this yields the _load_target(...) result
                    # itself, while every other branch iterates it and yields
                    # each element — confirm which shape consumers expect.
                    yield self._load_target(
                        no, ':'.join([host_id, str(port_id)]), port_service)
    if self.target_network != None:
        logger.sysinfo("Loading target: %s" % (self.target_network))
        for target in self._load_target(no + 1, self.target_network):
            yield target
    if self.target_task != None:
        logger.sysinfo("Loading target: %s" % (self.target_task))
        hashdb = TaskDataDB(os.path.join(paths.DATA_PATH, self.target_task))
        hashdb.connect()
        for _row in hashdb.select_all():
            no += 1
            # Prefer the recorded url (column 5); fall back to host:port.
            if _row[5] != None and _row[5] != '':
                for target in self._load_target(no, _row[5]):
                    yield target
            else:
                for target in self._load_target(
                        no, ':'.join([_row[3], str(_row[4])])):
                    yield target
    if self.target_search_engine != None:
        logger.sysinfo("Loading target by baidu/bing/360so: %s" % (self.target_search_engine))
        urls = await search_api(self.target_search_engine, type=API_TYPE.OTHER_SEARCH_ENGINE)
        for _url in urls:
            if _url:
                no += 1
                for target in self._load_target(no, _url):
                    yield target
    if self.target_zoomeye != None:
        logger.sysinfo("Loading target by zoomeye: %s" % (self.target_zoomeye))
        urls = await search_api(self.target_zoomeye, type=API_TYPE.ZOOMEYE)
        for _url in urls:
            if _url:
                no += 1
                for target in self._load_target(no, _url):
                    yield target
    if self.target_shodan != None:
        logger.sysinfo("Loading target by shadon: %s" % (self.target_shodan))
        urls = await search_api(self.target_shodan, type=API_TYPE.SHODAN)
        for _url in urls:
            if _url:
                no += 1
                for target in self._load_target(no, _url):
                    yield target
    if self.target_fofa != None:
        logger.sysinfo("Loading target by fofa: %s" % (self.target_fofa))
        urls = await search_api(self.target_fofa, type=API_TYPE.FOFA)
        for _url in urls:
            if _url:
                no += 1
                for target in self._load_target(no, _url):
                    yield target
    # fofa "today poc" loading is currently disabled:
    # if self.target_fofa_today_poc != None:
    #     logger.sysinfo("Loading target by fofa today poc: %s" % (self.target_fofa_today_poc))
    #     obj = await search_api(self.target_fofa_today_poc, type=API_TYPE.FOFA_TODAY_POC)
    #     for _url,_server in obj:
    #         if _url:
    #             no += 1
    #             for target in self._load_target(no, _url, _server):
    #                 yield target
    if self.target_google != None:
        logger.sysinfo("Loading target by google: %s" % (self.target_google))
        urls = await search_api(self.target_google, type=API_TYPE.GOOGLE)
        for _url in urls:
            if _url:
                no += 1
                for target in self._load_target(no, _url):
                    yield target