def translate(self, raw, to_lang='英文'):
    """Asynchronously translate `raw` by issuing a Baidu web search.

    raw:     text to translate.
    to_lang: target language appended to the query (default '英文', i.e.
             "English"); part of the runtime query string, kept as-is.
    Prints the result via LogControl; returns nothing.
    """
    # Lazily create a worker pool if the object was built with asyn=False.
    if not self.asyn:
        self.asyn = Exe(10)

    def query(url):
        # Fetch the search page and decode with the server-reported encoding.
        res = to(url)
        en = res.encoding
        return res.status_code, res.content.decode(en)

    def display(code, content):
        '''Print the inline-dictionary block of the result page, or a
        failure notice when the status code is not a success.'''
        # NOTE(review): int(code / 200) == 1 is true for 200-399, so 3xx
        # redirects also count as success — confirm this is intended.
        if int(code / 200) == 1:
            res = '\n'.join([
                i.text for i in self.xpath(
                    content,
                    "div[%s]" % Baidu.config['translate_tag'], 'td', '*')
            ])
            LogControl.ok(res)
        else:
            LogControl.fail("not found this")

    # Build the search URL and run fetch + display on the worker pool.
    data = Baidu.config['translate_url'].format(
        query=quote(raw + ' ' + to_lang))
    self.asyn.done(query, display, data)
def running(msg, interval=1): running = True # tag = ['| ', '/ ', '- ', '\\ '] def _timer(): m = 0 while running: print(' ...', end='') sys.stdout.flush() time.sleep(interval) e = Exe(2) try: info(msg, end=' ') e.submit(_timer) with stdout(None) : yield except Exception as e: running = False err(e) sys.exit(0) else: running = False print("\b\b\b" , end='') cprint('[%s]' % colored('√', 'green', attrs=['bold'])) finally: running = False
def ex(self):
    """Entry point: dispatch on parsed CLI options, then run each payload.

    Editor/add_data/delete_data options each perform their action and exit
    via SystemExit; otherwise the module is parsed and run once (scalar
    payloads) or once per payload (list/dict/tuple payloads).
    """
    # print(self.options)
    if self.options['Editor']:
        # Open the module source in vi, then quit.
        os.system("vi %s" % J(MODULE_PATH, self.module_name + '.py'))
        raise SystemExit("0")
    if self.options['thread'] > 1:
        # Swap in a bigger worker pool when a thread count was requested.
        self.exe = Exe(self.options['thread'])
        LogControl.i('thread: %d' % self.options['thread'], txt_color='blue')
    if self.options['add_data']:
        self.add_res(self.options['add_data'])
        raise SystemExit("0")
    if self.options['delete_data']:
        self.del_res()
        raise SystemExit("0")
    self.parser()
    try:
        if not isinstance(self.payloads, (list, dict, tuple,)):
            # Single (non-collection) payload: run once and exit.
            # LogControl.wrn("check here")
            self.run(self.options)
            # LogControl.wrn("check here1")
            raise SystemExit(0)
        for payload in self.payloads:
            self.options['payload'] = payload
            LogControl.title("load payload: ",
                             colored(payload, attrs=['underline', 'bold']),
                             txt_color="blue", end="\r")
            self.run(self.options)
    except (KeyboardInterrupt, InterruptedError):
        # Ctrl-C: say goodbye and exit cleanly.
        LogControl.i("~~bye")
        raise SystemExit(0)
def __init__(self, ssl=True, asyn=True, debug=False, db=False, database=None,
             proxy=False):
    """Initialize a search-engine client.

    ssl:      use an https URL prefix when True, plain http otherwise.
    asyn:     create a 20-worker Exe pool for async requests.
    debug:    enable verbose xpath logging.
    db:       persist results into a sqlite3 db when truthy.
    database: explicit sqlite3 db path (default ~/.Search-engine-sqlite3-db.db).
    proxy:    requests-style proxy mapping, e.g. {'http': 'socks5://...'}.
    """
    # BUG FIX: the non-ssl prefix was 'https//www.' — missing the colon and
    # still claiming https. Use a well-formed plain-http prefix instead.
    self.url_pre = 'https://www.' if ssl else 'http://www.'
    # Host is derived from the subclass name: class Baidu -> www.baidu.com.
    self.search_name = self.__class__.__name__.lower()
    self.host = self.url_pre + self.search_name + '.com'
    self.agent = random_choice(AGS)
    self.asyn = None
    self.db = None
    self.debug = debug
    self.proxy = None
    self.grep_html_content = dict()  # async grep results, keyed by request num
    if asyn:
        self.asyn = Exe(20)
    if proxy:
        LogControl.info("loading proxy..", end='')
        self.proxy = proxy  # Mongo('local').find("setting")[0].get("proxy")
        if self.proxy:
            LogControl.ok("")
    if db:
        self.use_db = db
        db_path = os.path.join(
            os.getenv("HOME"),
            '.Search-engine-sqlite3-db.db') if not database else database
        self.db_path = db_path
        self.db = SqlEngine(database=db_path)
        if not self.db.table_list():
            # First run: create a per-engine table for cached results.
            self.db.create(self.search_name, query=str, content=str,
                           type='web')
class Baidu(Searcher):
    """Baidu-backed Searcher with a web-search based translate helper."""

    config = {
        # xpath predicate matching Baidu's inline-dictionary result block
        'translate_tag': "@class='op_dict_content'",
        # NOTE(review): backslash-newline continuations keep this one runtime
        # string; original line breaks reconstructed — confirm exact bytes.
        'translate_url': "https://www.baidu.com/s?ie=utf-8&f=8&rsv_bp=0&rsv_idx=1&tn=baidu&wd={query}\
&rsv_pq=be1d9cdc00001f3a&rsv_t=e851oZevI5oKNV9F6W9CwY5wqlPyiK9Pqz0RLr7vdyawQru3NgKGrAcsizA\
&rqlang=cn&rsv_enter=1&rsv_sug3=20&rsv_sug2=0&inputT=2703&rsv_sug4=6023"
    }

    def translate(self, raw, to_lang='英文'):
        """Asynchronously translate `raw` by issuing a Baidu web search and
        printing the inline-dictionary block of the result page."""
        # Lazily create a worker pool if built with asyn=False.
        if not self.asyn:
            self.asyn = Exe(10)

        def query(url):
            # Fetch the page; decode with the server-reported encoding.
            res = to(url)
            en = res.encoding
            return res.status_code, res.content.decode(en)

        def display(code, content):
            '''Print the dictionary block, or a failure notice.'''
            # NOTE(review): int(code / 200) == 1 also accepts 3xx codes.
            if int(code / 200) == 1:
                res = '\n'.join([
                    i.text for i in self.xpath(
                        content,
                        "div[%s]" % Baidu.config['translate_tag'], 'td', '*')
                ])
                LogControl.ok(res)
            else:
                LogControl.fail("not found this")

        data = Baidu.config['translate_url'].format(
            query=quote(raw + ' ' + to_lang))
        self.asyn.done(query, display, data)
def __init__(self, id=None, timeout=20, connection="127.0.0.1:8775", load=False,
             verbose=False, thread_num=4, init=False, **connect_options):
    """Client for a running sqlmapapi HTTP server.

    id:              reuse an existing task id; when None, create or load one.
    timeout:         seconds to wait before asking the server for scan data.
    connection:      host:port of the sqlmapapi server.
    load:            when id is None, load the last stored task id instead of
                     creating a new task.
    verbose:         echo server responses.
    thread_num:      worker threads for async HTTP calls.
    init:            create the 'Task' table on first use.
    connect_options: forwarded verbatim to SqlEngine.
    """
    self.exe = Exe(thread_num)
    self.target = 'http://{connection}'.format(connection=connection)
    self.test_url = None        # url of the scan currently under test
    self.injectable = False     # set True once the server reports scan data
    self.id = id
    self.connect = False        # True after a task is created successfully
    self.start_time = None
    self.terminated = False     # True once the server reports 'terminated'
    self.connect_options = connect_options
    self.verbose = verbose
    self.timeout = timeout
    if init:
        # First run: create the Task table from the class-level schema.
        db = SqlEngine(**self.connect_options)
        db.create('Task', **self.__class__.TASK_TABLE)
    if self.id is None:
        if load:
            # Resume the most recently stored task id.
            db = SqlEngine(**self.connect_options)
            self.id = db.last('Task', 'task_id')
        else:
            self.create_task()
def __init__(self, thread_num=12, **kargs):
    """Crawler state: URL queues/sets plus a shared background executor.

    thread_num: size of the class-shared Exe pool (created once).
    kargs:      any attribute already defined here may be overridden by name
                (e.g. start_url=..., domain=...).
    Raises UrlValid when no start_url was supplied.
    """
    self.host = None            # scheme://host of start_url (derived below)
    self.start_url = None       # must be provided via kargs
    self.domain = None          # bare domain (derived from start_url)
    self.unreadch_url = Queue() # URLs discovered but not yet visited
    self.try_times = 10
    self.reach_url = set()      # URLs already visited
    self.stop = False
    self.outer_url = set()      # links pointing off-domain
    self.mail_url = set()       # mailto links
    self.js_url = set()         # javascript links
    self.all_links = set()
    self.charset = None
    self.db = None
    self.stop_count = 0
    self.parser = _Parser
    self.grep = None
    if not self.back_executor:
        # Shared across instances: stored on the class, not the instance.
        Handler.back_executor = Exe(thread_num)
    # init args
    for k in kargs:
        if hasattr(self, k):
            setattr(self, k, kargs[k])
    if not self.start_url:
        raise UrlValid("not set start url", 0)
        sys.exit(0)  # NOTE(review): unreachable — the raise above exits first
    if not self.domain:
        # e.g. 'http://example.com/path' -> 'example.com'
        self.domain = self.start_url.split("/")[2]
        info("init domain :", self.domain)
    if not self.host:
        # e.g. 'http://example.com/path' -> 'http://example.com'
        self.host = '/'.join(self.start_url.split("/")[:3])
        info("init host :", self.host)
    self._index(self.on_start)
# db engine # db_engine = pymongo.Connection()['local'] db_connect_cmd = r'database="./db.sql"' db_engine = SqlEngine(database="./db.sql") # static path rdir_path = os.path.dirname(__file__) static_path = rdir_path + r"\static" if sys.platform.startswith( "win") else "./static" files_path = rdir_path + r".\static\files" if sys.platform.startswith( "win") else "./static/files" # set log level # LogControl.LOG_LEVEL |= LogControl.OK # LogControl.LOG_LEVEL |= LogControl.INFO exe = Exe(7) Settings = { 'db': db_engine, # 'L': LogControl, 'exe': exe, 'debug': True, "ui_modules": ui, 'autoreload': True, 'cookie_secret': 'This string can be any thing you want', 'static_path': static_path, } ## follow is router try: os.mkdir(files_path) except FileExistsError:
import os, sys import random from urllib.parse import urljoin BASE_URL = os.getenv("Q_PATH") sys.path.append(BASE_URL) from qlib.asyn import Exe from qlib.net import to _exe = Exe(6) _GEO_IP = [ # 0: "http://ip-api.com/json/", "https://freegeoip.net/json/", ] def check_local(ip): if ip == "127.0.0.1": return "hangzhou " _LOCAL_IP = { "10": True, "172": lambda x: int(x.split(".")[1]) > 15, "192": lambda x: int(x.split(".")[1]) == 168, } head = ip.split(".") geo_api = _GEO_IP[random.randint(0, len(_GEO_IP) - 1)]
class Searcher:
    """Base class for site-specific search engines.

    The target host is derived from the subclass name (class Baidu ->
    www.baidu.com).

    @proxy is setting request's proxy
    sample: {
        'http': 'socks5://127.0.0.1:1080',
        'https': 'socks5://127.0.0.1:1080'
    }
    """

    config = {
        'search_url': '',
        'search_args': {},
    }

    def __init__(self, ssl=True, asyn=True, debug=False, db=False,
                 database=None, proxy=False):
        """See class docstring; database defaults to
        ~/.Search-engine-sqlite3-db.db when db is truthy."""
        # BUG FIX: the non-ssl prefix was the malformed 'https//www.'
        # (missing colon, still claiming https); use plain http instead.
        self.url_pre = 'https://www.' if ssl else 'http://www.'
        self.search_name = self.__class__.__name__.lower()
        self.host = self.url_pre + self.search_name + '.com'
        self.agent = random_choice(AGS)
        self.asyn = None
        self.db = None
        self.debug = debug
        self.proxy = None
        self.grep_html_content = dict()  # async grep results by request num
        if asyn:
            self.asyn = Exe(20)
        if proxy:
            LogControl.info("loading proxy..", end='')
            self.proxy = proxy  # Mongo('local').find("setting")[0].get("proxy")
            if self.proxy:
                LogControl.ok("")
        if db:
            self.use_db = db
            db_path = os.path.join(
                os.getenv("HOME"),
                '.Search-engine-sqlite3-db.db') if not database else database
            self.db_path = db_path
            self.db = SqlEngine(database=db_path)
            if not self.db.table_list():
                self.db.create(self.search_name, query=str, content=str,
                               type='web')

    def proxy_to(self, url, **kargs):
        """Issue a request, routing through self.proxy when configured."""
        if self.proxy:
            return to(url, proxy=self.proxy, **kargs)
        return to(url, **kargs)

    def xpath(self, html, *tags, exclude=None):
        """Yield nodes matching //tag1//tag2...; `exclude` filters out a
        node name via [not(name()=...)]."""
        xhtml = HTML(html)
        exclude = '[not(name()={})]'.format(exclude) if exclude else ''
        if self.debug:
            LogControl.info("//" + "//".join(tags) + exclude)
        for item in xhtml.xpath("//" + "//".join(tags) + exclude):
            yield item

    def ready(self, pre_url, *args, **options):
        """Hook for subclasses to prepare a request; default no-op."""
        pass

    def set_args(self, **kargs):
        return parameters(**kargs)

    def grep(self, url, grep_str, num):
        """
        async to grep url's content by grep_str,
        then will return (num, [*url]) -> callback
        """
        def _request():
            # Skip when the url's extension already contains grep_str.
            if grep_str.lower().find(url.split(".").pop().lower()) != -1:
                return num, []
            res = self.proxy_to(url)
            # BUG FIX: 'res.status_code / 100 == 2' (true division) matched
            # only exactly 200 (201/100 == 2.01); integer division accepts
            # any 2xx success code, as intended.
            if res.status_code // 100 == 2:
                encoding = 'utf8'  # res.encoding if res.encoding else 'utf8'
                LogControl.err(url, res.encoding)
                if res.content:
                    try:
                        h_content = res.content.decode(encoding)
                        return num, [
                            i.attrib.get('href')
                            for i in HTML(h_content).xpath('//a')
                            if i.attrib.get('href', '').find(grep_str) != -1
                        ]
                    except UnicodeError as e:
                        LogControl.err(e, '\n', res.encoding, encoding, url)
                        return num, []
                    except KeyError as e:
                        LogControl.err(e, url)
                        return num, []
            else:
                return num, []
        if self.asyn:
            self.asyn.done(_request, self.save_grep)

    def save_grep(self, num, res):
        """Callback: stash grep results keyed by request number."""
        self.grep_html_content[num] = res
def translate(self, raw, from_lan='en', to_lan='zh', detail=False, example=False):
    """Translate `raw` via Baidu's fanyi v2transapi endpoint (async).

    raw:     text to translate.
    from_lan / to_lan: source and target language codes.
    detail : if detail == true will get more information.
    example : if example == true will get some sample.
    Results are printed through LogControl; nothing is returned.
    """
    self.host = 'fanyi.baidu.com/v2transapi'
    # Lazily create a worker pool if built with asyn=False.
    if not self.asyn:
        self.asyn = Exe(10)

    def query(data):
        # POST the form data; the endpoint responds with JSON.
        res = to(self.host, method='post', data=data)
        return res.status_code, res.json()

    def tree_dict(d, func, key=None, lfunc=None):
        # Recursively walk a nested dict/list, calling `func` on leaves.
        # 'dst' leaves (the translated text) get highlighted directly.
        if isinstance(d, dict):
            for item in d:
                tree_dict(d[item], func, key=item)
        elif isinstance(d, list):
            for item in d:
                if lfunc:
                    tree_dict(item, lfunc)
                else:
                    tree_dict(item, func)
        else:
            if key:
                if key == 'dst':
                    LogControl.ok(d, txt_color='green',
                                  txt_attr=['underline', 'bold'])
                else:
                    func(d, key)
            else:
                func(d)

    def single_display(val, key=None, level=1):
        # Leaf printer: '[key]  value' when keyed, indented value otherwise.
        if key:
            LogControl.ok('[' + key + ']', '\t', val)
        else:
            LogControl.info('\t\t' + ' ' * level, val)

    def display(code, content):
        '''Print translation (and optional detail/sample sections).'''
        # NOTE(review): int(code / 200) == 1 also accepts 3xx codes.
        if int(code / 200) == 1:
            try:
                tree_dict(content['trans_result'], single_display, 'base')
                if detail:
                    tree_dict(content['dict_result'], single_display, 'dict')
                if example:
                    tree_dict(content['liju_result'], single_display, 'sample')
            except KeyError:
                # Response missing an expected section: dump what we got.
                LogControl.fail("Only: ", *content.keys())
                for k in content:
                    LogControl.err(k, content[k])
        else:
            LogControl.fail("not found this")

    data = {
        'from': from_lan,
        'to': to_lan,
        'query': raw,
        'transtype': 'translang',
        'simple_means_flag': '3',
    }
    self.asyn.done(query, display, data)
class SqlmapApi:
    """
    first use need add 'init' kargs
    like :
        sqlmapapi(type='mysql', user='******', init=True) #use mysql
        sqlmapapi(database='/tmp/test.sql', init=True) # use sqlite3
        sqlmapapi(id='cdffe123fab3', database='/tmp/test.sql') # use sqlite3 and load a exist id
        sqlmapapi(database='/tmp/test.sql') # use sqlite3 and create a task
        sqlmapapi(database='/tmp/test.sql', load=True) # use sqlite3 and load the last task id

    Class Task(Table):
        task_id: "some hash id"
        url: str,
        status: 'not running'
        finishtime: time

    @get("/task/new")
    @get("/task/<taskid>/delete")
    @get("/admin/<taskid>/list")
    @get("/admin/<taskid>/flush")
    @get("/option/<taskid>/list")
    @post("/option/<taskid>/get")
    @post("/option/<taskid>/set")
    @post("/scan/<taskid>/start")
    @get("/scan/<taskid>/stop")
    @get("/scan/<taskid>/kill")
    @get("/scan/<taskid>/status")
    @get("/scan/<taskid>/data")
    @get("/scan/<taskid>/log/<start>/<end>")
    @get("/scan/<taskid>/log")
    @get("/download/<taskid>/<target>/<filename:path>")
    """

    # Schema for the local Task table (sample values define column types).
    TASK_TABLE = {
        'task_id': 'id',
        'url': 'http://abc.com',
        'status': 'not running',
        'data': str,
        'ftime': time
    }
    # Subset of sqlmap tamper script names.
    TAMPER = [
        'apostrophemask', 'apostrophenullencode', 'appendnullbyte',
        'base64encode', 'between', 'bluecoat', 'chardoubleencode',
        'charencode'
    ]

    def __init__(self, id=None, timeout=20, connection="127.0.0.1:8775",
                 load=False, verbose=False, thread_num=4, init=False,
                 **connect_options):
        """See class docstring for usage; connect_options go to SqlEngine."""
        self.exe = Exe(thread_num)
        self.target = 'http://{connection}'.format(connection=connection)
        self.test_url = None       # url of the scan currently under test
        self.injectable = False    # set True once the server reports data
        self.id = id
        self.connect = False       # True after a task is created
        self.start_time = None
        self.terminated = False    # True once the server says 'terminated'
        self.connect_options = connect_options
        self.verbose = verbose
        self.timeout = timeout
        if init:
            # First use: create the Task table from the class-level schema.
            db = SqlEngine(**self.connect_options)
            db.create('Task', **self.__class__.TASK_TABLE)
        if self.id is None:
            if load:
                # Resume the most recently stored task id.
                db = SqlEngine(**self.connect_options)
                self.id = db.last('Task', 'task_id')
            else:
                self.create_task()

    def on_result(self, t, v):
        """Dispatch an async server response: `t` is the command tag that
        produced it, `v` the decoded JSON body.

        NOTE(review): nesting below on_result's 'data' branch reconstructed
        from a whitespace-mangled source — verify against the original.
        """
        if t == 'new':
            # Task created: remember its id and store a placeholder row.
            self.id = v['taskid']
            db = SqlEngine(**self.connect_options)
            if db is not None:
                db.insert("Task", ['task_id', 'url', 'status'], self.id,
                          'None', 'not running')
                db.close()
        elif t == 'data':
            if self.verbose:
                info(v)
            db = SqlEngine(**self.connect_options)
            if v['data']:
                if db:
                    # NOTE(review): 'upadte' typo preserved (local name only).
                    upadte_set = {
                        'url': self.test_url,
                        'status': 'injectable',
                        'data': str(v['data']),
                        'ftime': datetime.datetime.now()
                    }
                    db.update("Task", upadte_set, task_id=self.id)
                else:
                    if self.verbose:
                        ok(v['data'])
                self.injectable = True
            else:
                upadte_set = {
                    'url': self.test_url,
                    'status': 'failed',
                    'ftime': datetime.datetime.now()
                }
                db.update("Task", upadte_set, task_id=self.id)
            db.close()
            # when scan stoped , to delete task;
            self.task_cmd("delete")
        elif t == "status":
            try:
                if self.verbose:
                    info(v[u'status'])
                if v[u'status'] == 'terminated':
                    self.terminated = True
                    self.result()
                else:
                    if not self.terminated:
                        # Poll again in 5s until the scan terminates.
                        self.exe.timmer(5, self.status)
            except KeyError:
                pass
        elif t == 'start':
            if self.verbose:
                info(v['success'])
        elif t == 'set':
            if self.verbose:
                ok('\ninit options')
        elif t == 'kill':
            if self.verbose:
                fail(v)
        elif t == 'stop':
            if self.verbose:
                wrn(v)
        elif t == 'list':
            for k in v['options']:
                if self.verbose:
                    ok(k, v['options'][k])
        elif t == 'task':
            if self.verbose:
                info(v)
        elif t == 'log':
            for msg in v['log']:
                # if self.log:
                info(msg)

    def handle(self, tag, cmd, **kargs):
        """Perform the HTTP call; returns (tag, decoded json) to on_result."""
        return tag, to(urljoin(self.target, cmd), **kargs).json()

    def create_task(self):
        """Ask the server for a new task id (async)."""
        try:
            self.exe.done(self.handle, self.on_result, 'new', 'task/new')
            self.connect = True
        except Exception as e:
            err("check sqlmapapi server in ")
            self.connect = False

    def delete_task(self):
        self.cmd('task', 'delete')
        self.id = None
        # self.injectable = False
        # self.test_url = None

    def scan_cmd(self, cmd):
        # self.exe.done(self.handle, self.on_result, cmd, 'scan/{id}/{cmd}'.format(id=self.id, cmd=cmd))
        self.cmd('scan', cmd)

    def task_cmd(self, cmd):
        self.cmd('task', cmd)

    def option(self, cmd):
        self.cmd('option', cmd)

    def cmd(self, tag, cmd):
        """Fire '{tag}/{id}/{cmd}' asynchronously when connected."""
        if self.connect:
            self.exe.done(
                self.handle, self.on_result, cmd,
                '{tag}/{id}/{cmd}'.format(tag=tag, id=self.id, cmd=cmd))
        else:
            wrn("not connect")

    def result(self):
        self.scan_cmd("data")

    def scan(self, url, **options):
        '''
        @url: target url
        @options: {
            'tamper': True,
            'smart': True,
            'delay': 1
        }
        '''
        # if 'tamper' in options:
        #     options['tamper'] = SqlmapApi.TAMPER
        self.test_url = url
        if options:
            # Push scan options to the server before starting.
            data = json.dumps(options)
            self.exe.done(
                self.handle,
                self.on_result,
                'set',
                'option/{id}/set'.format(id=self.id),
                data=data,
                method='post',
                headers={'Content-Type': 'application/json'})
        data = json.dumps({'url': url})
        self.exe.done(
            self.handle,
            self.on_result,
            'start',
            'scan/{id}/start'.format(id=self.id),
            data=data,
            method='post',
            headers={'Content-Type': 'application/json'})
        # Begin polling status in 5s; fetch results after `timeout` seconds.
        self.exe.timmer(5, self.status)
        self.exe.timmer(self.timeout, self.result)

    def status(self):
        self.scan_cmd('status')

    def log(self):
        self.scan_cmd('log')

    def stop(self, id=None):
        self.scan_cmd("stop")
import os
import sys
import json
import qlib.viewer.templates as templates
from copy import deepcopy

# Side effect at import: read the qlib base path from ~/.q_config and add it
# to sys.path before importing qlib modules.
with open(os.path.join(os.environ['HOME'], ".q_config")) as fp:
    BASE_URL = fp.read().strip()
sys.path.append(BASE_URL)

from qlib.asyn import Exe
from qlib.net import to

# Name-mangled module globals: these match the private attribute names
# `self.__exe` inside classes NodeClient and GeoClient (defined elsewhere).
_NodeClient__exe = Exe(6)
_GeoClient__exe = _NodeClient__exe


def callback(err, arg, result):
    """Default async callback: silently succeed, print any error."""
    if not err:
        pass
    else:
        print(err)


class ViewerBase:
    """
    type:
        'node' for d3 force
        'tree' for d3 tree
    """

    def __init__(self, url, type):
        # `type` selects a template attribute by name from qlib.viewer.templates
        # (e.g. 'node' or 'tree'); it shadows the builtin but is the public API.
        self.url = url
        self.template = getattr(templates, type)
def check(url):
    """Run a 17ce.com distributed ping/CDN test for `url` and poll until the
    global STA flag for this url is cleared by the recv callback.

    Side effects: creates a per-url sqlite db under ROOT/res_db/, performs
    HTTP requests against 17ce.com, prints progress, sleeps while polling.
    """
    show(url, end='')
    global STA
    try:
        # One result db per target; schema mirrors 17ce's per-node fields.
        db_init = SqlEngine(database='%s/res_db/%s.db' % (ROOT, url))
        db_init.create(
            'cdn',
            url=str,
            sid=int,
            avgtime=str,
            srcip=str,
            srcname=str,
            isp=str,
            name=str,
            view=str,
        )
    except Exception as e:
        # Table likely exists already; best-effort create is intentional.
        pass
    headers = {
        'origin': 'https://www.17ce.com',
        'referer': 'https://www.17ce.com/',
        'content-type': 'application/x-www-form-urlencoded',
    }
    # Open a session (cookies + random agent) against the site first.
    req, res = to("www.17ce.com", cookie=True, agent=True)
    req.headers.update(headers)
    req.cookies.update({'allSites': url})
    # Site's anti-abuse token: sha1(salt1 + url + salt2).
    verify = sha1(b'C^dLMi%r&JH7bkmdFCgGl8' + url.encode('utf8') +
                  b"1TnvST&D9LJ").hexdigest()
    data = urlencode({
        'rt': '1',
        'nocache': '0',
        'url': url,
        'verify': verify,
        'host': '',
        'referer': '',
        'cookie': '',
        'agent': '',
        'speed': '',
        'postfield': '',
        'pingcount': '',
        'pingsize': '',
    })
    # Request all 4 areas and a fixed subset of ISP ids.
    for i in range(4):
        data += '&' + urlencode({'area[]': i})
    for i in [0, 1, 2, 4, 6, 7, 8]:
        data += '&' + urlencode({'isp[]': i})
    show(" init ", end='')
    res = req.post("https://www.17ce.com/site/ping", data=data).content
    res = json.loads(res.decode('utf8', 'ignore'))
    L(" ok")
    time.sleep(1)
    # show(res)
    show("... from server getting data .. wait")
    e = Exe(3)
    # Poll endpoint parameters: task id returned by the ping call.
    d = urlencode({'tid': res['tid'], 'num': 0, 'ajax_over': 0})
    STA[url] = True
    e.done(reqq, recv, url, req, "https://www.17ce.com/site/ajaxfresh", d)
    b = bar()
    cc = 0
    # Spin until recv() clears STA[url]; 're-try' re-queues the fetch.
    while STA[url]:
        cc += 1
        time.sleep(1)
        show(next(b), end='\r')
        if STA[url] == 're-try':
            e.done(reqq, recv, url, req,
                   "https://www.17ce.com/site/ajaxfresh", d)
            STA[url] = True