def recv(url, resp):
    global STA
    content = resp.content
    sql_fi = '%s/res_db/%s.db' % (ROOT, url)
    db_init = SqlEngine(database=sql_fi)
    c = content.decode('utf8', 'ignore')
    o = demjson.decode(c)
    # path = ROOT + "/cdn_cache/" + url
    # with open(path, 'w') as fp:
    #     json.dump(o, fp)
    da = o['freshdata']
    if not da:
        STA[url] = 're-try'
        show("try - again")
        return
    for k in da:
        if not db_init.first('cdn', sid=k):
            db_init.insert(
                'cdn',
                ['url', 'avgtime', 'srcip', 'srcname', 'isp', 'name', 'view'],
                url,
                da[k]['Avg'],
                da[k]['SrcIP']['srcip'],
                da[k]['SrcIP']['ipfrom'],
                da[k]['isp'],
                da[k]['name'],
                da[k]['view'],
            )
    STA[url] = False
def execute(cmd, help=False, console=False, **args):
    """Run every template stored under group_key == cmd; with help=True just list
    the commands and the option names they need."""
    LogControl.ok('\n')
    t_dir = dir_gen()
    try:
        os.makedirs(t_dir)
    except Exception:
        pass
    DB_Handler = SqlEngine(database=DB_PATH)
    cmds = []
    options = set()
    for i in DB_Handler.select("templates", 'cmd', 'keys', 'output', group_key=cmd):
        cmds.append([i[0], i[2]])
        for m in i[1].split():
            options.add(m)
    if help:
        LogControl.i("need to set options: ", options)
        for cmd, out in cmds:
            LogControl.i(cmd, ' -> ', out, txt_color='yellow')
        return True
    else:
        for cmd, out in cmds:
            try:
                rrun(cmd.format(**args), t_dir, out)
            except Exception as e:
                LogControl.err(e, cmd)
                LogControl.i("need to set options: ", options)
                continue
    if console:
        try:
            os.system('sleep 2 && tail -f %s/*' % t_dir)
        except KeyboardInterrupt:
            LogControl.info("~bye")
def search_comba_cmds(tag=False):
    """List template groups; when tag is given, only yield groups whose group_key contains tag."""
    DB_Handler = SqlEngine(database=DB_PATH)
    for key in DB_Handler.select('templates', 'group_key'):
        if not tag:
            yield key[0], list(DB_Handler.select('templates', group_key=key[0]))
        else:
            if key[0].find(tag) != -1:
                yield key[0], list(DB_Handler.select('templates', group_key=key[0]))
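# A minimal usage sketch, not part of the original module: iterate every template
# group and print how many rows it holds. The "nmap" tag value is a made-up example;
# only search_comba_cmds itself comes from the code above.
for group_key, rows in search_comba_cmds():
    print(group_key, len(rows), "template rows")

# Narrow the listing to groups whose key contains a substring.
for group_key, rows in search_comba_cmds(tag="nmap"):
    print(group_key, rows)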
def list_ip(url):
    if url + '.db' not in os.listdir(SQL_F):
        return ''
    ips = {}
    ip_g = {}
    s = SqlEngine(database=SQL_F + "/" + url + ".db")
    for r in s.select("cdn"):
        l = list(r[4:])
        l[0] = r[5]
        l[1] = r[4]
        if l[0] not in ips:
            ips[l[0]] = 1
        else:
            ips[l[0]] += 1
        if l[0] not in ip_g:
            ip_g[l[0]] = {l[2]}
        else:
            ip_g[l[0]].add(l[2])
    if len(ip_g) > 4:
        show(url + ' ' + ' | '.join(list(ip_g)[:4]) + " ...")
    else:
        show(url + ' ' + ' | '.join(list(ip_g)), tag=url)
class ExpDBHandler:
    """
    can use functions:
        get(*args, **kargs)
        to_db(*values)
    can use attributes:
        self.table
        self.db
        self.columns
    """

    def __init__(self, table_name):
        self.table = table_name
        self.db = SqlEngine(database=DB_PATH)
        if (table_name,) not in self.db.table_list():
            # interactively collect column name/type pairs for the new table
            ex_values = {}
            while True:
                name = dinput("[q or None exit] name>", None)
                if name in ['q', None]:
                    break
                value = dinput("\r[q or None exit] value>", None)
                if value in ['q', None]:
                    break
                if value == "int":
                    ex_values[name] = int
                elif value == 'str':
                    ex_values[name] = str
                elif value == "time":
                    ex_values[name] = time
                elif value in digits:
                    ex_values[name] = int(value)
                else:
                    ex_values[name] = value
            self.db.create(table_name, payload='some payload', **ex_values)
        self.columns = tuple([i[0] for i in self.db.check_table(self.table)][2:])

    def get(self, *columns, **kargs):
        for item in self.db.select(self.table, *columns, **kargs):
            yield item

    def to_db(self, *value):
        try:
            self.db.insert(self.table, self.columns, *value)
        except Exception:
            return False
        else:
            return True
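# A minimal usage sketch, not from the original source: it assumes DB_PATH points at a
# writable sqlite file and that the "exploits" table name is hypothetical.
handler = ExpDBHandler("exploits")

# to_db() expects one value per column in handler.columns and returns True on success.
ok = handler.to_db(*["example"] * len(handler.columns))

# get() yields rows for the requested columns, optionally filtered by keyword arguments.
for row in handler.get(*handler.columns):
    print(row)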
def shows(url, opt):
    show(url)
    if url + '.db' in os.listdir(SQL_F):
        ips = {}
        ip_g = {}
        s = SqlEngine(database=SQL_F + "/" + url + ".db")
        for r in s.select("cdn"):
            l = list(r[4:])
            l[0] = r[5]
            l[1] = r[4]
            if opt == 'd':
                L(*l, color='blue')
            if l[0] not in ips:
                ips[l[0]] = 1
            else:
                ips[l[0]] += 1
            if l[0] not in ip_g:
                ip_g[l[0]] = {l[2]}
            else:
                ip_g[l[0]].add(l[2])
        res = {count: name for name, count in ips.items()}
        L(res)
        for v in res.values():
            L(v, ip_g[v], color='green')
def __init__(self, id=None, timeout=20, connection="127.0.0.1:8775",
             load=False, verbose=False, thread_num=4, init=False, **connect_options):
    self.exe = Exe(thread_num)
    self.target = 'http://{connection}'.format(connection=connection)
    self.test_url = None
    self.injectable = False
    self.id = id
    self.connect = False
    self.start_time = None
    self.terminated = False
    self.connect_options = connect_options
    self.verbose = verbose
    self.timeout = timeout
    if init:
        db = SqlEngine(**self.connect_options)
        db.create('Task', **self.__class__.TASK_TABLE)
    if self.id is None:
        if load:
            db = SqlEngine(**self.connect_options)
            self.id = db.last('Task', 'task_id')
        else:
            self.create_task()
BASE_URL = os.path.join(os.getenv("HOME"), ".ssh_hosts")

from qlib.data.sql import SqlEngine
from qlib.log import LogControl as L

# env.roledefs['targets'] = ['localhost']

DOC = """
Usage:
    Ex [host fuzz name] ,, cmd

Example:
    >> Ex example.host.com ,, ls .
    >> Ex ls .
"""

output.running = False

sql = SqlEngine(BASE_URL)
if not sql.table_list():
    sql.create("host", login="******", port="port", passwd="passwd")


def get_records(fuzz=None):
    if fuzz:
        for r in sql.search("host", "login", "port", "passwd", login=fuzz):
            yield r
    else:
        for r in sql.select("host", "login", "port", "passwd"):
            yield r


def par(l):
    if l:
import os, sys
from importlib import util

from qlib.data.sql import SqlEngine
from qlib.data import GotRedis

# shortcuts to keep the path handling below short.
J = os.path.join
ENV = os.getenv

redis = GotRedis()

DB_PATH = J(ENV('HOME'), '.hacker_cmds')
DB_Handler = SqlEngine(database=DB_PATH)
if 'templates' not in [i[0] for i in DB_Handler.table_list()]:
    DB_Handler.create('templates', cmd=str, keys='target', group_key='comba', output='default.log')

# ETC_FILE = J('/usr/local/etc/', 'hack.')
OUTPUT_DIR = '/tmp/HackerOutput/'
ROOT_DIR = list(util.find_spec("Hacker").submodule_search_locations).pop()
RES = J(ROOT_DIR, 'res')
TEMPLATE_PATH = J(J(ROOT_DIR, 'ini'), 'TEMPLATES.py')
MODULE_PATH = J(
    list(util.find_spec("Hacker").submodule_search_locations).pop(),
    'modules')
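# A hedged sketch of seeding one row into the 'templates' table created above, so that
# execute() and search_comba_cmds() have something to find. The command text, the
# "recon" group_key and the output file name are made-up examples; the column order
# mirrors the create() call, and the 'keys' column holds space-separated placeholder names.
DB_Handler.insert('templates',
                  ['cmd', 'keys', 'group_key', 'output'],
                  'nmap -sV {target}', 'target', 'recon', 'nmap.log')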
class Searcher:
    """
    proxy sets the proxy used for requests, for example:
        {
            'http': 'socks5://127.0.0.1:1080',
            'https': 'socks5://127.0.0.1:1080'
        }
    """
    config = {
        'search_url': '',
        'search_args': {},
    }

    def __init__(self, ssl=True, asyn=True, debug=False, db=False, database=None, proxy=False):
        self.url_pre = 'https://www.' if ssl else 'http://www.'
        self.search_name = self.__class__.__name__.lower()
        self.host = self.url_pre + self.search_name + '.com'
        self.agent = random_choice(AGS)
        self.asyn = None
        self.db = None
        self.debug = debug
        self.proxy = None
        self.grep_html_content = dict()
        if asyn:
            self.asyn = Exe(20)
        if proxy:
            LogControl.info("loading proxy..", end='')
            self.proxy = proxy  # Mongo('local').find("setting")[0].get("proxy")
            if self.proxy:
                LogControl.ok("")
        if db:
            self.use_db = db
            db_path = os.path.join(
                os.getenv("HOME"), '.Search-engine-sqlite3-db.db') if not database else database
            self.db_path = db_path
            self.db = SqlEngine(database=db_path)
            if not self.db.table_list():
                self.db.create(self.search_name, query=str, content=str, type='web')

    def proxy_to(self, url, **kargs):
        if self.proxy:
            return to(url, proxy=self.proxy, **kargs)
        else:
            return to(url, **kargs)

    def xpath(self, html, *tags, exclude=None):
        xhtml = HTML(html)
        exclude = '[not(name()={})]'.format(exclude) if exclude else ''
        if self.debug:
            LogControl.info("//" + "//".join(tags) + exclude)
        for item in xhtml.xpath("//" + "//".join(tags) + exclude):
            yield item

    def ready(self, pre_url, *args, **options):
        pass

    def set_args(self, **kargs):
        return parameters(**kargs)

    def grep(self, url, grep_str, num):
        """
        Asynchronously fetch url and grep its links for grep_str;
        the callback receives (num, [matching hrefs]).
        """
        def _request():
            if grep_str.lower().find(url.split(".").pop().lower()) != -1:
                return num, []
            res = self.proxy_to(url)
            if res.status_code // 100 == 2:
                encoding = 'utf8'  # res.encoding if res.encoding else 'utf8'
                LogControl.err(url, res.encoding)
                if res.content:
                    try:
                        h_content = res.content.decode(encoding)
                        return num, [
                            i.attrib.get('href')
                            for i in HTML(h_content).xpath('//a')
                            if i.attrib.get('href', '').find(grep_str) != -1
                        ]
                    except UnicodeError as e:
                        LogControl.err(e, '\n', res.encoding, encoding, url)
                        return num, []
                    except KeyError as e:
                        LogControl.err(e, url)
                        return num, []
                else:
                    return num, []

        if self.asyn:
            self.asyn.done(_request, self.save_grep)

    def save_grep(self, num, res):
        self.grep_html_content[num] = res
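# A minimal usage sketch, not part of the original code: "Bing" is a hypothetical
# subclass name (the class name is lower-cased to build the host, so this instance
# targets https://www.bing.com). It assumes it runs inside this module, where AGS,
# to() and HTML() are already defined.
class Bing(Searcher):
    pass

engine = Bing(asyn=False, db=False)
print(engine.host)                            # -> https://www.bing.com
resp = engine.proxy_to(engine.host)           # plain request, or via self.proxy when set
for link in engine.xpath(resp.content, 'a'):  # iterate every <a> element in the page
    print(link.attrib.get('href'))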
def on_result(self, t, v):
    if t == 'new':
        self.id = v['taskid']
        db = SqlEngine(**self.connect_options)
        if db is not None:
            db.insert("Task", ['task_id', 'url', 'status'], self.id, 'None', 'not running')
            db.close()
    elif t == 'data':
        if self.verbose:
            info(v)
        db = SqlEngine(**self.connect_options)
        if v['data']:
            if db:
                update_set = {
                    'url': self.test_url,
                    'status': 'injectable',
                    'data': str(v['data']),
                    'ftime': datetime.datetime.now()
                }
                db.update("Task", update_set, task_id=self.id)
            else:
                if self.verbose:
                    ok(v['data'])
            self.injectable = True
        else:
            update_set = {
                'url': self.test_url,
                'status': 'failed',
                'ftime': datetime.datetime.now()
            }
            db.update("Task", update_set, task_id=self.id)
        db.close()
        # when the scan has stopped, delete the task
        self.task_cmd("delete")
    elif t == "status":
        try:
            if self.verbose:
                info(v[u'status'])
            if v[u'status'] == 'terminated':
                self.terminated = True
                self.result()
            else:
                if not self.terminated:
                    self.exe.timmer(5, self.status)
        except KeyError:
            pass
    elif t == 'start':
        if self.verbose:
            info(v['success'])
    elif t == 'set':
        if self.verbose:
            ok('\ninit options')
    elif t == 'kill':
        if self.verbose:
            fail(v)
    elif t == 'stop':
        if self.verbose:
            wrn(v)
    elif t == 'list':
        for k in v['options']:
            if self.verbose:
                ok(k, v['options'][k])
    elif t == 'task':
        if self.verbose:
            info(v)
    elif t == 'log':
        for msg in v['log']:
            # if self.log:
            info(msg)
class Social(cmd.Cmd):
    intro = colored("Social DB", "red")
    prompt = colored("<Social>", "red")

    def __init__(self):
        super().__init__()
        self.db = SqlEngine(database=SOCIAL_DB_PATH)
        for k in SOCIAL:
            if (k,) not in self.db.table_list():
                self.db.create(k, **SOCIAL[k])
        self.social = SOCIAL

    def do_add(self, name):
        m = SOCIAL[name]
        res = dict_cmd(m)
        res = file_check(res)
        self.db.insert(name, list(res.keys()), *res.values())

    def do_change(self, name):
        self.do_search(name)
        tm1 = {
            'title': str,
            'new': str,
            'id': str,
        }
        tm1 = dict_cmd(tm1)
        self.db.update(name, {tm1['title']: tm1['new']}, id=int(tm1['id']))

    def do_setting(self, args):
        for k in SOCIAL:
            LogControl.i(k, txt_color='red')
        m = dict_cmd({"which table": None})
        for k in SOCIAL[m["which table"]]:
            LogControl.i(k, txt_color='red', tag=m["which table"])
        res = dict_cmd({
            'Title': None,
            'Value': None,
        })
        new_cols = {res['Title']: res['Value']}
        self.db.alter(m["which table"], **new_cols)

    def do_clear(self, arg):
        os.system("tput cl")

    def do_vi_setting(self, arg):
        os.system("vim " + TEMPLATE_PATH)

    def do_search(self, name):
        data_tmp = SOCIAL[name]
        tmp = {
            "set search key": str,
        }
        tmp = dict_cmd(tmp)
        keys = dict.fromkeys(
            tmp['set search key'].split(',')
            if tmp['set search key'].find(",") != -1
            else tmp['set search key'].split()
        )
        keys = dict_cmd(keys)
        cols = input_default(
            "which interesting?\n %s\n>" % ' '.join(
                [colored(i, attrs=['underline']) for i in data_tmp.keys()]
            )
        )
        if 'all' in cols:
            cols = ' '.join(list(data_tmp.keys()))
        cols = cols.split(",") if cols.find(",") != -1 else cols.split()
        res = self.db.search(name, *cols, **keys)
        for i in res:
            v = dict(zip(cols, i))
            for k in v:
                print("\t", end='')
                v[k] = v[k][:90] + '...' if len(v[k]) > 90 else v[k]
                LogControl.i(v[k], tag=k, txt_color='yellow')

    def do_list(self, name):
        data_tmp = SOCIAL[name]
        cols = list(data_tmp.keys())
        res = self.db.search(name, *cols)
        for i in res:
            v = dict(zip(cols, i))
            for k in v:
                print("\t", end='')
                v[k] = v[k][:90] + '...' if len(v[k]) > 90 else v[k]
                LogControl.i(v[k], tag=k, txt_color='yellow')
            print("---------------------")

    def export(self, name):
        tmp = SOCIAL[name]
        keys = list(tmp.keys())
        data = self.db.select(name, *keys)
        for item in data:
            for i, v in enumerate(item):
                if v.startswith('[*b64*]'):
                    v = b642file(v[7:])
                    r = v.find("]") + 1
                    v_name = v[1:r - 1]
                    v = v[r:]
                str_tmp = "%s: %s" % (keys[i], v)
                yield str_tmp
            yield "--------------------------------------------------------"

    def do_export(self, name):
        data = self.export(name)
        output = dict_cmd({'output[path to save]': "./pdf.pdf"})
        fpath = output['output[path to save]']
        LogControl.title('to', output_to_pdf(data, fpath))

    def do_quit(self, some):
        return True
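# A minimal launch sketch, not from the original file: it assumes SOCIAL and
# SOCIAL_DB_PATH are defined as in the surrounding module, and simply starts the
# interactive shell (cmd.Cmd provides cmdloop()).
if __name__ == '__main__':
    Social().cmdloop()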
def check(url):
    show(url, end='')
    global STA
    try:
        db_init = SqlEngine(database='%s/res_db/%s.db' % (ROOT, url))
        db_init.create(
            'cdn',
            url=str,
            sid=int,
            avgtime=str,
            srcip=str,
            srcname=str,
            isp=str,
            name=str,
            view=str,
        )
    except Exception:
        pass
    headers = {
        'origin': 'https://www.17ce.com',
        'referer': 'https://www.17ce.com/',
        'content-type': 'application/x-www-form-urlencoded',
    }
    req, res = to("www.17ce.com", cookie=True, agent=True)
    req.headers.update(headers)
    req.cookies.update({'allSites': url})
    verify = sha1(b'C^dLMi%r&JH7bkmdFCgGl8' + url.encode('utf8') + b"1TnvST&D9LJ").hexdigest()
    data = urlencode({
        'rt': '1',
        'nocache': '0',
        'url': url,
        'verify': verify,
        'host': '',
        'referer': '',
        'cookie': '',
        'agent': '',
        'speed': '',
        'postfield': '',
        'pingcount': '',
        'pingsize': '',
    })
    for i in range(4):
        data += '&' + urlencode({'area[]': i})
    for i in [0, 1, 2, 4, 6, 7, 8]:
        data += '&' + urlencode({'isp[]': i})
    show(" init ", end='')
    res = req.post("https://www.17ce.com/site/ping", data=data).content
    res = json.loads(res.decode('utf8', 'ignore'))
    L(" ok")
    time.sleep(1)
    # show(res)
    show("... from server getting data .. wait")
    e = Exe(3)
    d = urlencode({'tid': res['tid'], 'num': 0, 'ajax_over': 0})
    STA[url] = True
    e.done(reqq, recv, url, req, "https://www.17ce.com/site/ajaxfresh", d)
    b = bar()
    cc = 0
    while STA[url]:
        cc += 1
        time.sleep(1)
        show(next(b), end='\r')
        if STA[url] == 're-try':
            e.done(reqq, recv, url, req, "https://www.17ce.com/site/ajaxfresh", d)
            STA[url] = True
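# An illustrative sketch of the 'verify' form field computed inside check() above:
# the two salt strings are taken verbatim from that code, while "example.com" is a
# made-up input URL used only for the demonstration.
from hashlib import sha1

verify = sha1(b'C^dLMi%r&JH7bkmdFCgGl8' + b'example.com' + b"1TnvST&D9LJ").hexdigest()
print(verify)  # hex digest sent as the 'verify' parameter of the /site/ping request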