class ExpDBHandler:
    """Thin wrapper around one experiment table in the local sqlite DB.

    Usable methods:
        get(*columns, **kargs) -- yield selected rows.
        to_db(*values)         -- insert one row; returns True/False.

    Usable attributes: self.table, self.db, self.columns.
    """

    def __init__(self, table_name):
        """Bind to *table_name*, interactively creating it when missing.

        When the table does not exist yet, the user is prompted for
        column-name / column-type pairs until 'q' or empty input.
        """
        self.table = table_name
        self.db = SqlEngine(database=DB_PATH)
        if (table_name,) not in self.db.table_list():  # idiom: `not in`, not `not (..) in`
            ex_values = {}
            while True:
                name = dinput("[q or None exit] name>", None)
                if name in ['q', None]:
                    break
                value = dinput("\r[q or None exit] value>", None)
                if value in ['q', None]:
                    break
                # BUG FIX: the original compared with `is`, which tests object
                # identity, not equality — unreliable for strings (and a
                # SyntaxWarning in modern CPython). Use `==`.
                if value == "int":
                    ex_values[name] = int
                elif value == 'str':
                    ex_values[name] = str
                elif value == "time":
                    ex_values[name] = time
                elif value.isdigit():
                    # BUG FIX: `value in digits` was a substring test against
                    # "0123456789", so e.g. "90" fell through to the else
                    # branch; isdigit() recognises any decimal literal.
                    ex_values[name] = int(value)
                else:
                    ex_values[name] = value
            self.db.create(table_name, payload='some payload', **ex_values)
        # Column names, skipping the first two bookkeeping columns reported
        # by check_table().
        self.columns = tuple([i[0] for i in self.db.check_table(self.table)][2:])

    def get(self, *columns, **kargs):
        """Yield rows of *columns* from the table; kargs filter the select."""
        for item in self.db.select(self.table, *columns, **kargs):
            yield item

    def to_db(self, *value):
        """Insert one row; return True on success, False on any DB error."""
        try:
            self.db.insert(self.table, self.columns, *value)
        except Exception:
            return False
        return True
from qlib.data.sql import SqlEngine
from qlib.log import LogControl as L

# env.roledefs['targets'] = ['localhost']

# Command-line usage text shown to the user.
DOC = """ Usage: Ex [host fuzz name] ,, cmd Example: >> Ex example.host.com ,, ls . >> Ex ls . """

# NOTE(review): `output` and `BASE_URL` are not defined in this chunk —
# presumably imported or defined elsewhere in the file; confirm.
output.running = False

# Open the credentials DB and create the "host" table on first run.
sql = SqlEngine(BASE_URL)
if not sql.table_list():
    sql.create("host", login="******", port="port", passwd="passwd")


def get_records(fuzz=None):
    """Yield (login, port, passwd) rows from the "host" table.

    When *fuzz* is given, rows are filtered by a fuzzy match on the
    login column; otherwise every row is yielded.
    """
    if fuzz:
        for r in sql.search("host", "login", "port", "passwd", login=fuzz):
            yield r
    else:
        for r in sql.select("host", "login", "port", "passwd"):
            yield r


def par(l):
    # NOTE(review): this definition is truncated — the body of the loop
    # continues past the visible excerpt.
    if l:
        for i in l.split(","):
class Searcher:
    """Base helper for a single search-engine backend.

    The subclass name (lower-cased) becomes the engine host, e.g. a
    subclass named ``Google`` targets ``https://www.google.com``.

    @proxy is a requests-style proxy mapping, sample:
        { 'http': 'socks5://127.0.0.1:1080',
          'https': 'socks5://127.0.0.1:1080' }
    """

    config = {
        'search_url': '',
        'search_args': {},
    }

    def __init__(self, ssl=True, asyn=True, debug=False, db=False, database=None, proxy=False):
        # BUG FIX: the non-ssl prefix was the malformed 'https//www.'
        # (missing colon, wrong scheme); use a well-formed http:// URL.
        self.url_pre = 'https://www.' if ssl else 'http://www.'
        self.search_name = self.__class__.__name__.lower()
        self.host = self.url_pre + self.search_name + '.com'
        self.agent = random_choice(AGS)
        self.asyn = None
        self.db = None
        self.debug = debug
        self.proxy = None
        self.grep_html_content = dict()
        if asyn:
            # 20-worker executor used by grep() for async fetching.
            self.asyn = Exe(20)
        if proxy:
            LogControl.info("loading proxy..", end='')
            self.proxy = proxy  # Mongo('local').find("setting")[0].get("proxy")
            if self.proxy:
                LogControl.ok("")
        if db:
            self.use_db = db
            db_path = os.path.join(
                os.getenv("HOME"),
                '.Search-engine-sqlite3-db.db') if not database else database
            self.db_path = db_path
            self.db = SqlEngine(database=db_path)
            if not self.db.table_list():
                self.db.create(self.search_name, query=str, content=str, type='web')

    def proxy_to(self, url, **kargs):
        """GET *url*, routing through self.proxy when one is configured."""
        if self.proxy:
            return to(url, proxy=self.proxy, **kargs)
        return to(url, **kargs)

    def xpath(self, html, *tags, exclude=None):
        """Yield nodes matching //tag1//tag2..., optionally excluding a tag name."""
        xhtml = HTML(html)
        exclude = '[not(name()={})]'.format(exclude) if exclude else ''
        if self.debug:
            LogControl.info("//" + "//".join(tags) + exclude)
        for item in xhtml.xpath("//" + "//".join(tags) + exclude):
            yield item

    def ready(self, pre_url, *args, **options):
        """Hook for subclasses: prepare a request before searching."""
        pass

    def set_args(self, **kargs):
        """Build a query-parameter string from keyword arguments."""
        return parameters(**kargs)

    def grep(self, url, grep_str, num):
        """ async to grep url's content by grep_str, then will return (num, [*url]) -> callback """
        def _request():
            # Skip self-referential matches: grep_str already contains the
            # url's last dotted component.
            if grep_str.lower().find(url.split(".").pop().lower()) != -1:
                return num, []
            res = self.proxy_to(url)
            # BUG FIX: `res.status_code / 100 == 2` is true division in
            # Python 3, so only exactly 200 passed (201/100 == 2.01);
            # floor-divide to accept any 2xx response.
            if res.status_code // 100 == 2:
                encoding = 'utf8'  # res.encoding if res.encoding else 'utf8'
                if self.debug:
                    # was an unconditional err() call — debug leftover,
                    # now only emitted in debug mode
                    LogControl.err(url, res.encoding)
                if res.content:
                    try:
                        h_content = res.content.decode(encoding)
                        return num, [
                            i.attrib.get('href')
                            for i in HTML(h_content).xpath('//a')
                            if i.attrib.get('href', '').find(grep_str) != -1
                        ]
                    except UnicodeError as e:
                        LogControl.err(e, '\n', res.encoding, encoding, url)
                        return num, []
                    except KeyError as e:
                        LogControl.err(e, url)
                        return num, []
                else:
                    return num, []

        if self.asyn:
            self.asyn.done(_request, self.save_grep)

    def save_grep(self, num, res):
        """Callback: store grep results for request number *num*."""
        self.grep_html_content[num] = res
from importlib import util
from qlib.data.sql import SqlEngine
from qlib.data import GotRedis

# some shortcut . let it easy.
# NOTE(review): `os` is not imported in this chunk — presumably imported
# elsewhere in the file; confirm.
J = os.path.join
ENV = os.getenv

redis = GotRedis()

# Local sqlite database holding the saved command templates.
DB_PATH = J(ENV('HOME'), '.hacker_cmds')
DB_Handler = SqlEngine(database=DB_PATH)
if 'templates' not in [i[0] for i in DB_Handler.table_list()]:
    # First run: create the templates table with its default columns.
    DB_Handler.create('templates', cmd=str, keys='target', group_key='comba', output='default.log')

# ETC_FILE = J('/usr/local/etc/', 'hack.')
OUTPUT_DIR = '/tmp/HackerOutput/'

# Resolve the installed "Hacker" package directory and derived paths.
ROOT_DIR = list(util.find_spec("Hacker").submodule_search_locations).pop()
RES = J(ROOT_DIR, 'res')
TEMPLATE_PATH = J(J(ROOT_DIR, 'ini'), 'TEMPLATES.py')
MODULE_PATH = J(
    list(util.find_spec("Hacker").submodule_search_locations).pop(),
    'modules')

# NOTE(review): the `try:` below is truncated — its body continues past
# this excerpt.
try:
class Social(cmd.Cmd):
    """Interactive shell (cmd.Cmd) over the social-profiles sqlite DB.

    Each table name in the SOCIAL mapping becomes a usable `do_*`
    argument; `dict_cmd` prompts the user for each field interactively.
    """
    # Banner and prompt for the interactive loop.
    intro = colored("Social DB", "red")
    prompt = colored("<Social>", "red")

    def __init__(self):
        # Open the DB and create any table declared in SOCIAL that is
        # missing (table_list() yields 1-tuples of table names).
        super().__init__()
        self.db = SqlEngine(database=SOCIAL_DB_PATH)
        for k in SOCIAL:
            if (k,) not in self.db.table_list():
                self.db.create(k, **SOCIAL[k])
        self.social = SOCIAL

    def do_add(self, name):
        # Prompt for every column of table *name* and insert one row.
        # file_check presumably inlines/encodes file-valued fields — confirm.
        m = SOCIAL[name]
        res = dict_cmd(m)
        res = file_check(res)
        self.db.insert(name, list(res.keys()), *res.values())

    def do_change(self, name):
        # Show current rows, then update one column of the row whose id
        # the user supplies.
        self.do_search(name)
        tm1 = {
            'title': str,
            'new': str,
            'id': str,
        }
        tm1 = dict_cmd(tm1)
        self.db.update(name, {tm1['title']: tm1['new']}, id=int(tm1['id']))

    def do_setting(self, args):
        # Add a new column (Title) with a default value (Value) to a
        # user-chosen table via ALTER TABLE.
        for k in SOCIAL:
            LogControl.i(k, txt_color='red')
        m = dict_cmd({"which table": None})
        for k in SOCIAL[m["which table"]]:
            LogControl.i(k, txt_color='red', tag=m["which table"])
        res = dict_cmd({
            'Title': None,
            'Value': None,
        })
        new_cols = {res['Title']: res['Value']}
        self.db.alter(m["which table"], **new_cols)

    def do_clear(self, arg):
        # Clear the terminal.
        os.system("tput cl")

    def do_vi_setting(self, arg):
        # Open the templates file in vim.
        os.system("vim " + TEMPLATE_PATH)

    def do_search(self, name):
        """Search table *name* by user-supplied keys and print chosen columns."""
        data_tmp = SOCIAL[name]
        tmp = {
            "set search key": str,
        }
        tmp = dict_cmd(tmp)
        # Accept comma- or whitespace-separated key names, then prompt
        # for the value of each.
        keys = dict.fromkeys(tmp['set search key'].split(',')
                             if tmp['set search key'].find(",") != -1
                             else tmp['set search key'].split())
        keys = dict_cmd(keys)
        cols = input_default("which interesting?\n %s\n>" % ' '.join([colored(i, attrs=['underline'])
                                                                     for i in data_tmp.keys()]))
        if 'all' in cols:
            cols = ' '.join(list(data_tmp.keys()))
        cols = cols.split(",") if cols.find(",") != -1 else cols.split()
        res = self.db.search(name, *cols, **keys)
        for i in res:
            v = dict(zip(cols, i))
            for k in v:
                print("\t", end='')
                # Truncate long values for display (assumes str values —
                # TODO confirm).
                v[k] = v[k][:90] + '...' if len(v[k]) > 90 else v[k]
                LogControl.i(v[k], tag=k, txt_color='yellow')

    def do_list(self, name):
        """Print every row of table *name*, all columns, truncated to 90 chars."""
        data_tmp = SOCIAL[name]
        cols = list(data_tmp.keys())
        res = self.db.search(name, *cols)
        for i in res:
            v = dict(zip(cols, i))
            for k in v:
                print("\t", end='')
                v[k] = v[k][:90] + '...' if len(v[k]) > 90 else v[k]
                LogControl.i(v[k], tag=k, txt_color='yellow')
            print("---------------------")

    def export(self, name):
        """Yield "key: value" lines for every row, decoding [*b64*] fields.

        NOTE(review): reconstructed from mangled source — the exact nesting
        of the separator yield and the bracket-parsing lines should be
        confirmed against the original file.
        """
        tmp = SOCIAL[name]
        keys = list(tmp.keys())
        data = self.db.select(name, *keys)
        for item in data:
            for i, v in enumerate(item):
                if v.startswith('[*b64*]'):
                    # '[*b64*]' prefix marks a base64-embedded file; decode
                    # it, then strip the leading "[name]" tag from the result.
                    v = b642file(v[7:])
                    r = v.find("]") + 1
                    v_name = v[1: r-1]  # NOTE(review): v_name is never used
                    v = v[r:]
                str_tmp = "%s: %s" % (keys[i], v)
                yield str_tmp
            # Record separator between rows.
            yield "--------------------------------------------------------"

    def do_export(self, name):
        # Export a table's rows to a PDF at a user-supplied path.
        data = self.export(name)
        output = dict_cmd({'output[path to save]': "./pdf.pdf"})
        fpath = output['output[path to save]']
        LogControl.title('to', output_to_pdf(data, fpath))

    def do_quit(self, some):
        # Returning True ends the cmd.Cmd loop.
        return True