def __init__(self, proxy=False):
    """Set up the fofa.so client: browser session, selector waiter, cache DB.

    proxy: forwarded to FLowNet to route browser traffic through a proxy.
    """
    self.main_url = "https://fofa.so"
    # Headless browser session driving the scrape.
    self.flow = FLowNet(self.main_url, proxy=proxy)
    # Give any CSS selector up to 12 seconds to appear.
    self.waiter = WebDriverWait(self.flow.phantom, 12)
    # Credential / cookie file locations come from module-level config.
    self.LOGIN_INFO = LOGIN_INFO
    self.LOGIN_INFO_SESSION = LOGIN_INFO_SESSION
    # Local cache for parsed results.
    self.DB_Handle = Cache(DB_FOFA)
def new(path):
    """Return the Message row with the highest id stored at *path*, or None
    when the cache holds no messages."""
    store = Cache(path)
    # max() with default=None mirrors the original's ValueError fallback.
    return max(store.query(Message), key=lambda m: m.id, default=None)
def run(self):
    """Poll for new messages forever, persisting them and dispatching commands.

    Reads self.db (cache path), self.token (poll credential), self._map
    (command name -> callback), self.interval (seconds between polls) and
    self.get_command (parses a message into (command, args)).  Never returns.

    Note: only the most recent unseen message per poll cycle is dispatched;
    earlier unseen messages are saved but not executed.
    """
    db = Cache(self.db)
    print(f"connect to db: {self.db}")
    while 1:
        msgs = list(Message.update_msg(self.token))
        new_msg = None
        for msg in msgs:
            # Skip messages already persisted in a previous cycle.
            if db.query_one(Message, msg_id=msg.msg_id):
                continue
            msg.save(db)
            # print(f"to db : {msg.msg_id} : {msg.time}", end='\r')
            new_msg = msg
        if new_msg:
            print(f"got new: {new_msg.msg_id} => {new_msg.msg_text}")
            com, args = self.get_command(new_msg.msg_text)
            f = self._map.get(com)
            if f:
                print(f"callback {com} : {args}")
                try:
                    f(*args)
                except Exception as e:
                    # Keep the poll loop alive on callback failure.
                    logging.info(str(e))
                    print(f"err {str(e)}")
        time.sleep(self.interval)
def _query(key):
    """Fuzzy-search cached messages for *key* and return the hits as a JSON string.

    Returns '{"fail": "no found"}' when nothing matches.
    """
    store = Cache(os.path.expanduser(MSG_DB))
    # fuzzy_search streams each hit into BackDb through the save callback;
    # list() just drains the iterator.
    list(store.fuzzy_search(Msg, key, BackDb.save))
    hits = BackDb.extract_all()
    return json.dumps(hits if hits else {"fail": "no found"})
def new(self, num=8):
    """Create a new session: pick *num* worker hosts, open a per-session DB,
    and record a Book index row.  Returns the generated session name."""
    self._workers = list(self._dispatch_servers(num))
    # Session name is the creation timestamp.
    self._name = str(int(time.time()))
    self._session_db = Cache(
        os.path.join(conf['session-db-dir'], self._name + ".db"))
    book = Book(name=self._name, workers='|'.join(self._workers))
    book.save(Cache(conf['session-db']))
    return self._name
def _load(self, name):
    """Load an existing session by *name*.

    Returns the name on success, or the string 'Not found' when no Book
    index row exists for it.
    """
    index = Cache(conf['session-db']).query_one(Book, name=name)
    if not index:
        return 'Not found'
    self._workers = index.get_servers()
    self._name = name
    self._session_db = Cache(
        os.path.join(conf['session-db-dir'], name + ".db"))
    self._conf = Config(file=J(conf['session-db-dir'], name + ".conf"))
    return self._name
def _dispatch_servers(self, num):
    """Yield up to *num* distinct host names drawn at random from the host DB.

    Stops early when the DB holds fewer than *num* hosts.

    Fix: the original used random.randint(0, len(hs)), whose upper bound is
    inclusive, so it could produce an index one past the end of the list and
    raise IndexError from hs.pop(); it also crashed on an empty host table.
    """
    host_db = Cache(HOST_DB)
    hosts = [h.host for h in host_db.query(Host)]
    picked = 0
    while hosts and picked < num:
        # randrange(n) is always a valid index into a list of length n.
        yield hosts.pop(random.randrange(len(hosts)))
        picked += 1
def update_auth(db, token):
    """Persist *token* as the hash_code of the sentinel Token row (phone='0')
    in the cache at *db*, creating the row on first use."""
    cache = Cache(db)
    entry = cache.query_one(Token, phone='0')
    if not entry:
        # First run: create the sentinel auth row.
        entry = Token(tp='tel',
                      token='0',
                      phone='0',
                      hash_code=token,
                      set_timeout=24 * 60)
    entry.hash_code = token
    print(entry.hash_code)
    res = entry.save(cache)
    logging.info(f"db handle : {res}")
def use_db():
    """Generator yielding a Cache handle on DB_FOFA, releasing it afterwards.

    Fix: the original constructed Cache(DB_FOFA) inside the try block, so a
    constructor failure made the finally clause's `del ca` raise NameError,
    masking the real error.  Construct before entering try.
    """
    ca = Cache(DB_FOFA)
    try:
        yield ca
    finally:
        # Drop the handle even if the consumer raised.
        del ca
def auth_and_settoken(token):
    """Check a 36-character Vultr API token against the account endpoint and
    store it, replacing any previously stored Token rows."""
    if len(token) != 36:
        print(colored("[+]", 'red'), "len is error: ", token)
        return
    try:
        # Probe the token; a bad key or network failure lands in except.
        requests.get("https://api.vultr.com/v1/account/info",
                     headers={'API-Key': token}).json()
        cache = Cache(DB_PATH)
        try:
            cache.drop(Token)
        except Exception:
            # Best effort: the table may not exist yet.
            pass
        Token(token=token).save(cache)
        print(colored("[+]", 'green'), 'token -> ', token)
    except Exception as e:
        print(colored("[+]", 'red'), e)
def onQQMessage(bot, contact, member, content):
    """QQ-bot message hook: archive every incoming message and forward
    keyword matches to DingTalk.

    bot/contact/member/content follow the qqbot onQQMessage callback shape.
    """
    # print(f"{contact} | {member.name} : {content}")
    m = MsgMan()
    print(colored(contact.name + "|" + member.name, 'green'), end='\r')
    # NOTE(review): this reuses and mutates contact.__dict__ in place —
    # confirm the contact object is not reused elsewhere afterwards.
    msg_d = contact.__dict__
    msg_d['sendUser'] = member.name
    msg_d['sendQq'] = member.uin
    msg_d['sendNick'] = member.nick
    msg_d['sendType'] = contact.ctype
    msg_d['sendContent'] = content
    msg = Msg(**msg_d)
    m.syncs_msg(msg)
    c = Cache(MSG_DB)
    # Newest stored DingTalk token.
    # NOTE(review): raises IndexError when no 'ding' token row exists yet.
    ding_tk = list(c.query(Token, tp='ding'))[-1]
    if_send = False
    # Forward only when the message contains any configured keyword.
    for k in c.query(Key):
        if k.text in content:
            if_send = True
            break
    if if_send:
        send_notification(msg, ding_tk.text)
def post(self):
    """Receive a packaged batch of messages and persist them.

    Form fields: data (packed payload), if_zip ('true' when compressed),
    mac (sender identifier).  Responds with JSON {'ok': <count saved>}.
    """
    data = self.get_argument("data")
    if_zip = self.get_argument("if_zip")
    # Only the literal string 'true' becomes a boolean; any other value is
    # passed through to unpackage unchanged.
    if if_zip == 'true':
        if_zip = True
    mac = self.get_argument("mac")
    package = {'data': data, 'if_zip': if_zip, 'mac': mac}
    msgs = unpackage(package)
    store = Cache(os.path.expanduser(MSG_DB))
    store.save_all(*msgs)
    self.write(json.dumps({'ok': len(msgs)}))
    self.finish()
def post(self):
    """Settings endpoint: add a search Key or a DingTalk Token to the
    message DB.

    Expects a 'setting' form field holding JSON with either a 'key' or a
    'token' entry; replies with a JSON confirmation.

    Fix: removed the unused local `tloop = tornado.ioloop.IOLoop.instance()`
    from the original — its value was never used.
    """
    json_data = self.get_argument("setting")
    data = json.loads(json_data)
    if 'key' in data:
        key_v = data['key']
        db = Cache(os.path.expanduser(MSG_DB))
        k = Key(text=key_v)
        k.save(db)
        self.write(json.dumps({'res': 'add key: %s' % key_v}))
    elif 'token' in data:
        key_v = data['token']
        db = Cache(os.path.expanduser(MSG_DB))
        t = Token(text=key_v, tp='ding')
        t.save(db)
        self.write(json.dumps({'res': 'add ding token:%s' % key_v}))
    else:
        self.write("noting happend")
    self.finish()
import os from os import path # here to load all controllers # from Qtornado.log import LogControl from .controller import * import logging as L from qlib.data import Cache, dbobj # load ui modules import Reserver.ui as ui import sys import os # db engine # db_engine = pymongo.Connection()['local'] db_connect_cmd = os.path.expanduser('~/db.sql') print("-- connecto db:", db_connect_cmd) db_engine = Cache(db_connect_cmd) # static path rdir_path = os.path.dirname(__file__) static_path = os.path.join(rdir_path, "static") files_path = os.path.join(static_path, "files") # set log level # LogControl.LOG_LEVEL |= LogControl.OK # LogControl.LOG_LEVEL |= LogControl.INFO Settings = { 'db': db_engine, 'L': L, 'debug': True, "ui_modules": ui,
def StartWeRobot():
    """CLI entry point for the WeChat relay bot.

    -s / --start          log into WeChat via itchat and run the bot, using
                          the newest DingTalk token from the message DB
    -a / --add-token TOK  store a DingTalk token in the message DB
    -I / --initialization write supervisor config files, install and start
                          supervisord, and load the services
    """
    parser = ArgumentParser()
    parser.add_argument("-s", "--start", action="store_true", default=False, help="start server")
    parser.add_argument("-a", "--add-token", default=None, help="setting token")
    parser.add_argument("-I", "--initialization", action="store_true", default=False, help="initialization config")
    args = parser.parse_args()
    c = Cache(MSG_DB)
    if args.start:
        # Newest stored DingTalk token drives notifications.
        t = list(c.query(Token, tp='ding'))[-1]
        l = Loginer(t.text)
        itchat.auto_login(qrCallback=l, hotReload=True)
        itchat.run()
    elif args.add_token:
        t = Token(text=args.add_token, tp='ding')
        t.save(c)
    elif args.initialization:
        # Supervisor program stanza for the WeChat bot.
        text = """[program:x-wechat]
command=/usr/local/bin/Qwechat -s
stdout_logfile= /var/log/x-wechat.log
stderr_logfile= /var/log/x-wechat.err.log
"""
        # Supervisor program stanza for the relayer.
        text3 = """[program:x-replayer]
command=/usr/local/bin/Qserver -p 14144
stdout_logfile= /var/log/x-relayer.log
stderr_logfile= /var/log/x-relayer.err.log
"""
        # Main supervisord configuration.
        text2 = """[unix_http_server]
file=/tmp/supervisor.sock ; path to your socket file

[supervisord]
logfile=/var/log/supervisord/supervisord.log ; supervisord log file
logfile_maxbytes=50MB ; maximum size of logfile before rotation
logfile_backups=10 ; number of backed up logfiles
loglevel=error ; info, debug, warn, trace
pidfile=/var/run/supervisord.pid ; pidfile location
nodaemon=false ; run supervisord as a daemon
minfds=1024 ; number of startup file descriptors
minprocs=200 ; number of process descriptors
user=root ; default user
childlogdir=/var/log/supervisord/ ; where child log files will live

[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface

[supervisorctl]

[inet_http_server]
port = 127.0.0.1:9001

[include]
files = /root/.config/services/*.conf
"""
        if not os.path.exists(os.path.expanduser("~/.config/services")):
            os.mkdir(os.path.expanduser("~/.config/services"))
        with open(os.path.expanduser("~/.config/services/x-wechat.conf"), 'w') as fp:
            fp.write(text)
        with open(os.path.expanduser("~/.config/services/x-relayer.conf"), 'w') as fp:
            fp.write(text3)
        with open(os.path.expanduser("~/.config/supervisor.conf"), 'w') as fp:
            fp.write(text2)
        print("-- init --- install ok")
        # Best-effort shell setup; output is drained but ignored.
        os.popen(
            "pip3 install -U git+https://github.com/Supervisor/supervisor.git 1>/dev/null 2>/dev/null;"
        ).read()
        os.popen("mkdir /var/log/supervisord/ ").read()
        os.popen("supervisord -c ~/.config/supervisor.conf").read()
        print("-- init --- start supervisor ok")
        os.popen("supervisorctl reread;supervisorctl update;").read()
        print("-- init --- start load procecers ok")
def destroy(self):
    """Remove this session's Book index row from the session DB.

    Fix: query_one returns None when no index row exists for self._name;
    the original passed that None straight to delete().  A missing row is
    now treated as already destroyed.
    """
    c = Cache(conf['session-db'])
    index = c.query_one(Book, name=self._name)
    if index:
        c.delete(index)
class Fofa:
    """Scraper for fofa.so driven by a headless browser (FLowNet / selenium).

    Handles login (stored credentials or pickled cookies), keyword search,
    result-page parsing, and caching parsed Info rows in a local DB.
    """

    def __init__(self, proxy=False):
        """proxy: forwarded to FLowNet to route traffic through a proxy."""
        self.main_url = "https://fofa.so"
        self.flow = FLowNet("https://fofa.so", proxy=proxy)
        # Give any CSS selector up to 12 seconds to appear.
        self.waiter = WebDriverWait(self.flow.phantom, 12)
        # Credential / cookie file paths from module-level config.
        self.LOGIN_INFO = LOGIN_INFO
        self.LOGIN_INFO_SESSION = LOGIN_INFO_SESSION
        self.DB_Handle = Cache(DB_FOFA)

    def selector(self, x, time=0):
        """Wait for CSS selector *x*; on timeout, retry recursively.

        After more than 3 retries, dump a screenshot and the page HTML to
        /tmp for debugging and exit the process.
        NOTE(review): the *time* parameter is a retry counter and shadows
        the stdlib time module inside this method.
        """
        try:
            r = self.waiter.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, x)))
            if r:
                return r
        except TimeoutException as e:
            if isinstance(time, int) and time > 3:
                rprint("not found : %s , you can see error page in " % x +
                       colored("/tmp/TimeoutException.png ", 'red'))
                self.flow.screenshot("TimeoutException")
                with open("/tmp/TimeoutException.html", "w") as fp:
                    fp.write(self.flow.html())
                # raise TimeoutException("time out not found : %s" %x)
                sys.exit(0)
            return self.selector(x, time + 1)

    def load_session_login(self):
        """Use pickled cookies when present, else fall back to a fresh login."""
        if os.path.exists(self.LOGIN_INFO_SESSION):
            self.load_session()
        else:
            self.login()

    def login(self, u=None):
        """Log in with credentials from *u*, the stored LOGIN_INFO file, or
        an interactive prompt; persist credentials and session cookies.

        u: optional dict with 'username' and 'password' keys.
        Returns (user, passwd).
        """
        user = None
        passwd = None
        if not u:
            if os.path.exists(self.LOGIN_INFO):
                with open(LOGIN_INFO) as fp:
                    u = json.load(fp)
                user = u['username']
                passwd = u['password']
            else:
                # No stored credentials: ask interactively.
                user = input("email>")
                passwd = getpass.getpass("passwd>")
        else:
            user = u['username']
            passwd = u['password']
        gprint("try login")
        #selector = lambda x: waiter.until(EC.presence_of_element_located((By.CSS_SELECTOR, x)))
        to_login = self.selector("a#but_zc")
        self.flow.screenshot('login')
        to_login.click()
        input_user = self.selector('#username')
        input_pass = self.selector('#password')
        input_user.send_keys(user)
        # Trailing newline submits the form.
        input_pass.send_keys(passwd + "\n")
        gprint(" --- Login ---")
        # with open(LOGIN_INFO_SESSION, 'w') as fp:
        u = {'username': user, 'password': passwd}
        with open(self.LOGIN_INFO, 'w') as fp:
            json.dump(u, fp)
        self.save_session()
        return user, passwd

    def save_session(self):
        """Pickle the current browser cookies to LOGIN_INFO_SESSION."""
        gprint(" save session cookies")
        self.flow.go(self.main_url)
        with open(self.LOGIN_INFO_SESSION, 'wb') as fp:
            pickle.dump(self.flow.phantom.get_cookies(), fp)

    def load_session(self):
        """Restore pickled cookies into the browser session.

        NOTE(review): pickle.load on a local cookie file; fine for
        self-produced data, unsafe if the file can be tampered with.
        """
        gprint(" load cookies from")
        cookies = pickle.load(open(self.LOGIN_INFO_SESSION, "rb"))
        self.flow.go(self.main_url)
        self.flow.screenshot("beforelogin")
        for cookie in cookies:
            gprint(str(cookie))
            try:
                self.flow.phantom.add_cookie(cookie)
            except Exception as e:
                rprint("load cookie failed. try --login")

    def search(self, key, page=0):
        """Search fofa for *key*, parse result pages, return a list of Info.

        page: number of additional result pages to walk after the first.
        Returns None when the result count is 0 or the page fails to load.
        """
        search_input = self.selector("input#q")
        search_input.send_keys("{}\n".format(key))
        self.flow.screenshot("mod")
        check_if_found = self.selector(".list_jg")
        check_if_found = check_if_found.get_attribute('outerHTML')
        num = 0
        try:
            # Total hit count scraped from the summary bar.
            num = re.findall(r'Total\ results:\s(\d+)', check_if_found)[0]
            gprint("found result in :%s = %s" % (key, num))
            num = int(num)
            if num == 0:
                return
        except IndexError:
            self.flow.screenshot("error.png")
            rprint("page load error !! see /tmp/error.png ")
            return
        self.selector(".list_mod")
        res = self.flow.html()
        iis = []
        iis += self.parse(res)
        gprint("Done %d" % len(iis))
        # First page already covered everything.
        if len(iis) >= num:
            return iis
        gprint(" page 1")
        if page and isinstance(page, int):
            for i in range(page):
                next_page = self.selector("a.next_page")
                next_page.click()
                self.selector(".list_mod")
                res = self.flow.html()
                gprint(" page " + str(i + 2))
                iis += self.parse(res)
        return iis

    def parse(self, res):
        """Parse one result page's HTML into Info rows; save unseen rows.

        NOTE(review): the locals `time` and `os` shadow the stdlib modules
        within this method.
        """
        mods = BS(res, 'html.parser').select('.list_mod')
        infos = []
        c = 0
        for m in mods:
            c += 1
            ports = '/'.join([
                i.text for i in m.select('.list_mod_t > .span > span')
            ]).replace("\n", "").replace(" ", "")
            pa = m.select(".list_sx1 > li")
            rip = m.select("li > i.fa-map-marker")
            rtime = m.select("li > i.fa-clock-o")
            rplane = m.select("li > i.fa-plane")
            rhost = m.select(".list_mod_t > a")
            ros = m.select("li > span.list_xs2")
            ip = ""
            ti = ""
            time = ""
            geo = ""
            os = ""
            host = ""
            if rip:
                ip = rip[0].parent.text.replace("\n", "").replace(" ", "")
            # Title is the first detail cell unless it duplicates the time cell.
            if rtime[0].parent.text != pa[0].text:
                ti = pa[0].text.replace('\n', '').replace(' ', '')
            if rtime:
                time = rtime[0].parent.text.replace('\n', '').replace(' ', '')
            if rplane:
                geo = rplane[0].parent.text.replace('\n', '').replace(' ', '')
            if rhost:
                host = rhost[0].attrs['href']
            if ros:
                os = ros[0].parent.text.replace("\n", "").replace(" ", "")
            body = m.select('.auto-wrap')[0]
            if len(list(body)) > 1:
                body = ''.join([i.__str__() for i in list(body)])
            else:
                body = body.text
            lprint(title=ti,
                   host=host,
                   ip=ip,
                   ports=ports,
                   os=os,
                   time=time,
                   geo=geo,
                   body=body)
            info = Info(title=ti,
                        host=host,
                        ip=ip,
                        ports=ports,
                        os=os,
                        ctime=time,
                        geo=geo,
                        body=body)
            infos.append(info)
        iis = []
        for i in infos:
            # gprint(i.ip)
            # NOTE(review): this dedup query uses ip/ports/time left over
            # from the LAST loop iteration above, not i's own fields —
            # looks like a bug; confirm intended behavior before changing.
            if not self.DB_Handle.query_one(
                    Info, m='and', ip=ip, ports=ports, ctime=time):
                iis.append(i)
        gprint("-- save %d --" % len(iis))
        self.DB_Handle.save_all(*iis)
        return infos
def list(cls):
    """Return every Book index row stored in the session DB.

    NOTE(review): the method name shadows the builtin `list` on the class;
    the body itself still resolves names at module/builtin scope.
    """
    store = Cache(conf['session-db'])
    return [book for book in store.query(Book)]
def use_mem_db():
    """Generator yielding a Cache on the shared /tmp/memory.db file,
    releasing the handle afterwards.

    Fix: the original constructed the Cache inside the try block, so a
    constructor failure made the finally clause's `del ca` raise NameError,
    masking the real error.  Construct before entering try.
    """
    ca = Cache("/tmp/memory.db")
    try:
        yield ca
    finally:
        # Drop the handle even if the consumer raised.
        del ca
def get_db():
    """Open and return a fresh Cache handle on DB_PATH."""
    handle = Cache(DB_PATH)
    return handle
def get_db():
    """Open and return a fresh Cache handle on DB_FOFA."""
    handle = Cache(DB_FOFA)
    return handle