def search(self, *key):
    """Search GitHub code for the given keywords and pretty-print matches.

    Each positional argument is one search keyword; they are joined with
    '+' into the GitHub code-search query.  Results are scraped from the
    page rendered by the selenium driver.
    """
    if not key:
        # fix: the original crashed on key[-1] when called with no keywords
        rprint("need at least one keyword")
        return
    gprint(key[-1])
    if not self.cookies:
        # No session yet: restore it, then prime the cookie jar with a
        # plain request so the scrape below is authenticated.
        self.load_session()
        res = requests.get("https://github.com/{}/product".format(self.user))
        self.cookies = res.cookies.get_dict()
        gprint(str(self.cookies))
    url = "https://github.com/search?q={}&type=code".format("+".join(key))
    self.sess.driver.get(url)
    res = self.sess.driver.page_source
    b = BeautifulSoup(res, 'lxml')
    codes = b.select(".code-list-item")
    if len(codes) > 0:
        gprint("Found : %d" % len(codes))
    else:
        gprint("Not found:")
        rprint(b.text.replace("\n", ""))
    # (an unused `ss = {}` accumulator was removed here)
    for code in codes:
        # Repository/file name of the hit.
        k = code.select(".text-bold")[0].text
        # Matched source lines keyed by their (colored) line index.
        v = {
            colored(str(n), 'green'): i.text.replace("\n", "")
            for n, i in enumerate(code.select("td.blob-code"))
        }
        gprint(colored(k, "blue"))
        Tprint(v)
def do_add(self, args):
    """Register a new webshell target.

    ``args`` is a whitespace-separated spec containing at least a target
    URL and a password, e.g. ``http://localhost/1.php chopper``.  Tokens
    are classified independent of position: a URL becomes the target
    (shell type inferred from its extension), known charsets set the
    encoding, known shell types set the type, anything else is the
    password.
    """
    target = None
    passwd = 'chopper'
    encoding = 'UTF-8'
    shell_type = None  # renamed from `type` to avoid shadowing the builtin
    All = args.split()
    # fix: the advertised minimum is two tokens (target + passwd), but the
    # original rejected anything under three — its own example failed.
    if len(All) < 2:
        rprint(
            "at least [target] [passwd] | like http://localhost/1.php chopper "
        )
        return
    for w in All:
        if w.startswith("http"):
            target = w
            # Infer the shell flavour from the URL's extension.
            if target.endswith(".php"):
                shell_type = 'php'
            elif target.endswith(".jsp"):
                shell_type = 'jsp'
            elif target.endswith(".asp"):
                shell_type = 'asp'
        elif w.lower() in ['utf-8', 'gbk']:
            encoding = w
        elif w in ['php', 'jsp', 'asp', 'aspx']:
            shell_type = w
        else:
            passwd = w
    checken = Checken(target=target, passwd=passwd,
                      encoding=encoding, type=shell_type)
    with use_db() as db:
        checken.save(db)
    gprint(shell_type, target, passwd, encoding)
def save_session(self, name, password, cookie):
    """Persist GitHub credentials and the session cookie jar to disk."""
    gprint("save cred and session")
    creds = {"user": name, "pass": password}
    with open(GITHUB_LOGIN, "wb") as fp:
        pickle.dump(creds, fp)
    with open(GITHUB_SESSION, 'wb') as fp:
        pickle.dump(cookie, fp)
def do_start_server(self, args):
    """(Re)start the bp server, killing any previously recorded instance.

    The server records its pid in /tmp/bpserver.pid; if that file exists
    the old process is terminated first, then a fresh server is launched
    and its pid cached on the class.
    """
    pid_file = "/tmp/bpserver.pid"
    if os.path.exists(pid_file):
        with open(pid_file) as fp:
            pid = int(fp.read().strip())
        os.kill(pid, signal.SIGTERM)
        os.remove(pid_file)
    # .read() blocks until the launcher exits, so the pid file is written
    # by the time we get here.
    os.popen("x-sehen --start-server bp").read()
    # fix: the original re-read the pid via a bare open() and leaked the
    # file handle.
    with open(pid_file) as fp:
        Bp._SERVER_PID = int(fp.read().strip())
    gprint("bp server running")
def load_session(self):
    """Restore pickled cookies into the phantom browser session."""
    gprint(" load cookies from")
    # fix: the original used pickle.load(open(...)) and leaked the handle
    with open(self.LOGIN_INFO_SESSION, "rb") as fp:
        cookies = pickle.load(fp)
    self.flow.go(self.main_url)
    self.flow.screenshot("beforelogin")
    for cookie in cookies:
        gprint(str(cookie))
        try:
            self.flow.phantom.add_cookie(cookie)
        except Exception:
            # Best effort: one bad cookie should not abort the rest.
            rprint("load cookie failed. try --login")
def do_load(self, path):
    """Load a saved brute request; 'last' picks the newest session file."""
    if path == 'last':
        sessions = sorted(os.listdir(BP_SESSION_DIR))
        if sessions:
            path = os.path.join(BP_SESSION_DIR, sessions[-1])
    if not os.path.exists(path):
        rprint("Not found : %s" % path)
        return
    with open(path, 'rb') as fp:
        Bp._brute_req = fp.read()
    gprint("Load ok")
def do_brute(self, args):
    """Fan the parsed brute request out over the thread pool."""
    parser = ParseRequest(Bp._brute_req)
    self._exe = ThreadPoolExecutor(max_workers=self.thread)
    for payload, opts in parser.eval_and_replace():
        task = partial(resender, parser, payload, opts, proxy=self.proxy)
        future = self._exe.submit(task)
        future.add_done_callback(self.summary)
        self.futures.append(future)
    gprint("run all")
def login(self, name=None, password=None):
    """Drive the GitHub login form, then persist the resulting session."""
    driver = self.sess.driver
    driver.get("https://github.com/login")
    find = driver.find_element_by_css_selector
    find("input[name=login]").send_keys(name)
    find("input[name=password]").send_keys(password)
    find("input[name=commit]").click()
    # Mirror the browser cookies into the requests session before saving.
    self.sess.transfer_driver_cookies_to_session()
    self.cookies = self.sess.cookies.get_dict()
    gprint(str(self.cookies))
    self.save_session(name, password, self.cookies)
def eval_and_replace(self):
    """Yield (request_body, substitutions) for every payload combination.

    Placeholders of the form ``{{expr}}`` in the raw request body are
    expanded via ``_eval_option`` and the cartesian product of their
    value lists (from ``_gen_map``) is substituted into the template.
    """
    template = self.req_body.decode('utf8', 'ignore')
    gprint(template)
    markers = re.findall(r'\{\{(.+?)\}\}', template)
    expanded = [self._eval_option(m) for m in markers]
    if not expanded:
        return
    for combo in self._gen_map(expanded):
        body = template
        for substitution in combo:
            body = self._replace_b(body, substitution)
        yield body, combo
def search_in_db(key=None, *show_options):
    """Query cached fofa Info records; return the rows selected as targets.

    key: fuzzy-search term; when falsy every record is printed and an
         empty list is returned.
    show_options: attribute names restricting the printed columns; when
         empty, the full record is printed.
    """
    prepare_test_info = []
    DB_Handle = Cache(DB_FOFA)
    if not key:
        # No search term: dump the whole table, collect nothing.
        for i in DB_Handle.query(Info):
            lprint(title=i.title,
                   os=i.os,
                   ip=i.ip,
                   ports=i.ports,
                   time=i.ctime,
                   geo=i.geo,
                   body=i.body)
    else:
        if not show_options:
            def printer(i):
                # Print the full record and remember it as a test target.
                lprint(title=i.title,
                       os=i.os,
                       ip=i.ip,
                       ports=i.ports,
                       time=i.ctime,
                       geo=i.geo,
                       body=i.body)
                prepare_test_info.append(i)
        else:
            def printer(i):
                prepare_test_info.append(i)
                f = {}
                for show in show_options:
                    if hasattr(i, show):
                        f[show] = getattr(i, show)
                    elif show == 'time':
                        # The stored attribute is `ctime`, shown as `time`.
                        f[show] = getattr(i, 'c' + show)
                lprint(**f)
        # fuzzy_search invokes `printer` per match; the loop just drains it.
        for i in DB_Handle.fuzzy_search(Info, key, printer=printer):
            pass
        gprint("set target: %d" % len(prepare_test_info))
    return prepare_test_info
def load_session(self):
    """Restore a GitHub session: cookies first, saved creds next, prompt last."""
    gprint("load seesion form github")
    if os.path.exists(GITHUB_SESSION):
        # Best case: a pickled cookie jar exists — reuse it directly.
        with open(GITHUB_SESSION, 'rb') as fp:
            self.cookies = pickle.load(fp)
        self.sess.cookies.update(self.cookies)
        self.sess.get("https://github.com")
        self.sess.transfer_session_cookies_to_driver()
        with open(GITHUB_LOGIN, 'rb') as fp:
            self.user = pickle.load(fp)['user']
    elif os.path.exists(GITHUB_LOGIN):
        # No session but saved credentials: log in with them.
        with open(GITHUB_LOGIN, 'rb') as fp:
            saved = pickle.load(fp)
        self.login(name=saved['user'], password=saved['pass'])
    else:
        # Nothing saved: ask interactively.
        name = input('Github name:')
        passwd = getpass.getpass("Github pass:")
        self.login(name, passwd)
def _eval_option(self, one):
    """Expand a single ``{{...}}`` placeholder into a list of payload words.

    Resolution order: an existing file path yields one word per line;
    otherwise the text is evaluated as a Python expression (after any
    ``[[file]]`` inclusions are patched in); if evaluation fails, the
    text is simply split on whitespace.
    """
    batch_words = []
    if os.path.exists(one.strip()):
        gprint("load file form : %s" % one)
        with open(one.strip()) as fp:
            for l in fp:
                o = l.strip()
                batch_words.append(o)
    else:
        try:
            if '[[' in one and ']]' in one:
                # Inline nested [[file]] references before evaluating.
                tone = one
                for d in self._get_c(tone):
                    one = self._replace_c(one, d)
                gprint("try parse from python code:\n %s" %
                       colored(one, 'blue'))
            # SECURITY: eval() on operator-supplied payload text — fine for
            # a local pentest tool, never feed untrusted input through it.
            w = eval(one)
            if isinstance(w, list):
                batch_words = w
        except Exception as e:
            rprint(str(e))
            gprint("only as words")
            # Fallback: treat the option as a literal word list.
            batch_words = one.split()
    return batch_words
def login(self, u=None):
    """Log into the site, persisting credentials and session afterwards.

    u: optional ``{'username': ..., 'password': ...}`` dict; when omitted,
    saved credentials are loaded from ``self.LOGIN_INFO`` or prompted for
    interactively.  Returns ``(user, passwd)``.
    """
    user = None
    passwd = None
    if not u:
        if os.path.exists(self.LOGIN_INFO):
            # fix: the original tested self.LOGIN_INFO but then opened the
            # module-level LOGIN_INFO — use the same path for both.
            with open(self.LOGIN_INFO) as fp:
                u = json.load(fp)
            user = u['username']
            passwd = u['password']
        else:
            user = input("email>")
            passwd = getpass.getpass("passwd>")
    else:
        user = u['username']
        passwd = u['password']
    gprint("try login")
    to_login = self.selector("a#but_zc")
    self.flow.screenshot('login')
    to_login.click()
    input_user = self.selector('#username')
    input_pass = self.selector('#password')
    input_user.send_keys(user)
    input_pass.send_keys(passwd + "\n")
    gprint(" --- Login ---")
    # Persist the working credentials and the fresh session cookies.
    u = {'username': user, 'password': passwd}
    with open(self.LOGIN_INFO, 'w') as fp:
        json.dump(u, fp)
    self.save_session()
    return user, passwd
def scan():
    """Collect finished masscan XML reports, cache them, archive the files."""
    if not os.path.exists("/tmp/MasscanReports"):
        os.mkdir("/tmp/MasscanReports")
    ca = Cache(DB_FOFA)
    infos = []
    for name in os.listdir("/tmp/MasscanReports"):
        if not name.endswith(".mas"):
            continue
        report = os.path.join("/tmp/MasscanReports", name)
        gprint("Found file: %s" % name)
        with open(report, 'rb') as fp:
            text = fp.read().decode('utf-8', 'ignore')
        # Only a closing </nmaprun> tag marks a completed scan.
        if '</nmaprun>' not in text:
            continue
        infos.extend(Masscan.reportload(text))
        # Completed reports are moved out of the inbox directory.
        os.rename(report, os.path.join(ReportPATH, name))
    ca.save_all(*infos)
    gprint("save : %d" % len(infos))
    return infos
def search(self, key, page=0):
    """Run a fofa search for ``key``, parse results, optionally paginate.

    page: number of extra result pages to walk after the first.
    Returns the parsed Info list, or None when nothing was found or the
    results page failed to load.
    """
    search_input = self.selector("input#q")
    search_input.send_keys("{}\n".format(key))
    self.flow.screenshot("mod")
    check_if_found = self.selector(".list_jg")
    check_if_found = check_if_found.get_attribute('outerHTML')
    num = 0
    try:
        # Total hit count as reported in the results banner.
        num = re.findall(r'Total\ results:\s(\d+)', check_if_found)[0]
        gprint("found result in :%s = %s" % (key, num))
        num = int(num)
        if num == 0:
            return
    except IndexError:
        # Banner missing: the page did not render as expected.
        self.flow.screenshot("error.png")
        rprint("page load error !! see /tmp/error.png ")
        return
    # Wait for the result list, then parse the first page.
    self.selector(".list_mod")
    res = self.flow.html()
    iis = []
    iis += self.parse(res)
    gprint("Done %d" % len(iis))
    if len(iis) >= num:
        # The first page already covers every hit.
        return iis
    gprint(" page 1")
    if page and isinstance(page, int):
        for i in range(page):
            next_page = self.selector("a.next_page")
            next_page.click()
            self.selector(".list_mod")
            res = self.flow.html()
            gprint(" page " + str(i + 2))
            iis += self.parse(res)
    return iis
def do_summary(self, args):
    """Summarize brute results, or show one response body by index.

    Without args: tabulate all results and highlight the response whose
    length is rarest (the likely interesting outlier).  With an integer
    arg: print the decoded body of that result.
    """
    if not args:
        t = [[i] + n for i, n in enumerate(self._res)]
        # Header row: id, one 'args' column per payload slot, then 'lens'.
        t.insert(0,
                 ['id'] + ['args' for i in range(len(t[0]) - 2)] + ['lens'])
        print(tabulate(t, headers='firstrow'))
        urgly = t[0]
        # The rarest response length is the most suspicious one.
        tc = Counter([i[-1] for i in t])
        v = min(tc, key=lambda x: tc[x])
        for i in t:
            if i[-1] == v:
                if i != urgly:
                    gprint("---- focus on ---- ")
                    # NOTE(review): this prints the header row `urgly`, not
                    # the outlier row `i` — looks like a bug; confirm intent.
                    print(tabulate([urgly]))
                    break
    else:
        try:
            w = self._res_detail[int(args)]
            gprint(BS(w, 'lxml').text)
        except Exception as e:
            rprint(str(e))
            gprint("Must int ")
def run(self):
    """Launch masscan with the stored arguments, then harvest its reports."""
    gprint("Run Masscan: " + self.cmd)
    # Blocks until masscan exits; stdout itself is discarded — results are
    # picked up from the report files by scan().
    os.popen("masscan " + self.cmd).read()
    return MasscanDaemon.scan()
def run(cls, Obj):
    """Execute test object ``Obj`` against every registered target.

    Targets are deduplicated by ip+ports, then Obj.test is either fanned
    out over a thread pool (when ``Obj.mode == 'thread'``) or called once
    with the whole target list.
    """
    TestBase.process_now = 0

    def try_run(*args, **kargs):
        # Shield the pool from exceptions raised for a single target.
        try:
            return Obj.test(*args, **kargs)
        except Exception as e:
            rprint(e)
            print_exception(e)

    test_if_same = set()
    result_zusammen = dict()
    hs = []
    for i in cls.ins:
        # Skip duplicate ip+ports combinations.
        if (i.target.ip + i.target.ports) in test_if_same:
            continue
        if '/' in i.target.ports:
            # 'port/proto' style: keep only the numeric part.
            i.target.port = i.target.ports.split("/")[0].strip()
        else:
            i.target.port = i.target.ports.strip()
        test_if_same.add(i.target.ip + i.target.ports)
        hs.append(i.target)
    process_len = len(hs)
    if hasattr(Obj, '__name__'):
        cls.log("use :", Obj.__name__)
    if hasattr(Obj, "mode"):
        if Obj.mode == "thread":
            # Thread pool size / per-future timeout, overridable on Obj.
            thread = 7
            if hasattr(Obj, 'thread'):
                thread = int(Obj.thread)
            if hasattr(Obj, 'timeout'):
                timeout = Obj.timeout
            else:
                timeout = 12
            gprint("set mode : %s" % Obj.mode)
            gprint("set thread : %d" % thread)
            gprint("set timeout : %d" % timeout)
            with ThreadPoolExecutor(max_workers=thread) as exe:
                # Pick the per-result callback: Obj's own, a simple logger,
                # or the default progress counter.
                if not hasattr(Obj, 'callback'):
                    if hasattr(Obj, 'log') and Obj.log == 'simple':
                        callback = lambda x: gprint(
                            x, "\nfinish done | %s" % colored(
                                "-" * 5 + '\n', 'blue'))
                    else:
                        callback = lambda x: TestBase.process_add(
                            process_len)
                else:
                    callback = Obj.callback

                def callback_out(future, url=''):
                    # Collect the result (bounded by timeout) and report it.
                    try:
                        r = future.result(timeout=timeout)
                        result_zusammen[url] = r
                        callback(r)
                    except futures.TimeoutError:
                        rprint('timeout:', url)

                for h in hs:
                    future = exe.submit(try_run, h)
                    future.add_done_callback(
                        partial(callback_out, url=h.ip))
            if 'has' in Obj.__name__ or 'if' in Obj.__name__:
                # Predicate-style tests get a summary table of results.
                Tprint(result_zusammen, color='green', attrs=['bold'])
    else:
        # No mode attribute: run once over the full target list.
        res = try_run(hs)
        if res:
            cls.log(res)
def do_show_options(self, args):
    """List every {{...}} placeholder found in the loaded brute request."""
    # fix: use the same non-greedy pattern as eval_and_replace so several
    # placeholders on one line are reported separately instead of fused
    # into one greedy match.
    optins = re.findall(r'\{\{(.+?)\}\}', self._brute_req.decode())
    for nu, op in enumerate(optins):
        gprint(nu, op.strip())
def do_preview(self, args):
    """Print each generated request body without sending anything."""
    parser = ParseRequest(Bp._brute_req)
    for body, _opts in parser.eval_and_replace():
        gprint(body)
def parse(self, res):
    """Parse one fofa result page into Info records, saving unseen ones.

    Returns every Info parsed from the page (saved or not).
    """
    mods = BS(res, 'html.parser').select('.list_mod')
    infos = []
    for m in mods:
        ports = '/'.join([
            i.text for i in m.select('.list_mod_t > .span > span')
        ]).replace("\n", "").replace(" ", "")
        pa = m.select(".list_sx1 > li")
        rip = m.select("li > i.fa-map-marker")
        rtime = m.select("li > i.fa-clock-o")
        rplane = m.select("li > i.fa-plane")
        rhost = m.select(".list_mod_t > a")
        ros = m.select("li > span.list_xs2")
        ip = ""
        ti = ""
        ctime = ""    # renamed from `time` to avoid shadowing the module
        geo = ""
        host_os = ""  # renamed from `os` to avoid shadowing the module
        host = ""
        if rip:
            ip = rip[0].parent.text.replace("\n", "").replace(" ", "")
        # fix: guard rtime/pa before indexing (rtime[0] was accessed
        # unconditionally before the `if rtime` check below).
        if rtime and pa and rtime[0].parent.text != pa[0].text:
            ti = pa[0].text.replace('\n', '').replace(' ', '')
        if rtime:
            ctime = rtime[0].parent.text.replace('\n', '').replace(' ', '')
        if rplane:
            geo = rplane[0].parent.text.replace('\n', '').replace(' ', '')
        if rhost:
            host = rhost[0].attrs['href']
        if ros:
            host_os = ros[0].parent.text.replace("\n", "").replace(" ", "")
        # Body cell: keep raw markup when it has child nodes, else text.
        body = m.select('.auto-wrap')[0]
        if len(list(body)) > 1:
            body = ''.join([i.__str__() for i in list(body)])
        else:
            body = body.text
        lprint(title=ti, host=host, ip=ip, ports=ports, os=host_os,
               time=ctime, geo=geo, body=body)
        infos.append(Info(title=ti, host=host, ip=ip, ports=ports,
                          os=host_os, ctime=ctime, geo=geo, body=body))
    iis = []
    for i in infos:
        # fix: dedupe against each record's own fields — the original
        # queried with the loop-leftover ip/ports/time of the *last* parsed
        # mod, so all records shared a single dedupe key.
        if not self.DB_Handle.query_one(
                Info, m='and', ip=i.ip, ports=i.ports, ctime=i.ctime):
            iis.append(i)
    gprint("-- save %d --" % len(iis))
    self.DB_Handle.save_all(*iis)
    return infos
def do_list(self, args):
    """Print every stored webshell record."""
    with use_db() as db:
        for record in db.query(Checken):
            gprint(record.type, record.id, record.target,
                   record.passwd, record.encoding)
def save_session(self):
    """Snapshot the phantom browser's cookies to the session file."""
    gprint(" save session cookies")
    self.flow.go(self.main_url)
    cookies = self.flow.phantom.get_cookies()
    with open(self.LOGIN_INFO_SESSION, 'wb') as fp:
        pickle.dump(cookies, fp)