def entirewebs(query, start=0):
    """Search Entireweb for *query* and return a list of result URLs.

    query -- search terms
    start -- result offset (sent as the 'of' parameter)

    Returns a (possibly empty) list of URL strings; on connection
    failure prints an error and returns [].

    Improvement: the parameter-dict construction and request were
    duplicated verbatim for the retry; they are now a single local
    helper (rebuilt per call so 'ts' carries a fresh timestamp).
    """
    sout.write("Exteracting 87.5% Entireweb ... ")

    def _fetch():
        # Rebuild params on each attempt so 'ts' is a fresh timestamp.
        p = urllib.urlencode({
            'q': query,
            'of': start,
            'ts': time.time(),
            'md': 'web',
            'gs': '6YgT4Egb8uJkH',
        })
        return req.curllib("http://www.entireweb.com/web/", p)

    s = _fetch()
    # One retry: the first response sometimes spuriously reports no matches.
    if 'did not match' in s:
        s = _fetch()
    if s != "Error":
        pattern = re.compile('container"><a href="(.*?)">',
                             re.UNICODE | re.IGNORECASE)
        return re.findall(pattern, s)
    sout.write("\033[1;31mError connecting to Entireweb! \033[0m")
    return []
def entirewebs(query, start=0):
    """Query the Entireweb engine and return the matching result URLs."""
    sout.write("Exteracting 87.5% Entireweb ... ")
    payload = {'q': query, 'of': start, 'ts': time.time(),
               'md': 'web', 'gs': '6YgT4Egb8uJkH'}
    body = req.curllib("http://www.entireweb.com/web/",
                       urllib.urlencode(payload))
    if 'did not match' in body:
        # Retry once with a refreshed timestamp.
        payload['ts'] = time.time()
        body = req.curllib("http://www.entireweb.com/web/",
                           urllib.urlencode(payload))
    if body == "Error":
        sout.write("\033[1;31mError connecting to Entireweb! \033[0m")
        return []
    link_re = re.compile('container"><a href="(.*?)">',
                         re.UNICODE | re.IGNORECASE)
    return re.findall(link_re, body)
def ixquick(query, start=0):
    """Run a web search on Ixquick and return the result URLs."""
    sout.write("Exteracting 62.5% Ixquick ... ")
    params = urllib.urlencode({
        'cmd': 'process_search',
        'language': 'english',
        'query': query,
        'cat': 'web',
        'page_num': start,
    })
    body = req.curllib("https://s3-us2.ixquick.com/do/search", params)
    if body == "Error":
        sout.write("\033[31mError connecting to Ixquick! \033[0m")
        return []
    return re.findall("<h3 class='clk'><a href='(.*?)' id='title", body)
def googsearch(query, start=0, rnum=8):
    """Search Google's GwebSearch endpoint and return the result URLs.

    query -- search terms
    start -- result offset
    rnum  -- requested result count (sent as 'rsz')

    Bug fixed: the extracted URL list was assigned to `s` but the
    function returned the never-populated `result` list, so callers
    always received []. The matches are now returned directly.
    """
    sout.write("Exteracting 12.5% Google ... ")
    p = urllib.urlencode({
        'callback': 'google.search.WebSearch.RawCompletion',
        'rsz': rnum,
        'hl': 'en',
        'source': 'gsc',
        'gss': '.com',
        'sig': '432dd570d1a386253361f581254f9ca1',
        'q': query,
        'start': start,
        'gl': 'www.google.com',
        'oq': query,
        'gs_l': 'partner.12...0.0.1.7160.0.0.0.0.0.0.0.0..0.0.gsnos%2Cn%3D13...0.0..1ac..25.partner..0.0.0.',
        'qid': '1506e6c76391e584b',
        'context': 1,
        'key': 'notsupplied',
        'v': '1.0',
        'nocache': '1444961600505',
    })
    s = req.curllib("http://www.google.com/uds/GwebSearch?", p)
    if s != "Error":
        if "Suspected Terms" in s:
            # Flagged query: wait out the ban timer, then retry once.
            sout.stopwatch("\033[31m" + "[+]:" + "\033[0m" + " Banded!, Bypassing... ", 30)
            s = req.curllib("http://www.google.com/uds/GwebSearch?", p)
            if "Suspected Terms" in s:
                return []
        return re.findall('","url":"(.*?)"', s)
    sout.write("\033[31mError connecting to Google! \033[0m")
    return []
def bingsearch(query, start=0):
    """Search Bing and return the organic result URLs from one page."""
    sout.write("Exteracting 25% Bing ... ")
    params = urllib.urlencode({"q": query, "first": start})
    page = req.curllib("http://www.bing.com/search", params)
    if page == "Error":
        sout.write("\033[1;31mError connecting to Bing! \033[0m")
        return []
    return re.findall('<li class="b_algo"><h2><a href="(.*?)" ', page)
def bingsearch(query, start=0):
    """Fetch a Bing results page and pull out the result anchor hrefs."""
    sout.write("Exteracting 25% Bing ... ")
    query_string = urllib.urlencode({'q': query, 'first': start})
    response = req.curllib("http://www.bing.com/search", query_string)
    if response != "Error":
        return re.findall('<li class="b_algo"><h2><a href="(.*?)" ', response)
    sout.write("\033[1;31mError connecting to Bing! \033[0m")
    return []
def googsearch(query, start=0, rnum=8):
    """Search Google's GwebSearch endpoint and return the result URLs.

    query -- search terms
    start -- result offset
    rnum  -- requested result count (sent as 'rsz')

    Bug fixed: the URL matches were stored in `s` but the function
    returned the always-empty `result` list; the matches are now
    returned to the caller.
    """
    sout.write("Exteracting 12.5% Google ... ")
    p = urllib.urlencode({
        'callback': 'google.search.WebSearch.RawCompletion',
        'rsz': rnum,
        'hl': 'en',
        'source': 'gsc',
        'gss': '.com',
        'sig': '432dd570d1a386253361f581254f9ca1',
        'q': query,
        'start': start,
        'gl': 'www.google.com',
        'oq': query,
        'gs_l': 'partner.12...0.0.1.7160.0.0.0.0.0.0.0.0..0.0.gsnos%2Cn%3D13...0.0..1ac..25.partner..0.0.0.',
        'qid': '1506e6c76391e584b',
        'context': 1,
        'key': 'notsupplied',
        'v': '1.0',
        'nocache': '1444961600505',
    })
    s = req.curllib("http://www.google.com/uds/GwebSearch?", p)
    if s != "Error":
        if "Suspected Terms" in s:
            # Flagged query: wait out the ban timer, then retry once.
            sout.stopwatch(
                "\033[1;31m" + "[+]:" + "\033[0m" + " Banded!, Bypassing... ", 30)
            s = req.curllib("http://www.google.com/uds/GwebSearch?", p)
            if "Suspected Terms" in s:
                return []
        return re.findall('","url":"(.*?)"', s)
    sout.write("\033[1;31mError connecting to Google! \033[0m")
    return []
def sogousearch(query, start=1):
    """Search Sogou's mobile endpoint and return decoded result URLs.

    query -- search terms
    start -- 1-based page number (sent as 'p')

    Bugs fixed: the progress and error messages said "Bing" (copy-paste
    from bingsearch) and now name Sogou; the `range(0, len(list(s)))`
    index loop with in-place mutation is replaced by a comprehension.
    """
    sout.write("Exteracting 32% Sogou ... ")
    p = urllib.urlencode({
        'keyword': query,
        'p': start,
        'pg': 'webSearchList',
        'type': '1',
    })
    s = req.curllib("http://wap.sogou.com/web/searchList.jsp", p)
    if s != "Error":
        # Target URLs are percent-encoded inside the redirect link.
        return [urllib.unquote(u).decode('utf8')
                for u in re.findall('&url=(.*?)&', s)]
    sout.write("\033[1;31mError connecting to Sogou! \033[0m")
    return []
def asksearche(query, start=0):
    """Search Ask.com and return the result URLs.

    query -- search terms
    start -- page number (sent as 'page')

    The findall pattern has two capture groups (the href plus the
    trailing attribute), so each match is a tuple; only the first
    element (the URL) is kept. The manual append loop is replaced by
    a list comprehension.
    """
    sout.write("Exteracting 50% Ask ... ")
    p = urllib.urlencode({'q': query, 'qsrc': 0, 'o': 0, 'l': 'dir', 'page': start})
    s = req.curllib("http://www.ask.com/web?", p)
    if s != "Error":
        asks = re.findall(
            '<a class="web-result-title-link" href="(.*?)"( target="_blank"| onmousedown=")', s)
        return [match[0] for match in asks]
    sout.write("\033[31mError connecting to Ask! \033[0m")
    return []
def asksearche(query, start=0):
    """Search Ask.com and return the result URLs.

    query -- search terms
    start -- page number (sent as 'page')

    The regex captures two groups per hit (href plus trailing
    attribute), so only the first tuple element is returned. The
    manual append loop is replaced by a list comprehension.
    """
    sout.write("Exteracting 50% Ask ... ")
    p = urllib.urlencode({'q': query, 'qsrc': 0, 'o': 0, 'l': 'dir', 'page': start})
    s = req.curllib("http://www.ask.com/web?", p)
    if s != "Error":
        asks = re.findall(
            '<a class="web-result-title-link" href="(.*?)"( target="_blank"| onmousedown=")', s)
        return [hit[0] for hit in asks]
    sout.write("\033[1;31mError connecting to Ask! \033[0m")
    return []
def yahoosearc(query, start=0):
    """Search Yahoo and return the result URLs.

    query -- search terms
    start -- accepted but never sent; the 'b' offset is hard-coded to 3.
             NOTE(review): confirm whether paging was intended here.

    Fixes: the URL regex is now a raw string (its value is unchanged,
    but `\s`/`\.`/`\/` in a plain literal rely on deprecated escape
    handling), and `(... in s) == False` is replaced with `not in`.
    """
    sout.write("Exteracting 37.5% Yahoo ... ")
    p = urllib.urlencode({'p': query, 'fr': 'sfp', 'fr2': 'sb-top-search',
                          'iscqry': '', 'b': 3})
    s = req.curllib("https://search.yahoo.com/search?", p)
    if s != "Error" and "We did not find results" not in s:
        # Skip yahoo.com's own links; capture only external result URLs.
        return re.findall(
            r'" href="(https?:\/\/(?:www\.|(?!.*?\.yahoo\.com))[^\s\.]+\.[^\s]{2,}|www\.[^\s]+\.[^\s]{2,})" referrerpolicy=',
            s)
    if "We did not find results" in s:
        sout.write("\033[31mNo result from Yahoo! \033[0m")
    else:
        sout.write("\033[31mError connecting to Yahoo! \033[0m")
    return []
def dogpsearch(query, start=0):
    """Search Dogpile and return the unquoted result URLs.

    query -- search terms
    start -- result offset (sent as 'qsi')

    The `range(0, len(s))` index loop mutating the list in place is
    replaced by a list comprehension; a stray semicolon was removed.
    """
    sout.write("Exteracting 75% Dogpile ... ")
    p = urllib.urlencode({'qsi': start, 'q': query})
    s = req.curllib('http://www.dogpile.com/info.dogpl/search/web', p)
    if s != "Error":
        # Target URLs are percent-encoded between ';ru=' and '&ap='.
        return [urllib.unquote(u) for u in re.findall(';ru=(.*?)&ap=', s)]
    sout.write("\033[1;31mError connecting to Dogpile! \033[0m")
    return []
def bingsearch(query, start=0):
    """Search Bing (50 results per page) and return the result URLs.

    query -- search terms
    start -- result offset (sent as 'first')

    Bug fixed: both layout-marker checks tested membership against the
    `req` helper module (`"..." in req`) instead of the response body
    `s`, raising a TypeError on every successful fetch; they now test
    the body. An explicit empty-list return was added for pages that
    match neither marker (previously an implicit None).
    """
    sout.write("Exteracting 25% Bing ... ")
    p = urllib.urlencode({'q': query, 'first': start, 'count': '50'})
    s = req.curllib("http://www.bing.com/search", p)
    if s != "Error":
        # Both known page layouts carry the same result-anchor pattern.
        if '<li class="b_algo"><h2>' in s or '</span><h2><a href=' in s:
            return re.findall('<li class="b_algo"><h2><a href="(.*?)" ', s)
        return []
    sout.write("\033[1;31mError connecting to Bing! \033[0m")
    return []
def sogousearch(query, start=1):
    """Search Sogou's mobile endpoint and return decoded result URLs.

    query -- search terms
    start -- 1-based page number (sent as 'p')

    Bugs fixed: progress/error messages said "Bing" (copy-paste from
    bingsearch) and now name Sogou; the `range(0, len(list(s)))` index
    loop mutating the list is replaced by a comprehension.
    """
    sout.write("Exteracting 32% Sogou ... ")
    p = urllib.urlencode({
        'keyword': query,
        'p': start,
        'pg': 'webSearchList',
        'type': '1',
    })
    s = req.curllib("http://wap.sogou.com/web/searchList.jsp", p)
    if s != "Error":
        # Result links are percent-encoded inside the redirect URL.
        return [urllib.unquote(u).decode('utf8')
                for u in re.findall('&url=(.*?)&', s)]
    sout.write("\033[1;31mError connecting to Sogou! \033[0m")
    return []
def yahoosearc(query, start=0):
    """Search Yahoo and return the result URLs.

    query -- search terms
    start -- accepted but never sent; the 'b' offset is hard-coded to 3.
             NOTE(review): confirm whether paging was intended here.

    Fixes: the URL regex is now a raw string (identical value; avoids
    deprecated escape handling for `\s`/`\.`/`\/`), and
    `(... in s) == False` is replaced with `not in`.
    """
    sout.write("Exteracting 37.5% Yahoo ... ")
    p = urllib.urlencode({
        'p': query,
        'fr': 'sfp',
        'fr2': 'sb-top-search',
        'iscqry': '',
        'b': 3,
    })
    s = req.curllib("https://search.yahoo.com/search?", p)
    if s != "Error" and "We did not find results" not in s:
        # Skip yahoo.com's own links; capture only external result URLs.
        return re.findall(
            r'" href="(https?:\/\/(?:www\.|(?!.*?\.yahoo\.com))[^\s\.]+\.[^\s]{2,}|www\.[^\s]+\.[^\s]{2,})" referrerpolicy=',
            s)
    if "We did not find results" in s:
        sout.write("\033[31mNo result from Yahoo! \033[0m")
    else:
        sout.write("\033[31mError connecting to Yahoo! \033[0m")
    return []