def out(web, totlist):
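    # Write every URL collected in totlist to the results database
    # (the scheme is stripped from 'web' but otherwise unused here).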
    web = web.replace('http://','')
    web = web.replace('https://','')
    print(GR+'\n [*] Writing found URLs to DB...')
    for lists in totlist:
        save_data(database, module, lvl1, lvl2, lvl3, name, str(lists))
    print()
Example #2
def getos0x00(web):
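    # Reverse-resolve the target, fetch the raw Censys page for its IP and
    # regex out the HTML-encoded 'os_description' field to guess the OS.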
    name = targetname(web)
    lvl2 = "getcensys"
    module = "ScanANDEnum"
    lvl1 = "Scanning & Enumeration"
    lvl3 = ""
    global flag
    flag = 0x00
    ip_addr = socket.gethostbyname(web)
    print(C+' [*] Querying Reverse DNS...')
    time.sleep(0.7)
    print(O+' [+] Website IP :' +C+color.TR3+C+G+ str(ip_addr)+C+color.TR2+C)
    time.sleep(0.5)
    print(GR+' [*] Trying to identify operating system...')
    time.sleep(0.5)
    print(C+' [!] Configuring requests...')
    result = requests.get('https://www.censys.io/ipv4/%s/raw' % ip_addr).text
    print(GR+' [*] Getting raw data...')
    time.sleep(0.8)
    print(R+' [*] Analysing responses...')
    try:
        match = re.search(r'&#34;os_description&#34;: &#34;[^<]*&#34;', result)
        if match:
            flag = 0x01
            os_name = match.group().split('n&#34;: &#34;')[1][:-5]
            print(B+' [+] Operating System Identified : ' + C+ os_name)
            save_data(database, module, lvl1, lvl2, lvl3, name, os_name)
        else:
            print(R+' [-] No exact Operating System matches for '+O+web+C+'...')
            save_data(database, module, lvl1, lvl2, lvl3, name, "No exact Operating System matches for "+web)
            os_name = ""
            flag = 0x00
        return (flag, os_name)
    except Exception as e:
        print(R+' [-] Unhandled Exception : '+str(e))
Example #3
def apircv(web):
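    # Scrape the w3techs site-info table for the target domain and save each
    # technology entry that is found.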
    requests = session()
    try:
        domain = web.replace('http://', '')
        domain = domain.replace('https://', '')
        html = requests.get('http://w3techs.com/siteinfo.html?fx=y&url=' +
                            domain).text
        soup = BeautifulSoup(html, 'lxml')
        table = soup.findAll('table', attrs={'class': 'w3t_t'})[0]
        trs = table.findAll('tr')

        for tr in trs:
            th = tr.find('th')
            td = tr.find('td').text

            if td[-7:] == 'more...':
                td = td[:-9]

            print(G + ' [+] ' + th.text + ':' + C + color.TR2 + C)
            print(C + '      ' + td + '\n')
            time.sleep(0.7)
            data = th.text + ":\n" + td
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
    except Exception:
        print(R + ' [-] Outbound Query Exception!')
Example #4
def getcmslook(web, name):
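    # Passive CMS fingerprint via the WhatCMS API; needs WHATCMS_ACCESS_TOKEN
    # in files/API_KEYS, otherwise this first module is skipped.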
    requests = session()
    global found
    global dtect
    web = web.split('//')[1]
    print(GR + ' [*] Passive Fingerprinting CMS...')
    time.sleep(1)
    print(C + ' [!] Setting priority to False...')
    dtect = False
    print(GR + ' [*] Importing token...')
    try:
        from files.API_KEYS import WHATCMS_ACCESS_TOKEN
        print(O + ' [+] Token detected :' + C + color.TR3 + C + G +
              WHATCMS_ACCESS_TOKEN + C + color.TR2 + C)
        request = requests.get('https://whatcms.org/APIEndpoint/Detect?url=' +
                               web + '&key=' + WHATCMS_ACCESS_TOKEN,
                               verify=False)
        response = json.loads(request.text)
        status = response['result']['code']
        if 'retry' in response:
            print(R + ' [-] Outbound Query Exception!')
        else:
            if status == 200:
                dtect = True
                print(O + ' [+] CMS Detected:' + C + color.TR3 + C + G +
                      response['result']['name'] + C + color.TR2 + C + '\n')
                save_data(database, module, lvl1, lvl2, lvl3, name,
                          response['result']['name'])
            else:
                dtect = False
    except ImportError:
        print(R + ' [-] No API Token detected. Skipping first module...')
        time.sleep(0.4)
Example #5
def grabhead(web):
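    # Grab and print the target's HTTP response headers via urllib, then save them.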
    name = targetname(web)
    lvl2 = "grabhead"
    module = "ReconANDOSINT"
    lvl1 = "Active Reconnaissance"
    lvl3 = ""
    time.sleep(0.4)
    #print(R+'\n      ==================================')
    #print(R+'      G R A B   H T T P   H E A D E R S')
    #print(R+'     ===================================\n')
    from core.methods.print import posintact
    posintact("grab http headers")
    print(GR + color.BOLD + ' [*] Grabbing HTTP Headers...')
    time.sleep(0.4)
    web = web.rstrip()
    try:
        headerwhole = str(urllib.request.urlopen(web).info())
        header = headerwhole.splitlines()
        print('')
        for m in header:
            if ':' not in m:
                continue
            n = m.split(':', 1)
            print('  ' + C + n[0] + ': ' + C + n[1].strip())
        print('')
        save_data(database, module, lvl1, lvl2, lvl3, name, headerwhole)
    except urllib.error.HTTPError as e:
        print(R + ' [-] ' + e.__str__())
def grab(web):
    name = targetname(web)
    lvl2 = "bannergrab"
    module = "ScanANDEnum"
    lvl1 = "Scanning & Enumeration"
    lvl3 = ""
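    # Banner grab: resolve the host and list the open-port banners returned by
    # the Shodan host API.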
    api = shodan.Shodan(SHODAN_API_KEY)
    print(GR + ' [*] Resolving hostnames...')
    time.sleep(0.7)
    try:
        print(C + ' [!] Parsing information...')
        hostIP = socket.gethostbyname(web)

        print(C + ' [!] Setting query parameters...')
        host = api.host(hostIP)

        for item in host['data']:
            print(GR + '\n [+] Port : ' + C + str(item['port']))
            print(G + ' [+] Banner :' + C + color.TR2 + C + ' \n')
            for q in str(item['data']).splitlines():
                if ':' in q:
                    print(O + '    ' + q.split(':')[0] + ' :' + C + color.TR3 +
                          C + G + q.split(':')[1].strip() + C + color.TR2 + C)
                else:
                    print(C + '    ' + q)
                    time.sleep(0.02)
                data = q + " @ port " + str(item['port'])
                save_data(database, module, lvl1, lvl2, lvl3, name, data)

    except KeyboardInterrupt:
        print(R + ' [-] An error occurred...\n')
def googlegroups(web):
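    # Enumerate the target domain on Google Groups (via getemails0x00) and
    # record whether any results were found.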
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    time.sleep(0.7)
    #print(R+'\n    ===========================')
    #print(R+'     G O O G L E   G R O U P S')
    #print(R+'    ===========================\n')
    from core.methods.print import posintpas
    posintpas("google groups")

    print(C + ' [!] Initiating enumeration via Google Web...')
    time.sleep(0.7)
    print(C + ' [!] Parsing url...')
    web = web.replace('https://', '')
    web = web.replace('http://', '')
    if "@" in web:
        web = web.split("@")[1]
    data = getemails0x00(web)
    if flag == False:
        print(R + ' [-] No results found via enumeration on Google Groups...')
        save_data(database, module, lvl1, lvl2, lvl3, name,
                  "No results found via enumeration on Google Groups.")
    else:
        save_data(database, module, lvl1, lvl2, lvl3, name, str(data))
    print(C + ' [+] Done!')
def piwebenum(web):
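    # Ping the target locally ('ping -D -c 5'), then run an nping through the
    # hackertarget API and save the output.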
    name = targetname(web)
    lvl2 = "piwebenum"
    module = "ReconANDOSINT"
    lvl1 = "Active Reconnaissance"
    lvl3 = ""
    requests = session()
    time.sleep(0.4)
    web = web.split('//')[1]
    #print(R+'\n   =============================================')
    #print(R+'    P I N G / N P I N G   E N U M E R A T I O N')
    #print(R+'   =============================================\n')
    from core.methods.print import posintact
    posintact("(n)ping enumeration") 
    print(GR + ' [!] Pinging website...')
    time.sleep(0.5)
    print(C+' [*] Using timestamped (debug) ping with count 5...')
    time.sleep(0.4)
    print(GR+' [!] Press Ctrl+C to stop\n'+color.END)
    os.system('ping -D -c 5 '+ web)
    print('')
    time.sleep(0.6)
    print(C+' [*] Trying NPing (NMap Ping)...')
    print(C+" [~] Result: \n")
    print('')
    text = requests.get('http://api.hackertarget.com/nping/?q=' + web).text
    nping = str(text)
    print(color.END+ nping +C+'\n')
    save_data(database, module, lvl1, lvl2, lvl3, name, nping)
def whoischeckup(web):
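    # WHOIS lookup via the hackertarget API; the result is saved unless the
    # response reports an error.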
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    requests = session()
    web = web.replace('http://', '')
    web = web.replace('https://', '')
    if "@" in web:
        web = web.split("@")[1]
    #print(R+'\n   =========================')
    #print(R+'    W H O I S   L O O K U P')
    #print(R+'   =========================\n')
    from core.methods.print import posintpas
    posintpas("whois lookup")
    time.sleep(0.4)
    print('' + GR + color.BOLD + ' [!] Looking Up for WhoIS Information...')
    time.sleep(0.4)
    print("" + GR + color.BOLD + " [~] Result: \n" + color.END)
    domains = [web]
    for dom in domains:
        text = requests.get('http://api.hackertarget.com/whois/?q=' + dom).text
        res = str(text)
        if 'error' not in res:
            print(color.END + res + C)
            save_data(database, module, lvl1, lvl2, lvl3, name, res)
        else:
            print(R + ' [-] Outbound Query Exception!')
            time.sleep(0.8)
Example #10
    def xssref0x00(web, parallel):
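        # Referrer-based XSS check: the payload list 'pay' is fed to refatck(),
        # split across a multiprocessing Pool when 'parallel' is set; successful
        # payloads are reported and saved.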

        #print(R+'\n    ===========================')
        print(R + '\n     X S S  (Referrer Based)')
        print(R + '    ---<>----<>----<>----<>----\n')

        success = []
        if not parallel:
            success += refatck(pay, web)
        else:
            paylists = listsplit(pay, round(len(pay) / processes))
            with Pool(processes=processes) as pool:
                res = [
                    pool.apply_async(refatck, args=(
                        l,
                        web,
                    )) for l in paylists
                ]
                for y in res:
                    i = y.get()
                    success += i
        if success:
            data = "XSS Vulnerability (Referrer) found! Payloads :> " + str(
                success)
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
            print(
                " [+] XSS Vulnerability (Referrer) found! Successful payloads:"
            )
            for i in success:
                print(i)
        else:
            data = "(referrer) no payload succeeded."
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
            print(R + "\n [-] No payload succeeded." + C)
Example #11
def piweb(web):
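    # Ping check via the hackertarget nping API; output is saved when the
    # query succeeds.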
    requests = session()
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    dom = web.split('//')[1]
    if "@" in dom:
        dom = dom.split("@")[1]
    #print(R+'\n   =====================')
    #print(R+'    P I N G   C H E C K ')
    #print(R+'   =====================\n')
    from core.methods.print import posintpas
    posintpas("ping check")
    time.sleep(0.4)
    print(GR + color.BOLD + ' [!] Pinging website using external API...')
    time.sleep(0.4)
    print(GR + color.BOLD + " [~] Result: "+ color.END)
    text = requests.get('http://api.hackertarget.com/nping/?q=' + dom).text
    nping = str(text)
    if 'null' not in nping:
        save_data(database, module, lvl1, lvl2, lvl3, name, nping)
        print(color.END+ nping+C)
    else:
        print(R+' [-] Outbound Query Exception!')
        time.sleep(0.8)
Example #12
def check0x00(req, name):
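    # Search the response body for internal IPs matching the module-level
    # 'signature' regex and save every hit.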
    comments = re.findall(signature, req)
    print(GR + " [*] Searching for Internal IPs...")
    for comment in comments:
        print(C + '   ' + comment)
        time.sleep(0.03)
        found = 0x01
        save_data(database, module, lvl1, lvl2, lvl3, name, comment)
    def sqliuser0x00(web, parallel):
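        # Error-based SQLi check: each payload from 'pay' is appended to the
        # User-Agent header and the response text is searched for SQL error
        # strings; parallel mode fans the payloads out to userpre() via a Pool.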

        #print(R+'\n    =============================')
        print(R + '\n     S Q L i  (User-Agent Based)')
        print(R + '    ---<>----<>----<>----<>----<>\n')
        success = []
        requests = session()
        if not parallel:
            for i in pay:
                print(B + ' [*] Using payload : ' + C + i)
                time.sleep(0.7)
                user_agent = {
                    'User-agent':
                    'Mozilla/5.0 (X11; Ubuntu; Linux' +
                    'x86_64; rv:39.0) Gecko/20100101 Firefox/39.0'
                }
                user_agent['User-agent'] += i
                req = requests.get(web, headers=user_agent)
                print(O + ' [*] Using ' + R + '!nfected' + O + ' UA : ' + GR +
                      user_agent['User-agent'])
                flag = req.text.strip()
                if ('error' in flag or 'syntax' in flag
                        or 'mysql' in flag.lower()):
                    print(
                        G +
                        '\n [!] Error based SQLi (User-Agent Based) Detected!')
                    print(R + ' [!] User-Agent : ' + O +
                          user_agent['User-agent'])
                    success.append(i)
        else:
            paylists = listsplit(pay, round(len(pay) / processes))
            with Pool(processes=processes) as pool:
                res = [
                    pool.apply_async(userpre, args=(
                        l,
                        web,
                    )) for l in paylists
                ]
                #res1 = pool.apply_async(portloop, )
                for i in res:
                    j = i.get()
                    success += j
        if success:
            data = "SQLi Vulnerability (useragent) found!\nSuccessful payloads: " + str(
                success)
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
            print(
                " [+] SQLi Vulnerability (useragent) found! Successful payloads:"
            )
            for i in success:
                print(i)
        else:
            print(R + "\n [-] No payload succeeded." + C)
            save_data(database, module, lvl1, lvl2, lvl3, name,
                      "(useragent) no payload succeeded.")
Example #14
def check0x00(req, lvl2, name):
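    # Extract email addresses matching the 'signature' regex from the response
    # and save each one.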
    comments = re.findall(signature, req)
    print(GR + " [*] Searching for Emails...")
    if comments:
        print('\n' + G + ' [+] Found Email(s):' + C + color.TR2 + C)
        for comment in comments:
            print(C + '   - ' + comment)
            time.sleep(0.03)
            found = 0x01
            save_data(database, module, lvl1, lvl2, lvl3, name, comment)
def check0x00(content,url, lvl2, name):
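    # Check the page content against each error 'pattern' and record matches
    # together with the URL where they appeared.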
    for pattern in patterns:
        print(C+' [!] Finding '+B+pattern+C+' ...')
        time.sleep(0.005)
        if search(pattern, content):
            print(O+' [!] Possible error at '+C+color.TR3+C+G+url+C+color.TR2+C)
            print(G+" [+] Found : \"%s\" at %s" % (pattern,url)+C+color.TR2+C)
            data = str(pattern) + " @ " + str(url)
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
            found = 0x01
def out(web, list0):
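    # Write only in-scope URLs (those containing the target domain) from list0
    # to the database.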

    web = web.replace('http://','')
    web = web.replace('https://','')
    print(GR+' [*] Writing found URLs to DB...')
    print(C+' [!] Sorting only scope urls...')
    time.sleep(1)
    for lists in list0:
        if str(web) in lists:
            save_data(database, module, lvl1, lvl2, lvl3, name, str(lists))
Example #17
def brute0x00(web, parallel):
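    # RFI brute force: load a payload wordlist (default files/fuzz-db/rfi_paths.lst)
    # and test each entry with checkbrute(), optionally in parallel.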
    try:
        if properties["DICT"][1] == " ":
            print(O + ' [!] Enter path to payload file ' + R +
                  '(Default: files/fuzz-db/rfi_paths.lst)')
            fi = input(O + ' [§] Your input (Press Enter if default) :> ')
        elif properties["DICT"][1].lower() == "none":
            fi = ""
        else:
            fi = properties["DICT"][1]

        if fi == '':
            fi = 'files/fuzz-db/rfi_paths.lst'

        print(GR + ' [*] Importing wordlist...')
        if os.path.exists(fi):
            print(G + ' [+] File path found!')
            time.sleep(0.6)
            print(O + ' [*] Importing wordlist...')
            with open(fi, 'r') as wew:
                for w in wew:
                    w = w.strip('\n')
                    payloads.append(w)
            print(GR + ' [*] Starting bruteforce...')
            time.sleep(0.7)
            success = []
            if not parallel:
                success += checkbrute(payloads, web)
            else:
                paylists = listsplit(payloads,
                                     round(len(payloads) / processes))
                with Pool(processes=processes) as pool:
                    res = [
                        pool.apply_async(checkbrute, args=(
                            l,
                            web,
                        )) for l in paylists
                    ]
                    for y in res:
                        i = y.get()
                        success += i
            if success:
                data = "Possible RFI at: " + str(success)
                save_data(database, module, lvl1, lvl2, lvl3, name, data)
                print(" [+] Remote File Inclusion found! Successful payloads:")
                for i in success:
                    print(i)
            else:
                print(R + "\n [-] No payload succeeded." + C)
                save_data(database, module, lvl1, lvl2, lvl3, name,
                          "(brute) no payload succeeded.")

    except Exception as e:
        print(R + ' [-] Unexpected Exception Encountered!')
        print(R + ' [-] Exception : ' + str(e))
def getRes0x00():
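    # Query the hacked-emails.com API for breaches containing the given email
    # address and save the findings.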
    requests = session()
    email = input(C + ' [§] Enter the email :> ' + R)
    if '@' in email and '.' in email:
        pass
    else:
        email = input(C + ' [§] Enter a valid email :> ' + R)

    print(GR + ' [*] Setting headers... (behaving as a browser)...')
    time.sleep(0.7)
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201',
        'Accept-Language': 'en-US;',
        'Accept-Encoding': 'gzip, deflate',
        'Accept': 'text/html,application/xhtml+xml,application/xml;',
        'Connection': 'close'
    }
    print(P + ' [!] Making the no-verify request...' + C)
    time.sleep(0.5)
    url = 'https://hacked-emails.com/api?q=' + str(email)

    try:
        req = requests.get(url, headers=headers, timeout=10, verify=False)
        content = req.text
        if content != "":
            content = json.loads(content)
            if content['status'] == "found":
                print("Result found (" + G + str(content['results']) +
                      " results" + C + Style.RESET_ALL + ")")
                for line in content['data']:
                    try:
                        print(O + " [+] " + email + " found in :" + C +
                              color.TR3 + C + G + str(line['title']) + " (" +
                              str(line['date_leaked']) + ')' + C + color.TR2 +
                              C)
                        data = email + " found in :" + str(
                            line['title']) + " (" + str(
                                line['date_leaked']) + ')'
                        save_data(database, module, lvl1, lvl2, lvl3, "", data)
                    except Exception:
                        print(R +
                              " [-] Can't parse the leak titles via APi...")
            else:
                print(R + ' [-] Email ' + O + email + R +
                      ' not found in any breaches!')
                data = 'Email ' + email + ' not found in any breaches!'
                save_data(database, module, lvl1, lvl2, lvl3, "", data)
        else:
            print(R + ' [-] Error found in Json Request...')

    except Exception:
        print(R + " [-] Can't reach url...")
        print(R + ' [-] Request timed out!')
    def sqliuser0x00(web, parallel):
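        # Blind SQLi check: payloads are injected through the User-Agent header
        # and a response-length difference against the baseline request (getrq)
        # flags a possible hit.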

        #print(R+'\n    =============================')
        print(R + '\n     S Q L i  (User-Agent Based)')
        print(R + '    ---<>----<>----<>----<>----<>\n')

        requests = session()
        getrq = requests.get(web, verify=False)
        success = []
        if not parallel:
            for i in pay:
                print(B + '\n [*] Using payload : ' + C + i)
                time.sleep(0.7)
                user_agent = {
                    'User-agent':
                    'Mozilla/5.0 (X11; Ubuntu; Linux' +
                    'x86_64; rv:39.0) Gecko/20100101 Firefox/39.0'
                }
                user_agent['User-agent'] += str(i)
                req = requests.get(web, headers=user_agent, verify=False)
                print(O + ' [*] Using ' + R + '!nfected' + O + ' UA : ' + GR +
                      user_agent['User-agent'])
                if len(req.content) != len(getrq.content):
                    print(G +
                          ' [!] Blind based SQLi (User-Agent Based) Detected!')
                    print(R + ' [!] User-Agent : ' + O +
                          user_agent['User-agent'])
                    success.append(i)
        else:
            paylists = listsplit(pay, round(len(pay) / processes))
            with Pool(processes=processes) as pool:
                res = [
                    pool.apply_async(userpre, args=(
                        web,
                        l,
                        getrq,
                    )) for l in paylists
                ]
                #res1 = pool.apply_async(portloop, )
                for i in res:
                    j = i.get()
                    success += j
        if success:
            data = "SQLi Vulnerability (useragent) found!\nSuccessful payloads: " + str(
                success)
            save_data(database, module, lvl1, lvl2, lvl3, name, data)
            print(
                " [+] SQLi Vulnerability (useragent) found! Successful payloads:"
            )
            for i in success:
                print(i)
        else:
            print(R + "\n [-] No payload succeeded." + C)
            save_data(database, module, lvl1, lvl2, lvl3, name,
                      "(useragent) no payload succeeded.")
Example #20
def google_it(dork, lvl2, name):
    clear_cookie()
    data = []
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    for title in search(dork, stop=30):
        print(B + ' [!] Profile Found :> ' + C + title)
        data.append(title)
        time.sleep(0.5)
    save_data(database, module, lvl1, lvl2, lvl3, name, str(data))
Example #21
def waf(web):
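    # Fingerprint a WAF/load balancer by matching the response headers and body
    # against known signatures (detectWaf0x00).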
    global name
    name = targetname(web)
    global lvl2
    lvl2 = inspect.stack()[0][3]
    global module
    module = "ScanANDEnum"
    global lvl1
    lvl1 = "Scanning & Enumeration"
    global lvl3
    lvl3 = ""
    check = 0x00
    time.sleep(0.7)
    #print(R+'\n    ===============================')
    #print(R+'     W A F   E N U M E R A T I O N ')
    #print(R+'    ===============================\n')
    from core.methods.print import pscan
    pscan("waf enumeration")
    time.sleep(0.7)
    print(GR + ' [*] Testing the firewall/loadbalancer...')
    time.sleep(1)
    head, con = getReq0x00(web)
    waftypes = detectWaf0x00(head, con)
    for i in range(len(waftypes)):
        try:
            if waftypes[i]:
                print(
                    GR +
                    '\n [*] Response seems to be matching a WAF signature...')
                time.sleep(0.6)
                print(C + ' [+] The website seems to be behind a WAF...')
                time.sleep(0.6)
                print(B + ' [+] Firewall Detected : ' + C + waftypes[i])
                check = 0x01
                save_data(database, module, lvl1, lvl2, lvl3, name,
                          waftypes[i])
                break

            else:
                print(B + ' [+] Matching signatures for : ' + C + wafs[i],
                      end='',
                      flush=True)
                time.sleep(0.1)

        except Exception:
            pass

    if check == 0x00:
        save_data(database, module, lvl1, lvl2, lvl3, name,
                  "Generic detection failed to fingerprint WAF.")
        print(R + ' [-] Generic detection failed to fingerprint WAF...')

    print('\n' + G + ' [+] WAF Fingerprinting module completed!' + C +
          color.TR2 + C + '\n')
def out(web, list0):
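    # Same idea as the out() above: keep only in-scope URLs from list0 and
    # write them to the database, taking the domain from the part after '//'.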

    web = web.split('//')[1]
    print(GR+' [*] Writing found URLs to DB...')
    #if os.path.exists('tmp/logs/'+web+'-logs/'+web+'-links.lst'):
    #    fil = open('tmp/logs/'+web+'-logs/'+web+'-links.lst','w+')
    print(P+' [!] Sorting only scope urls...'+C)
    time.sleep(1)
    for lists in list0:
        if str(web) in lists:
            save_data(database, module, lvl1, lvl2, lvl3, name, str(lists))
Example #23
def dig(target):
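    # Run the system 'dig' command against the target and store its output.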
    requests = session()
    name = target.name
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl2 = inspect.stack()[0][3]
    lvl3 = ''
    DIGSCAN = "dig " + name
    results_dig = subprocess.check_output(DIGSCAN, shell=True)
    data = results_dig.decode().replace("<<", "").replace(">>", "")
    save_data(database, module, lvl1, lvl2, lvl3, name, data)
    return
Example #24
def threatintel(web):
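    # Resolve the target and check its domain/IP against a set of public
    # threat and block lists, recording every list it appears on.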
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    time.sleep(0.7)
    #print(R+'\n    =======================================')
    #print(R+'     T H R E A T   I N T E L L I G E N C E')
    #print(R+'    =======================================\n')
    from core.methods.print import posintpas
    posintpas("threat intelligence")
    print(O + ' [Data in these threat lists is the latest available,')
    print(O + '            no older than a week!]\n')
    print(C + ' [!] Parsing Url..')
    time.sleep(0.7)
    web = web.replace('https://', '')
    web = web.replace('http://', '')
    if "@" in web:
        web = web.split("@")[1]
    print(O + ' [!] Getting host information...')
    time.sleep(0.8)
    ip = socket.gethostbyname(web)
    print(G + ' [+] DNS : ' + O + str(ip))
    print(C + ' [!] Loading up modules...')
    time.sleep(0.7)
    print(GR + ' [*] Starting gathering...')
    usom(web, ip)
    badip(web, ip)
    blocklistssh(web, ip)
    blocklistmail(web, ip)
    blocklistsip(web, ip)
    blocklistftp(web, ip)
    blocklistpop3(web, ip)
    blocklistirc(web, ip)
    blocklistimap(web, ip)
    blocklistbots(web, ip)
    blockliststrong(web, ip)
    blocklistapache(web, ip)
    blocklistbrute(web, ip)
    emergethreats(web, ip)
    emergecompro(web, ip)
    binarydefense(web, ip)
    openphish(web, ip)
    zeustracker(web, ip)
    projecthoneypot(web, ip)
    if flaglist:
        data = web + " appeared as a threat on the following lists: " + str(
            flaglist)
    else:
        data = web + " seems to be clean."
    save_data(database, module, lvl1, lvl2, lvl3, name, data)
    print(G + ' [+] Done!')
Example #25
def photon(web):
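    # Wrapper around the Photon crawler: build the command line from user/config
    # arguments, run it (pip-installed binary or the bundled copy) and save the output.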
    global name
    name = targetname(web)
    global lvl2
    lvl2 = inspect.stack()[0][3]
    global module
    module = "ScanANDEnum"
    global lvl1
    lvl1 = "Crawling"
    global lvl3
    lvl3 = ""
    time.sleep(0.5)
    pscan("photon")
    if properties["ROOT"][1] == " ":
        root = input(" [§] Enter the root URL :> ")
    else:
        root = properties["ROOT"][1]

    if properties["ARGS"][1] == " ":
        try:
            try:
                help_photon = subprocess.call(["photon", "--help"])
            except:
                #in case of buggy photon pip installation
                help_photon = subprocess.call(
                    ["python3", "core/lib/Photon/photon.py", "--help"])
            arguments = input(
                " [§] Enter arguments (as you would after $photon -u ROOT on the commandline) :> "
            )
            assert "-u" not in arguments and "--url" not in arguments
        except AssertionError:
            arguments = input(
                " [-] Argument '-u' already present in command string.\n [§] Enter arguments (as you would after $photon -u ROOT on the commandline) :> "
            )
    else:
        arguments = properties["ARGS"][1]

    arglist = re.split(r"\s+", arguments)
    print(
        " [+] Starting Photon Scan (this will take a while, output piped into variable)"
    )
    try:
        #command = "photon -u " + root + " " + arguments
        command = ["photon", "-u", root] + arglist
        results_photon = subprocess.check_output(command)
    except:
        command = ["python3", "core/lib/Photon/photon.py", "-u", root
                   ] + arglist
        results_photon = subprocess.check_output(command)
    data = results_photon.decode().replace("<<", "").replace(">>", "")
    print(data)
    gprint("\n [+] Photon Scan finished! Saving to database...")
    save_data(database, module, lvl1, lvl2, lvl3, root, data)
Example #26
def check0x00(req, name):
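    # Extract phone numbers matching the 'signature' regex from the response
    # and save each one.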
    found = 0x00
    print(C + ' [!] Setting parse parameters...')
    comments = re.findall(signature, req)
    print(GR + " [+] Searching for Phone Numbers...")
    if comments:
        print(G + '\n [+] Found Phone Numbers:' + C + color.TR2 + C)
    for comment in comments:
        print(C + '   ' + comment)
        time.sleep(0.03)
        found = 0x01
        save_data(database, module, lvl1, lvl2, lvl3, name, comment)
Example #27
def links(web):
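    # Fetch page links for the target from the hackertarget pagelinks API and
    # keep only the links that point back to the target domain.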
    name = targetname(web)
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    lvl2 = inspect.stack()[0][3]
    requests = session()
    #print(R+'\n   =====================')
    #print(R+'    P A G E   L I N K S ')
    #print(R+'   =====================\n')
    from core.methods.print import posintpas
    posintpas("page links")
    time.sleep(0.4)
    print('' + GR + color.BOLD + ' [!] Fetching links to the website...')
    time.sleep(0.4)
    print(GR + " [~] Result: " + color.END)

    if "https://" in web:
        web0 = web.replace('https://', '')
    else:
        web0 = web.replace('http://', '')
    if "@" in web:
        if "https" in web:
            web = "https://" + web.split("@")[1]
        else:
            web = "http://" + web.split("@")[1]
        web0 = web0.split("@")[1]

    domains = [web]
    for dom in domains:
        text = requests.get('http://api.hackertarget.com/pagelinks/?q=' +
                            dom).text
        result = str(text)
        if 'null' not in result and 'no links found' not in result:

            woo = result.splitlines()
            for w in woo:
                if str(web0).lower() in w.lower():
                    final_links.append(w)

            print(C + '\n [!] Receiving links...')
            for p in final_links:
                print(O + ' [+] Found link :' + C + color.TR3 + C + G + p + C +
                      color.TR2 + C)
                time.sleep(0.06)

            save_data(database, module, lvl1, lvl2, lvl3, name,
                      str(final_links))
            print('')

        else:
            print(R + ' [-] Outbound Query Exception!')
            time.sleep(0.8)
def google_it(site, dork, lvl2, name):
    data = []
    module = "ReconANDOSINT"
    lvl1 = "Passive Reconnaissance & OSINT"
    lvl3 = ''
    clear_cookie()
    for title in search(dork, stop=30):
        print(O + ' [!] Site Found :>' + C + color.TR3 + C + G + title +
              C + color.TR2 + C)
        data.append(title)
        time.sleep(0.1)
    save_data(database, module, lvl1, lvl2, lvl3, name, str(data))
Example #29
def referrerpol0x00(headx):
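    # Check the response headers for a Referrer-Policy header and record
    # whether it is present.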
    for header in headx:
        if "Referrer-Policy:".lower() in header.lower():
            print("\033[1;32m [+]\033[0m Detected Referrer-Policy - '" +
                  header.rstrip() + "' \033[1;32m(OK)\033[0m")
            save_data(database, module, lvl1, lvl2, lvl3, name,
                      "Detected Referrer-Policy - " + header.rstrip())
            return
    save_data(database, module, lvl1, lvl2, lvl3, name,
              "Referrer-Policy not present.")
    print(
        "\033[1;31m [-]\033[0m Referrer-Policy not present \033[1;31m(Not OK)\033[0m"
    )
Example #30
def xframe0x00(headx):
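    # Check the response headers for an X-Frame-Options header and record
    # whether it is present.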
    for header in headx:
        if "X-Frame-Options:".lower() in header.lower():
            save_data(database, module, lvl1, lvl2, lvl3, name,
                      "Detected X-Frame-Options - " + header.rstrip())
            print("\033[1;32m [+]\033[0m Detected X-Frame-Options - '" +
                  header.rstrip() + "' \033[1;32m(OK)\033[0m")
            return
    save_data(database, module, lvl1, lvl2, lvl3, name,
              "X-Frame-Options not present.")
    print(
        "\033[1;31m [-]\033[0m X-Frame-Options not present \033[1;31m(Not OK)\033[0m"
    )