Example 1
    def sending(self, url):
        r = sender.send_get(self.options, url, cookies=None)
        results = []
        if r:
            response = r.text
            if 'class="errormsg"' in response:
                utils.print_bad("No entry found for: {0}".format(self.query))
                return

            soup = utils.soup(response)
            result = self.analyze(soup)
            if not result:
                return False

            # check if there is more than one page of results
            pages = self.check_pages(soup)
            if pages:
                utils.print_info("Detected {0} pages for query".format(str(len(pages))))
                for page in pages:
                    page_url = self.baseURL + page
                    r1 = sender.send_get(self.options, page_url, cookies=None)
                    if r1:
                        response1 = r1.text
                        soup1 = utils.soup(response1)
                        # collect results from the extra pages too
                        page_result = self.analyze(soup1)
                        if page_result:
                            results.extend(page_result)

            results.extend(result)
            return results
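
All of the examples on this page go through sender.send_get(options, url, ...), whose implementation is not shown here. A minimal sketch of what such a wrapper might look like, assuming a requests-based sender module; the 'proxy' and 'timeout' option keys are hypothetical, not confirmed by the source:

import requests

def send_get(options, url, cookies=None, headers=None):
    # route through a proxy when one is configured (hypothetical option key)
    proxy = options.get('proxy')
    proxies = {'http': proxy, 'https': proxy} if proxy else None
    try:
        return requests.get(url, cookies=cookies, headers=headers,
                            proxies=proxies,
                            timeout=options.get('timeout', 30))
    except requests.RequestException:
        # callers above treat a falsy return as "request failed"
        return None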
Example 2
    def optimize(self, query):
        url = 'https://www.shodan.io/search/_summary?query={0}'.format(query)
        utils.print_good("Analyzing first page for more results")
        r = sender.send_get(self.options, url, self.cookies)
        
        if r.status_code == 200:
            soup = utils.soup(r.text)
        else:
            return False
        
        query_by_cities = []
        # check whether the query already has a country filter
        if 'country' in query:
            links = soup.find_all("a")
            country = utils.get_country_code(utils.url_decode(query))

            for link in links:
                if 'city' in (link.get('href') or ''):
                    item = {
                        'url': link.get('href'),
                        'city': link.text,
                        'country': country
                    }
                    utils.print_debug(self.options, item)
                    query_by_cities.append(item)
        else:
            links = soup.find_all("a")
            countries = []
            for link in links:
                if 'country' in (link.get('href') or ''):
                    countries.append(utils.get_country_code(utils.url_decode(link.get('href'))))
            utils.print_debug(self.options, countries)

            for country in countries:
                # send the request again to get the cities for each country
                country_query = utils.url_encode(' country:"{0}"'.format(country))
                url = 'https://www.shodan.io/search/_summary?query={0}{1}'.format(
                    query, country_query)
                r1 = sender.send_get(self.options, url, self.cookies)
                utils.print_info(
                    "Sleeping for a couple of seconds because the Shodan server is really strict")
                utils.random_sleep(5, 8)
                if r1.status_code == 200:
                    soup1 = utils.soup(r1.text)
                    links = soup1.find_all("a")
                    for link in links:
                        if 'city' in (link.get('href') or ''):
                            item = {
                                'url': link.get('href'),
                                'city': link.text,
                                'country': country
                            }
                            utils.print_debug(self.options, item)
                            query_by_cities.append(item)

        utils.print_debug(self.options, query_by_cities)
        return query_by_cities
Example 3
    def do_login(self):
        utils.print_info("Re-authenticating using credentials from: {0}".format(
            self.options.get('config')))

        login_url = 'https://censys.io/login'
        r = sender.send_get(self.options, login_url, cookies=None)

        if r.status_code == 200:
            cookies = r.cookies
            csrf_token = None
            inputs = []
            form = utils.soup(r.text).find_all("form")
            if form:
                inputs = form[0].findChildren('input')

            # pull the CSRF token out of the login form
            for tag in inputs:
                if tag.get('name') == 'csrf_token':
                    csrf_token = tag.get('value')

            username, password = utils.get_cred(self.options, source='censys')

            data = {"csrf_token": csrf_token, "came_from": "/",
                    "from_censys_owned_external": "False", "login": username, "password": password}

            really_login_url = 'https://censys.io/login'
            r1 = sender.send_post(
                self.options, really_login_url, cookies, data, follow=False)

            if r1.status_code == 302:
                for name, value in r1.cookies.items():
                    if value:
                        censys_cookies = value
                        utils.set_session(
                            self.options, censys_cookies, source='censys')
                        return censys_cookies
        return False
Example 4
    def do_login(self):
        utils.print_info("Re-authenticating using credentials from: {0}".format(
            self.options.get('config')))

        login_url = 'https://account.shodan.io/login'
        r = sender.send_get(self.options, login_url, cookies=None)

        if r.status_code == 200:
            cookies = r.cookies
            csrf_token = None
            inputs = []
            form = utils.soup(r.text).find_all("form")
            if form:
                inputs = form[0].findChildren('input')

            # pull the CSRF token out of the login form
            for tag in inputs:
                if tag.get('name') == 'csrf_token':
                    csrf_token = tag.get('value')

            username, password = utils.get_cred(self.options, source='shodan')
            data = {"username": username, "password": password, "grant_type": "password",
                    "continue": "https://www.shodan.io/", "csrf_token": csrf_token,
                    "login_submit": "Login"}

            really_login_url = 'https://account.shodan.io/login'
            r1 = sender.send_post(
                self.options, really_login_url, cookies, data, follow=False)

            if r1.status_code == 302:
                # the session cookie Shodan sets is named 'polito'
                for name, value in r1.cookies.items():
                    if name == 'polito' and value:
                        shodan_cookies = value
                        utils.set_session(
                            self.options, shodan_cookies, source='shodan')
                        return shodan_cookies
        
        return False
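
Examples 3, 4 and 14 all follow the same pattern: fetch the login page, pull a hidden token out of the first form, then POST it back with the credentials. A self-contained sketch of that extraction step, assuming utils.soup() wraps BeautifulSoup (get_hidden_input is a hypothetical helper name, not part of the project):

from bs4 import BeautifulSoup

def get_hidden_input(html, name):
    # return the value of the first <input name=...> inside the first <form>, or None
    form = BeautifulSoup(html, 'html.parser').find('form')
    if not form:
        return None
    tag = form.find('input', attrs={'name': name})
    return tag.get('value') if tag else None

# usage: csrf_token = get_hidden_input(r.text, 'csrf_token')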
Example 5
    def github(self, url):
        result = []
        r = sender.send_get(self.options, url, cookies=None)
        if r.status_code == 200:
            response = r.text
            # store the raw HTML response
            raw_file_path = self.options[
                'raw'] + '/write_up_github_{0}.html'.format(
                    self.query.replace(' ', '_'))
            if self.options.get('store_content'):
                utils.just_write(raw_file_path, response)
                utils.print_debug(
                    self.options,
                    "Writing raw response to: {0}".format(raw_file_path))

            soup = utils.soup(response)

            # Custom here
            articles = soup.find_all('article', 'markdown-body')
            if not articles:
                return result
            links = articles[0].findChildren('a')
            for link in links:
                if self.query.lower() in link.text.lower():
                    item = {
                        'Query': self.query,
                        'Title': link.text,
                        'Content': link.text,
                        'External_url': link.get('href'),
                        'Source': url,
                        'Warning': 'Write-Up',
                        'Raw': raw_file_path
                    }
                    utils.print_debug(self.options, item)
                    result.append(item)

        return result
Example 6
    def sending(self, url):
        # send the request and process the response
        utils.print_debug(self.options, url)
        # catch errors such as session timeouts
        try:
            r = sender.send_get(self.options, url, self.cookies)
        except Exception:
            r = False
        if r:
            response = r.text
            if self.options['store_content']:
                ts = str(int(time.time()))
                raw_file = self.options['raw'] + \
                    "/fofa/{0}_{1}".format(utils.url_encode(
                        url.replace(self.base_url, '')).replace('/', '_'), ts)
                utils.just_write(raw_file, response)

            soup = utils.soup(response)
            self.analyze(soup)
            # check whether there are multiple pages of results
            page_num = self.check_pages(soup)
            # only grab extra pages when logged in and paging is enabled
            if page_num and self.logged_in and not self.options[
                    'disable_pages']:
                utils.print_info("Continuing to grab more pages")
                self.pages(page_num)
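
utils.random_sleep(a, b), used throughout these examples, is presumably a jittered pause to stay under rate limits. A one-liner equivalent with the standard library, under that assumption:

import random
import time

def random_sleep(low, high):
    # sleep a random amount between low and high seconds
    time.sleep(random.uniform(low, high))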
Example 7
    def optimize(self, json_response):
        analytics = json_response.get('aggs')
        if not analytics:
            return False

        # get the analytics response
        url = 'https://www.zoomeye.org/aggs/{0}'.format(analytics)
        r = sender.send_get(self.options, url, headers=self.headers)

        if r.status_code == 200:
            analytics_json = utils.get_json(r.text)
        else:
            return False

        analytics_countries = analytics_json.get('country')

        raw_query = self.options['zoomeye_query']
        clean_query = self.options['zoomeye_query']

        if 'country' in raw_query:
            country_code = utils.get_country_code(utils.url_decode(raw_query))
            # strip the country filter and subdivisions if they exist
            clean_query = raw_query.replace(' +country:', '').replace(
                '"{0}"'.format(str(country_code)), '')

        for country_item in analytics_countries:
            utils.print_info(
                "Optimize query by filtering with country: {0}".format(
                    country_item.get('name')))
            # loop through each city (subdivision)
            for city in country_item.get('subdivisions'):
                if 'country' in raw_query:
                    real_query = raw_query + ' +subdivisions:"{0}"'.format(
                        city.get('name'))
                else:
                    real_query = clean_query + \
                        ' +country:"{0}"'.format(country_item.get('name')) + \
                        ' +subdivisions:"{0}"'.format(city.get('name'))

                query = utils.url_encode(real_query)

                url = 'https://www.zoomeye.org/search?q={0}&t=host'.format(
                    query)
                r = sender.send_get(self.options, url, headers=self.headers)
                if r and r.status_code == 200:
                    json_response = utils.get_json(r.text)
                    self.analyze(json_response)
Example 8
    def asnlookup(self, company):
        utils.print_banner(f"Starting scraping {company} from asnlookup.com")
        url = f'http://asnlookup.com/api/lookup?org={company}'
        r = sender.send_get(self.options, url, None)
        data = r.json()
        if not data:
            utils.print_bad('No IP found')
        else:
            content = "\n".join(data)
            print(content)
            utils.just_write(self.options['output'], content)
            utils.just_cleanup(self.options['output'])
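
The "\n".join(data) above implies the asnlookup.com endpoint returns a JSON array of strings (IP ranges for the organization). A standalone sketch using requests directly, under that assumption about the response shape:

import requests

def asn_ranges(company):
    # expected (assumed) response shape: ["1.2.3.0/24", "5.6.7.0/24", ...]
    r = requests.get('http://asnlookup.com/api/lookup',
                     params={'org': company}, timeout=30)
    r.raise_for_status()
    return r.json()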
Example 9
    def tweet(self, tag):
        results = []
        query = '#{0} #{1}'.format(self.query, tag)
        # @TODO improve by increasing the position
        url = 'https://twitter.com/search?vertical=default&q={0}&src=unkn'.format(
            utils.url_encode(query))
        r = sender.send_get(self.options, url, cookies=None)
        if r.status_code == 200:
            response = r.text

            # store the raw HTML response
            raw_file_path = self.options['raw'] + '/tweets_{1}_{0}.html'.format(
                self.query.replace(' ', '_'), tag)
            if self.options.get('store_content'):
                utils.just_write(raw_file_path, response)
                utils.print_debug(
                    self.options,
                    "Writing raw response to: {0}".format(raw_file_path))
            soup = utils.soup(response)

            # Custom here
            divs = soup.find_all('div', 'original-tweet')
            for div in divs:
                content = div.findChildren('p',
                                           'TweetTextSize')[0].text.strip()
                links = [
                    x.get('data-expanded-url') for x in div.findChildren('a')
                    if 't.co' in (x.get('href') or '')
                ]
                if not links:
                    external_url = 'N/A'
                else:
                    external_url = '|'.join([str(x) for x in links])

                item = {
                    'Query': self.query,
                    'Title': query,
                    'Content': content,
                    'External_url': external_url,
                    'Source': url,
                    'Warning': 'Tweet',
                    'Raw': raw_file_path
                }
                utils.print_debug(self.options, item)
                results.append(item)

        return results
Example 10
    def get_num_pages(self, url):
        summary_url = 'https://www.shodan.io/search/_summary?{0}'.format(
            utils.get_query(url))
        
        r = sender.send_get(self.options, summary_url, self.cookies)
        
        if r.status_code == 200:
            soup = utils.soup(r.text)
            results_total = soup.find_all('div', 'bignumber')[
                0].text.replace(',', '')
            page_num = str(int(int(results_total) / 10))
            utils.print_good("Detected possible {0} pages for {1} results".format(
                page_num, results_total))
            return page_num

        return False
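
Note that int(int(results_total) / 10) floors the division, so 25 results gives page_num == 2 and the trailing partial page is never visited. If that last page matters, ceiling division is the usual fix (a sketch, assuming Shodan's 10 results per page as above):

import math

def num_pages(results_total, per_page=10):
    # 25 results / 10 per page -> 3 pages, not 2
    return math.ceil(int(results_total) / per_page)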
Example 11
    def get_asn(self):
        ip_target = utils.resolve_input(self.target)
        if not ip_target:
            return False
        utils.print_banner(f"Starting scraping ASN details of {ip_target}")

        utils.print_info(f'Get ASN from IP: {ip_target}')
        url = f'https://ipinfo.io/{ip_target}/json'
        r = sender.send_get(self.options, url, None)
        org_info = r.json().get('org')
        asn = utils.get_asn(org_info)
        if asn:
            utils.print_info(f"Detected target running on {asn}")
            ips = self.get_asn_ip(asn)
            utils.just_write(self.options['output'], "\n".join(ips))
            utils.just_cleanup(self.options['output'])
        else:
            return False
Example 12
    def sending(self, url):
        # send the request and process the response
        r = sender.send_get(self.options, url, self.cookies)
        if r:
            response = r.text
            if self.options['store_content']:
                ts = str(int(time.time()))
                raw_file = self.options['raw'] + \
                    "/shodan/{0}_{1}".format(utils.url_encode(
                        url.replace(self.base_url, '')).replace('/', '_'), ts)
                utils.just_write(raw_file, response)

            soup = utils.soup(response)
            self.analyze(soup)

            # check whether there are multiple pages of results
            if self.logged_in and not self.options['disable_pages']:
                utils.print_info("Continuing to grab more pages")
                self.pages(self.get_num_pages(url))
Example 13
    def pages(self, page_num):
        for i in range(2, int(page_num) + 1):
            utils.print_info("Get more result from page: {0}".format(str(i)))
            utils.random_sleep(1, 2)

            query = utils.url_encode(self.options['censys_query'])
            url = 'https://censys.io/ipv4/_search?q={1}&page={0}'.format(
                str(i), query)

            r = sender.send_get(self.options, url, self.cookies)
            if r.status_code == 200:
                response = r.text
                if 'class="alert alert-danger"' in response:
                    utils.print_bad("Reached the limit at page {0}".format(
                        str(i)))
                    return
                else:
                    soup = utils.soup(response)
                    self.analyze(soup)
Example 14
    def do_login(self):
        utils.print_info("Re-authenticating using credentials from: {0}".format(
            self.options.get('config')))

        login_url = 'https://i.nosec.org/login?service=http%3A%2F%2Ffofa.so%2Fusers%2Fservice'
        r = sender.send_get(self.options, login_url, cookies=None)

        if r.status_code == 200:
            cookies = r.cookies
            form = utils.soup(r.text).find(id="login-form")
            inputs = form.findChildren('input')

            for tag in inputs:
                if tag.get('name') == 'authenticity_token':
                    authenticity_token = tag.get('value')
                if tag.get('name') == 'lt':
                    lt = tag.get('value')

            username, password = utils.get_cred(self.options, source='fofa')

            data = {
                "utf8": "\u2713",  # the check-mark character the form expects
                "authenticity_token": authenticity_token,
                "lt": lt,
                "service": "http://fofa.so/users/service",
                "username": username,
                "password": password,
                "rememberMe": "1",
                "button": ''
            }

            really_login_url = 'https://i.nosec.org/login'
            r1 = sender.send_post(self.options, really_login_url, cookies,
                                  data)

            if r1.status_code == 200:
                fofa_cookie = r1.cookies.get('_fofapro_ars_session')
                utils.set_session(self.options, fofa_cookie, source='fofa')
                return fofa_cookie
        return False
Example 15
    def check_session(self):
        utils.print_debug(self.options, "Checking session for Censys")
        sess_url = 'https://censys.io/account'

        r = sender.send_get(self.options, sess_url, self.cookies)

        if r.status_code == 302 or '/login' in r.text:
            utils.print_bad("Looks like the Censys session is invalid.")
            new_cookie = self.do_login()
            if new_cookie:
                utils.print_good("Re-authentication successful")
                self.cookies = {"auth_tkt": new_cookie}
                return True

            return False
        elif r.status_code == 200:
            utils.print_good("Getting result as authenticated user")
            return True

        return False
Example 16
    def pages(self, page_num):
        for i in range(2, int(page_num) + 1):
            utils.print_info("Get more result from page: {0}".format(str(i)))

            query = utils.url_encode(
                utils.just_b64_encode(self.options['fofa_query']))
            url = 'https://fofa.so/result?page={0}&qbase64={1}'.format(
                str(i), query)
            utils.print_debug(self.options, url)
            r = sender.send_get(self.options, url, self.cookies)

            if r.status_code == 200:
                response = r.text
                if 'class="error"' in response:
                    utils.print_bad("Reached the limit at page {0}".format(
                        str(i)))
                    return
                else:
                    soup = utils.soup(response)
                    self.analyze(soup)
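
FoFa takes the query base64-encoded in the qbase64 parameter (hence just_b64_encode above). A standalone sketch of building such a page URL with only the standard library, matching the URL shape used in this example:

import base64
from urllib.parse import quote

def fofa_page_url(query, page):
    # FoFa expects the raw query base64-encoded, then URL-encoded
    qbase64 = base64.b64encode(query.encode()).decode()
    return 'https://fofa.so/result?page={0}&qbase64={1}'.format(page, quote(qbase64, safe=''))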
Example 17
    def pages(self, page_num):
        for i in range(2, int(page_num) + 1):
            utils.random_sleep(1, 2)
            utils.print_info("Get more result from page: {0}".format(str(i)))

            query = utils.url_encode(self.options['zoomeye_query'])
            url = 'https://www.zoomeye.org/search?q={0}&t=host&p={1}'.format(
                query, str(i))
            r = sender.send_get(self.options, url, headers=self.headers)

            if r.status_code == 200:
                response = r.text
                if '"msg": "forbidden"' in response:
                    utils.print_bad("Reached the limit at page {0}".format(
                        str(i)))
                    return
                else:
                    json_response = utils.get_json(response)
                    self.analyze(json_response)
                    self.optimize(json_response)
Example 18
    def check_session(self):
        utils.print_debug(self.options, "Checking session for FoFa")
        sess_url = 'https://fofa.so/user/users/info'
        r = sender.send_get(self.options, sess_url, self.cookies)

        if r.status_code == 302 or '/login' in r.text:
            utils.print_bad(
                "Looks like the FoFa session is invalid. You're going to get very few results"
            )
            new_cookie = self.do_login()
            if new_cookie:
                utils.print_good("Re-authentication successful")
                self.cookies = {"_fofapro_ars_session": new_cookie}
                return True

            return False
        elif r.status_code == 200:
            utils.print_good("Getting result as authenticated user")
            return True

        return False
Example 19
    def pages(self, page_num):
        for i in range(2, int(page_num) + 1):
            utils.print_info("Sleeping for a couple of seconds because the Shodan server is really strict")
            utils.random_sleep(3, 6)
            utils.print_info("Get more result from page: {0}".format(str(i)))

            query = utils.url_encode(self.options['shodan_query'])
            url = 'https://www.shodan.io/search?query={1}&page={0}'.format(
                str(i), query)

            r = sender.send_get(self.options, url, self.cookies)

            if r.status_code == 200:
                response = r.text
                if 'class="alert alert-error text-center"' in response:
                    utils.print_bad(
                        "Reached the limit at page {0}".format(str(i)))
                    return
                else:
                    soup = utils.soup(response)
                    self.analyze(soup)
Example 20
    def initial(self):
        product = utils.url_encode(self.query)
        url = 'https://www.cvedetails.com/product-search.php?vendor_id=0&search={0}'.format(
            product)

        # get summary table
        products = []
        r = sender.send_get(self.options, url, cookies=None)
        if r.status_code == 200:
            response = r.text
            if 'class="errormsg"' in response:
                utils.print_bad("No entry found for: {0}".format(self.query))
                return
            
            # the summary is in <table class="listtable">
            summary_table = utils.soup(response).find_all("table", "listtable")
            if summary_table:
                trs = summary_table[0].findChildren('tr')
                if len(trs) <= 1:
                    utils.print_bad(
                        "No entry found for: {0}".format(self.query))
                    return
                
                for tr in trs[1:]:
                    for td in tr.findChildren('td'):
                        if td.a:
                            if 'See all vulnerabilities' in (td.a.get('title') or ''):
                                products.append(td.a.get('href'))

        final = []
        # if products with vulnerabilities were found, go fetch the details
        if products:
            for url in products:
                results = self.sending(self.baseURL + url)
                if results:
                    final.extend(results)

        # write the final output
        self.conclude(final)
Example 21
    def optimize(self, query):
        utils.print_good("Analyzing metadata page for more results")

        raw_query = utils.url_decode(query)
        if 'location.country' in raw_query:
            country = utils.get_country_code(raw_query, source='censys')
            query = raw_query.replace(country, '').replace(
                'AND ' + country, '').replace('and ' + country, '')

        url = 'https://censys.io/ipv4/metadata?q={0}'.format(query)
        r = sender.send_get(self.options, url, self.cookies)

        if r.status_code == 200:
            soup = utils.soup(r.text)
        else:
            return False

        query_by_countries = []
        # find the Country Breakdown table in the metadata page
        divs = soup.find_all("div", 'left-table')
        country_tables = []
        for div in divs:
            if 'Country Breakdown' in div.h6.text:
                country_tables = div.find_all('tr')

        for row in country_tables:
            item = {
                'url': 'N/A',
                'country': 'N/A'
            }

            tds = row.find_all('td')
            for td in tds:
                if td.findChildren('a'):
                    item['url'] = self.base_url + td.a.get('href')
                    item['country'] = td.a.text
            query_by_countries.append(item)

        utils.print_debug(self.options, query_by_countries)
        return query_by_countries
Example 22
    def optimize(self, query):
        utils.print_good("Analyzing results by country and city for more results")
        # custom headers for stats
        custom_headers = {
            "User-Agent": "Mozilla/5.0 (X11; FreeBSD amd64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36",
            "Accept": "text/html, application/javascript, application/ecmascript, application/x-ecmascript, */*; q=0.01",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate",
            "X-Requested-With": "XMLHttpRequest",
            "Connection": "close"
        }
        url = 'https://fofa.so/search/result_stats?qbase64={0}'.format(query)
        r = sender.send_get(self.options,
                            url,
                            self.cookies,
                            headers=custom_headers)

        if r.status_code == 200:
            # unescape the JavaScript-escaped HTML fragment
            html_data = r.text.replace('\\/', '/').replace(
                '\\"', '"').replace("\\'", "'")
            soup = utils.soup(html_data)
            query_by_cities = []
            # custom here
            country_divs = soup.find_all("div", "class_sf")
            for div in country_divs:
                city_links = div.findChildren("a")
                for link in city_links:
                    query_by_cities.append({
                        'url': self.base_url + link.get('href'),
                        'city': link.text,
                    })

            # utils.print_debug(self.options, query_by_cities)
            return query_by_cities
        return False
Example 23
    def check_session(self):
        utils.print_debug(self.options, "Checking session for ZoomEye")
        sess_url = 'https://www.zoomeye.org/user'

        # attach the jwt header if it was set
        jwt = self.jwt.get('Cube-Authorization')
        if jwt and jwt != 'None':
            self.headers['Cube-Authorization'] = jwt

        r = sender.send_get(self.options, sess_url, headers=self.headers)

        if not r or 'login required' in r.text:
            utils.print_bad("Looks like the ZoomEye session is invalid.")
            # drop the jwt header to avoid a 401 response
            self.headers.pop('Cube-Authorization', None)
            return False
        elif 'uuid' in r.text or 'nickname' in r.text:
            utils.print_good("Getting result as authenticated user")
            return True

        return False
Example 24
    def sending(self, url):
        # send the request and process the response
        r = sender.send_get(self.options, url, self.cookies)
        if r:
            response = r.text
            if 'ratelimit' in response:
                utils.print_bad('Looks like you got blocked by Censys. Consider using a proxy')
                return False

            if self.options['store_content']:
                ts = str(int(time.time()))
                raw_file = self.options['raw'] + \
                    "/censys/{0}_{1}".format(utils.url_encode(
                        url.replace(self.base_url, '')).replace('/', '_'), ts)
                utils.just_write(raw_file, response)

            soup = utils.soup(response)
            self.analyze(soup)

            # check whether there are multiple pages of results
            if not self.options['disable_pages']:
                utils.print_info("Continuing to grab more pages")
                self.pages(self.get_num_pages(soup))
Example 25
    def sending(self, url):
        # send the request and process the response
        r1 = sender.send_get(self.options, url, headers=self.headers)
        if r1:
            response = r1.text

            if self.options['store_content']:
                ts = str(int(time.time()))
                raw_file = self.options['raw'] + \
                    "/zoomeye/{0}_{1}".format(utils.url_encode(
                        url.replace(self.base_url, '')).replace('/', '_'), ts)
                utils.just_write(raw_file, response)

            json_response = utils.get_json(response)
            self.analyze(json_response)

            # loop through pages if you're logged in
            page_num = self.get_num_pages(json_response)
            if self.logged_in and page_num and int(page_num) > 1:
                self.pages(page_num)

            # get aggregations to find more results
            self.optimize(json_response)
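
utils.get_json is used throughout the ZoomEye examples but never shown; presumably it is a forgiving json.loads wrapper. A minimal sketch of that assumption:

import json

def get_json(text):
    # tolerate non-JSON responses (e.g. an HTML error page) by returning {}
    try:
        return json.loads(text)
    except (ValueError, TypeError):
        return {}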