コード例 #1
0
    def optimize(self, query):
        """Parse the Shodan summary page for `query` and build follow-up
        queries filtered by city so the search can be repeated for more
        results.

        Returns a list of {'url', 'city', 'country'} dicts, or False when
        the summary page could not be fetched.
        """
        url = 'https://www.shodan.io/search/_summary?query={0}'.format(query)
        utils.print_good("Analyze first page for more result")
        r = sender.send_get(self.options, url, self.cookies)

        if r.status_code == 200:
            soup = utils.soup(r.text)
        else:
            return False

        query_by_cities = []
        # check if query have country filter or not
        if 'country' in query:
            links = soup.find_all("a")
            country = utils.get_country_code(utils.url_decode(query))

            for link in links:
                # Bug fix: <a> tags without an href return None from
                # link.get('href'); guard so `in` cannot raise TypeError.
                href = link.get('href') or ''
                if 'city' in href:
                    item = {
                        'url': href,
                        'city': link.text,
                        'country': country
                    }
                    utils.print_debug(self.options, item)
                    query_by_cities.append(item)
        else:
            # no country filter yet: collect countries first, then query
            # the summary endpoint again per country to get its cities
            links = soup.find_all("a")
            countries = []
            for link in links:
                href = link.get('href') or ''
                if 'country' in href:
                    countries.append(
                        utils.get_country_code(utils.url_decode(href)))
            utils.print_debug(self.options, countries)

            for country in countries:
                # sending request again to get city
                country_query = utils.url_encode(' country:"{0}"'.format(country))
                url = 'https://www.shodan.io/search/_summary?query={0}{1}'.format(
                    query, country_query)
                r1 = sender.send_get(self.options, url, self.cookies)
                utils.random_sleep(5, 8)
                utils.print_info(
                    "Sleep for couple seconds because Shodan server is really strict")
                if r1.status_code == 200:
                    soup1 = utils.soup(r1.text)
                    links = soup1.find_all("a")
                    for link in links:
                        href = link.get('href') or ''
                        if 'city' in href:
                            item = {
                                'url': href,
                                'city': link.text,
                                'country': country
                            }
                            utils.print_debug(self.options, item)
                            query_by_cities.append(item)

        utils.print_debug(self.options, query_by_cities)
        return query_by_cities
コード例 #2
0
    def sending(self, url):
        """Send a GET request to a FoFa result URL, optionally persist the
        raw HTML, analyze the page, and grab extra pages when logged in."""
        utils.print_debug(self.options, url)
        # catch error when session timeout
        try:
            r = sender.send_get(self.options, url, self.cookies)
        except Exception:
            # Bug fix: was a bare `except:` which also swallowed
            # SystemExit and KeyboardInterrupt.
            r = False
        if not r:
            return

        response = r.text
        if self.options['store_content']:
            ts = str(int(time.time()))
            raw_file = self.options['raw'] + \
                "/fofa/{0}_{1}".format(utils.url_encode(
                    url.replace(self.base_url, '')).replace('/', '_'), ts)
            utils.just_write(raw_file, response)

        soup = utils.soup(response)
        self.analyze(soup)
        # checking if there is many pages or not
        page_num = self.check_pages(soup)
        # only page through results when logged in and paging is enabled
        if page_num and self.logged_in and not self.options['disable_pages']:
            utils.print_info("Continue grab more pages")
            self.pages(page_num)
コード例 #3
0
    def brute_country_code(self, query):
        """Repeat the Shodan query once per known country code by
        substituting each code into a `country:"..."` filter."""
        utils.print_info("Brute the query with country code")
        if 'country' in query:
            # swap the existing country value for a placeholder
            raw_query = query.replace(
                utils.get_country_code(utils.url_decode(query)), '[replace]')
        else:
            # Bug fix: raw_query was used with `+=` before being defined,
            # raising UnboundLocalError when the query had no country
            # filter; build it from the incoming query instead.
            raw_query = query + utils.url_encode(' country:"[replace]"')

        for country_code in utils.full_country_code:
            query = raw_query.replace('[replace]', country_code)
            url = 'https://www.shodan.io/search?query={1}&page={0}'.format(
                str(1), query)
            self.sending(url)
コード例 #4
0
ファイル: censys.py プロジェクト: zshell/Metabigor
    def brute_country_code(self, query):
        """Repeat the Censys query once per known country code using a
        `location.country_code:"..."` filter."""
        utils.print_info("Brute the query with country code")
        # clean the country filter
        raw_query = utils.url_decode(query)
        if 'location.country' in raw_query:
            country = utils.get_country_code(raw_query, source='censys')
            # Bug fix: the cleaned query was assigned to `query` (never
            # used afterwards) instead of `raw_query`, so the old country
            # filter was never removed. Also strip the 'AND'/'and' prefix
            # forms BEFORE the bare value, otherwise they can never match.
            raw_query = raw_query.replace(
                'AND ' + country, '').replace('and ' + country, '').replace(
                    country, '')

        raw_query += ' and location.country_code:"[replace]"'
        for country_code in utils.full_country_code:
            query = utils.url_encode(
                raw_query.replace('[replace]', country_code))
            url = 'https://censys.io/ipv4/_search?q={1}&page={0}'.format(str(1), query)
            self.sending(url)
コード例 #5
0
ファイル: writeups.py プロジェクト: nhmichael/Metabigor
    def tweet(self, tag):
        """Scrape Twitter search results for '#<query> #<tag>' and return
        a list of result items (tweet content, expanded external links,
        source URL, raw-file path)."""
        results = []
        query = '#{0} #{1}'.format(self.query, tag)
        # @TODO improve by increase the the position
        url = 'https://twitter.com/search?vertical=default&q={0}&src=unkn'.format(
            utils.url_encode(query))
        r = sender.send_get(self.options, url, cookies=None)
        if r.status_code == 200:
            response = r.text

            # store raw html response
            raw_file_path = self.options['raw'] + '/tweets_{1}_{0}.html'.format(
                self.query.replace(' ', '_'), tag)
            if self.options.get('store_content'):
                utils.just_write(raw_file_path, response)
                utils.print_debug(
                    self.options,
                    "Writing raw response to: {0}".format(raw_file_path))
            soup = utils.soup(response)

            # Custom here
            divs = soup.find_all('div', 'original-tweet')
            for div in divs:
                content = div.findChildren('p',
                                           'TweetTextSize')[0].text.strip()
                # Bug fix: x.get('href') is None for anchors without an
                # href attribute; guard so `in` cannot raise TypeError.
                links = [
                    x.get('data-expanded-url') for x in div.findChildren('a')
                    if 't.co' in (x.get('href') or '')
                ]
                if len(links) == 0:
                    external_url = 'N/A'
                else:
                    external_url = '|'.join([str(x) for x in links])

                item = {
                    'Query': self.query,
                    'Title': query,
                    'Content': content,
                    'External_url': external_url,
                    'Source': url,
                    'Warning': 'Tweet',
                    'Raw': raw_file_path
                }
                utils.print_debug(self.options, item)
                results.append(item)

        return results
コード例 #6
0
ファイル: zoomeye.py プロジェクト: nhmichael/Metabigor
    def optimize(self, json_response):
        """Fan the current ZoomEye query out into per-country/per-city
        searches using the aggregation ('aggs') endpoint.

        Takes the JSON of a prior search response; returns False when no
        aggregation id is present or the aggs request fails. Otherwise
        sends one search per subdivision and feeds each JSON response to
        self.analyze().
        """
        analytics = json_response.get('aggs')
        if not analytics:
            return False

        # fetch the analytics (aggregation) response for this search
        url = 'https://www.zoomeye.org/aggs/{0}'.format(analytics)
        r = sender.send_get(self.options, url, headers=self.headers)

        if r.status_code == 200:
            analytics_json = utils.get_json(r.text)
        else:
            return False

        # presumably a list of {'name': ..., 'subdivisions': [...]} dicts
        # — TODO confirm against the aggs endpoint response shape
        analytics_countries = analytics_json.get('country')

        raw_query = self.options['zoomeye_query']
        clean_query = self.options['zoomeye_query']

        if 'country' in raw_query:
            country_code = utils.get_country_code(utils.url_decode(raw_query))
            # strip the existing ' +country:"CC"' filter so a fresh one
            # can be appended per aggregated country below
            clean_query = raw_query.replace(' +country:', '').replace(
                '"{0}"'.format(str(country_code)), '')

        for country_item in analytics_countries:
            utils.print_info(
                "Optimize query by filter with coutry: {0}".format(
                    country_item.get('name')))
            # loop through each city (subdivision) of the country
            for city in country_item.get('subdivisions'):
                if 'country' in raw_query:
                    # query already has a country filter: only narrow by city
                    real_query = raw_query + ' +subdivisions:"{0}"'.format(
                        city.get('name'))
                else:
                    # add both country and city filters to the cleaned query
                    real_query = clean_query + \
                        ' +country:"{0}"'.format(country_item.get('name')) + \
                        ' +subdivisions:"{0}"'.format(city.get('name'))

                query = utils.url_encode(real_query)

                url = 'https://www.zoomeye.org/search?q={0}&t=host'.format(
                    query)
                r = sender.send_get(self.options, url, headers=self.headers)
                if r and r.status_code == 200:
                    json_response = utils.get_json(r.text)
                    self.analyze(json_response)
コード例 #7
0
    def brute_country_code(self, query):
        """Repeat the FoFa query once per known country code.

        FoFa queries travel base64- and url-encoded, so decode first,
        inject the placeholder, then re-encode per country code.
        """
        utils.print_info("Brute the query with country code")
        #  && country=US
        raw_query = utils.just_b64_decode(utils.url_decode(query))
        if 'country' in raw_query:
            # Bug fix: the original checked and replaced inside the still
            # encoded `query`, so when a country filter was already
            # present the placeholder was never inserted; operate on the
            # decoded raw_query instead.
            raw_query = raw_query.replace(
                utils.get_country_code(raw_query, source='fofa'), '[replace]')
        else:
            raw_query += '&& country:"[replace]"'

        for country_code in utils.full_country_code:
            plain = raw_query.replace('[replace]', country_code)
            encoded = utils.url_encode(utils.just_b64_encode(plain))
            url = 'https://fofa.so/result?page={0}&qbase64={1}'.format(
                str(1), encoded)

            self.sending(url)
コード例 #8
0
    def initial(self):
        """Entry point for a Shodan search: fetch the first result page,
        optionally brute-force country codes, then repeat the search for
        every city suggested by optimize()."""
        encoded_query = utils.url_encode(self.options['shodan_query'])
        first_page = 'https://www.shodan.io/search?query={1}&page={0}'.format(
            str(1), encoded_query)
        self.sending(first_page)

        # brute the country
        if self.options['brute']:
            self.brute_country_code(encoded_query)

        # repeat the routine with filter by city
        city_queries = self.optimize(encoded_query)
        if not city_queries:
            return
        for entry in city_queries:
            utils.print_info(
                "Get more result by filter with {0} city".format(entry.get('city')))
            self.sending(entry.get('url'))
コード例 #9
0
    def pages(self, page_num):
        """Walk Censys result pages 2..page_num, feeding each to
        analyze(); stop early at the rate-limit alert banner."""
        for page in range(2, int(page_num) + 1):
            utils.print_info("Get more result from page: {0}".format(str(page)))
            utils.random_sleep(1, 2)

            encoded = utils.url_encode(self.options['censys_query'])
            page_url = 'https://censys.io/ipv4/_search?q={1}&page={0}'.format(
                str(page), encoded)

            r = sender.send_get(self.options, page_url, self.cookies)
            if r.status_code != 200:
                continue
            body = r.text
            if 'class="alert alert-danger"' in body:
                utils.print_bad("Reach to the limit at page {0}".format(
                    str(page)))
                return
            self.analyze(utils.soup(body))
コード例 #10
0
    def sending(self, url):
        """Fetch a Shodan search URL, optionally persist the raw HTML,
        run analyze() on it and, for authenticated sessions, walk the
        remaining result pages."""
        resp = sender.send_get(self.options, url, self.cookies)
        if not resp:
            return

        html = resp.text
        if self.options['store_content']:
            stamp = str(int(time.time()))
            slug = utils.url_encode(
                url.replace(self.base_url, '')).replace('/', '_')
            out_path = self.options['raw'] + "/shodan/{0}_{1}".format(slug, stamp)
            utils.just_write(out_path, html)

        parsed = utils.soup(html)
        self.analyze(parsed)

        # checking if there is many pages or not
        if self.logged_in and not self.options['disable_pages']:
            utils.print_info("Continue grab more pages")
            self.pages(self.get_num_pages(url))
コード例 #11
0
ファイル: zoomeye.py プロジェクト: nhmichael/Metabigor
    def pages(self, page_num):
        """Walk ZoomEye result pages 2..page_num, analyzing and
        optimizing each; stop early on a 'forbidden' response."""
        for page in range(2, int(page_num) + 1):
            utils.random_sleep(1, 2)
            utils.print_info("Get more result from page: {0}".format(str(page)))

            encoded = utils.url_encode(self.options['zoomeye_query'])
            page_url = 'https://www.zoomeye.org/search?q={0}&t=host&p={1}'.format(
                encoded, str(page))
            r = sender.send_get(self.options, page_url, headers=self.headers)

            if r.status_code != 200:
                continue
            body = r.text
            if '"msg": "forbidden"' in body:
                utils.print_bad("Reach to the limit at page {0}".format(
                    str(page)))
                return
            data = utils.get_json(body)
            self.analyze(data)
            self.optimize(data)
コード例 #12
0
    def pages(self, page_num):
        """Walk FoFa result pages 2..page_num; stop early when the error
        banner appears (result limit reached)."""
        for page in range(2, int(page_num) + 1):
            utils.print_info("Get more result from page: {0}".format(str(page)))

            encoded = utils.url_encode(
                utils.just_b64_encode(self.options['fofa_query']))
            page_url = 'https://fofa.so/result?page={0}&qbase64={1}'.format(
                str(page), encoded)
            utils.print_debug(self.options, page_url)
            r = sender.send_get(self.options, page_url, self.cookies)

            if r.status_code != 200:
                continue
            body = r.text
            if 'class="error"' in body:
                utils.print_bad("Reach to the limit at page {0}".format(
                    str(page)))
                return
            self.analyze(utils.soup(body))
コード例 #13
0
    def pages(self, page_num):
        """Walk Shodan result pages 2..page_num with a polite delay
        between requests; stop early at Shodan's error banner."""
        for page in range(2, int(page_num) + 1):
            utils.print_info("Sleep for couple seconds because Shodan server is really strict")
            utils.random_sleep(3, 6)
            utils.print_info("Get more result from page: {0}".format(str(page)))

            encoded = utils.url_encode(self.options['shodan_query'])
            page_url = 'https://www.shodan.io/search?query={1}&page={0}'.format(
                str(page), encoded)

            r = sender.send_get(self.options, page_url, self.cookies)

            if r.status_code != 200:
                continue
            body = r.text
            if 'class="alert alert-error text-center"' in body:
                utils.print_bad(
                    "Reach to the limit at page {0}".format(str(page)))
                return
            self.analyze(utils.soup(body))
コード例 #14
0
    def initial(self):
        """Search cvedetails.com for the product, collect the
        per-product vulnerability links from the summary table, fetch
        each via sending() and write the aggregated output."""
        product = utils.url_encode(self.query)
        url = 'https://www.cvedetails.com/product-search.php?vendor_id=0&search={0}'.format(
            product)

        # get summary table
        products = []
        r = sender.send_get(self.options, url, cookies=None)
        if r.status_code == 200:
            response = r.text
            if 'class="errormsg"' in response:
                utils.print_bad("No entry found for: {0}".format(self.query))
                return

            summary_table = utils.soup(response).find_all("table", "listtable")
            # <table class = "listtable"
            if summary_table:
                trs = summary_table[0].findChildren('tr')
                if len(trs) <= 1:
                    # only the header row is present
                    utils.print_bad(
                        "No entry found for: {0}".format(self.query))
                    return

                for tr in trs[1:]:
                    for td in tr.findChildren('td'):
                        # Bug fix: td.a.get('title') returns None for
                        # links without a title attribute; guard so `in`
                        # cannot raise TypeError.
                        if td.a and 'See all vulnerabilities' in (
                                td.a.get('title') or ''):
                            products.append(td.a.get('href'))

        final = []
        # if found product and have vulnerabilities, go get it
        if products:
            for url in products:
                results = self.sending(self.baseURL + url)
                if results:
                    final.extend(results)
        # write final output
        self.conclude(final)
コード例 #15
0
    def initial(self):
        """Entry point for a FoFa search: fetch the first result page,
        optionally brute-force country codes, then repeat the search for
        every city suggested by optimize()."""
        encoded = utils.url_encode(
            utils.just_b64_encode(self.options['fofa_query']))
        first_url = 'https://fofa.so/result?page={0}&qbase64={1}'.format(
            str(1), encoded)
        self.sending(first_url)

        # brute the country
        if self.options['brute']:
            self.brute_country_code(encoded)

        # repeat the routine with filter by city
        city_items = self.optimize(encoded)
        if not city_items:
            return
        for entry in city_items:
            utils.print_info(
                "Get more result by filter with {0} city".format(
                    entry.get('city')))
            self.sending(entry.get('url'))
            time.sleep(1)
コード例 #16
0
ファイル: zoomeye.py プロジェクト: nhmichael/Metabigor
    def sending(self, url):
        """Fetch a ZoomEye API URL, optionally store the raw response,
        analyze the JSON, page through extra results when logged in, and
        finally run the aggregation-based optimizer."""
        resp = sender.send_get(self.options, url, headers=self.headers)
        if not resp:
            return

        body = resp.text

        if self.options['store_content']:
            stamp = str(int(time.time()))
            slug = utils.url_encode(
                url.replace(self.base_url, '')).replace('/', '_')
            utils.just_write(
                self.options['raw'] + "/zoomeye/{0}_{1}".format(slug, stamp),
                body)

        data = utils.get_json(body)
        self.analyze(data)

        # loop through pages if you're logged in
        total_pages = self.get_num_pages(data)
        if self.logged_in and total_pages and int(total_pages) > 1:
            self.pages(total_pages)

        # get aggs and found more result
        self.optimize(data)
コード例 #17
0
ファイル: censys.py プロジェクト: zshell/Metabigor
    def sending(self, url):
        """Fetch a Censys search URL, bail out on rate limiting, persist
        the raw HTML when configured, analyze it and walk extra pages."""
        resp = sender.send_get(self.options, url, self.cookies)
        if not resp:
            return

        body = resp.text
        if 'ratelimit' in body:
            utils.print_bad('Looks like you get block from Censys. Consider using Proxy')
            return False

        if self.options['store_content']:
            stamp = str(int(time.time()))
            slug = utils.url_encode(
                url.replace(self.base_url, '')).replace('/', '_')
            utils.just_write(
                self.options['raw'] + "/censys/{0}_{1}".format(slug, stamp),
                body)

        parsed = utils.soup(body)
        self.analyze(parsed)

        # checking if there is many pages or not
        if not self.options['disable_pages']:
            utils.print_info("Continue grab more pages")
            self.pages(self.get_num_pages(parsed))
コード例 #18
0
ファイル: config.py プロジェクト: xudongr/Osmedeus
def parsing_config(config_path, args):
    """Load (or create from the template) the Osmedeus config file,
    merge CLI arguments and environment variables into it, write it
    back, and return a flat options dict.

    Args:
        config_path: path of the .conf file; copied from
            'template-config.conf' in the current directory when missing.
        args: parsed argparse namespace.

    Returns:
        dict mapping UPPERCASE option names to their string values.
    """
    options = {}

    # some default path
    github_api_key = str(os.getenv("GITROB_ACCESS_TOKEN"))
    cwd = str(os.getcwd())

    # just hardcode if gopath not loaded
    go_path = cwd + "/plugins/go"

    if args.slack:
        bot_token = str(os.getenv("SLACK_BOT_TOKEN"))
    else:
        bot_token = None

    # Slack channel ids all come from the environment
    log_channel = str(os.getenv("LOG_CHANNEL"))
    status_channel = str(os.getenv("STATUS_CHANNEL"))
    report_channel = str(os.getenv("REPORT_CHANNEL"))
    stds_channel = str(os.getenv("STDS_CHANNEL"))
    verbose_report_channel = str(os.getenv("VERBOSE_REPORT_CHANNEL"))

    if os.path.isfile(config_path):
        utils.print_info('Config file detected: {0}'.format(config_path))
    else:
        utils.print_info('New config file created: {0}'.format(config_path))
        shutil.copyfile(cwd + '/template-config.conf', config_path)

    # both branches above ended with the same two lines; hoisted here
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read(config_path)

    if args.workspace:
        workspaces = os.path.abspath(args.workspace)
    else:
        workspaces = cwd + "/workspaces/"

    config.set('Enviroments', 'cwd', cwd)
    config.set('Enviroments', 'go_path', go_path)
    config.set('Enviroments', 'github_api_key', github_api_key)
    config.set('Enviroments', 'workspaces', str(workspaces))

    if args.debug:
        # debug mode: write literal placeholders instead of real secrets
        config.set('Slack', 'bot_token', 'bot_token')
        config.set('Slack', 'log_channel', 'log_channel')
        config.set('Slack', 'status_channel', 'status_channel')
        config.set('Slack', 'report_channel', 'report_channel')
        config.set('Slack', 'stds_channel', 'stds_channel')
        config.set('Slack', 'verbose_report_channel', 'verbose_report_channel')
    else:
        config.set('Slack', 'bot_token', str(bot_token))
        config.set('Slack', 'log_channel', log_channel)
        config.set('Slack', 'status_channel', status_channel)
        config.set('Slack', 'report_channel', report_channel)
        config.set('Slack', 'stds_channel', stds_channel)
        config.set('Slack', 'verbose_report_channel', verbose_report_channel)

    # Mode config of the tool
    if args.slow and args.slow == 'all':
        speed = "slow"
    else:
        speed = "quick"

    config.set('Mode', 'speed', speed)
    config.set('Mode', 'module', str(args.module))
    config.set('Mode', 'debug', str(args.debug))
    config.set('Mode', 'force', str(args.force))

    # Target stuff
    # parsing arguments
    git_target = args.git if args.git else None
    burpstate_target = args.burp if args.burp else None
    target_list = args.targetlist if args.targetlist else None
    company = args.company if args.company else None
    output = args.output if args.output else None
    target = args.target if args.target else None
    strip_target = target
    ip = target
    workspace = target
    # get direct input as single or a file
    direct_input = args.input if args.input else None
    direct_input_list = args.inputlist if args.inputlist else None

    # target config
    if args.target:
        target = args.target

    # set target is direct input if not specific
    elif direct_input or direct_input_list:
        if direct_input:
            direct_target = direct_input
        if direct_input_list:
            direct_target = os.path.basename(direct_input_list)

        target = direct_target
        output = args.output if args.output else utils.strip_slash(
            os.path.splitext(target)[0])
        company = args.company if args.company else utils.strip_slash(
            os.path.splitext(target)[0])
    else:
        target = None

    # parsing some stuff related to target
    if target:
        # get the main domain of the target
        strip_target = utils.get_domain(target)
        if '/' in strip_target:
            strip_target = utils.strip_slash(strip_target)

        output = args.output if args.output else strip_target
        company = args.company if args.company else strip_target

        # url encode to make sure it can be send through API
        workspace = workspaces + strip_target
        workspace = utils.url_encode(workspace)

        # check connection to target
        if not direct_input and not direct_input_list:
            try:
                ip = socket.gethostbyname(strip_target)
            except OSError:
                # Bug fix: narrowed from a bare `except:`; DNS failures
                # raise socket.gaierror, an OSError subclass.
                ip = None
                utils.print_bad(
                    "Something wrong to connect to {0}".format(target))
        else:
            ip = None

    try:
        # getting proxy from args
        proxy = args.proxy if args.proxy else None
        proxy_file = args.proxy_file if args.proxy_file else None

        config.set('Proxy', 'proxy', str(proxy))
        config.set('Proxy', 'proxy_file', str(proxy_file))

        if config['Proxy']['proxy_cmd'] == 'None':
            # only works for Kali proxychains, change it if you on other OS
            proxy_cmd = "proxychains -f {0}".format(proxy_file)
            config.set('Proxy', 'proxy_cmd', str(proxy_cmd))
    except Exception:
        # Bug fix: narrowed from a bare `except:` so Ctrl-C still works.
        # Old config files missing the [Proxy] section land here.
        utils.print_info(
            "Your config file seem to be outdated, Backup it and delete it to regenerate the new one"
        )

    config.set('Target', 'input', str(direct_input))
    config.set('Target', 'input_list', str(direct_input_list))
    config.set('Target', 'git_target', str(git_target))
    config.set('Target', 'burpstate_target', str(burpstate_target))
    config.set('Target', 'target_list', str(target_list))
    config.set('Target', 'output', str(output))
    config.set('Target', 'target', str(target))
    config.set('Target', 'strip_target', str(strip_target))
    config.set('Target', 'company', str(company))
    config.set('Target', 'ip', str(ip))

    config.set('Enviroments', 'workspace', str(workspace))

    # create workspace folder for the target
    utils.make_directory(workspace)

    # set the remote API
    if args.remote:
        config.set('Server', 'remote_api', args.remote)

    # set credentials as you define from arguments
    if args.auth:
        # expected format user:pass — split only on the FIRST colon so
        # passwords containing ':' are preserved (the old split(":")
        # silently truncated them)
        username, password = args.auth.strip().split(":", 1)

        config.set('Server', 'username', username)
        config.set('Server', 'password', password)
    else:
        # set random password if default password detect
        if config['Server']['password'] == 'super_secret':
            new_pass = hashlib.md5(str(int(
                time.time())).encode()).hexdigest()[:6]
            config.set('Server', 'password', new_pass)

    # save the config
    with open(config_path, 'w') as configfile:
        config.write(configfile)

    # re-read the saved file so interpolated values are final
    config = ConfigParser(interpolation=ExtendedInterpolation())
    config.read(config_path)
    options['CONFIG_PATH'] = os.path.abspath(config_path)

    # flatten every section into UPPERCASE option keys
    for sec in config.sections():
        for key in config[sec]:
            options[key.upper()] = config.get(sec, key)

    # partial slow mode (a specific module rather than 'all')
    if args.slow and args.slow != 'all':
        options['SLOW'] = args.slow

    # parsing proxy stuff
    if options.get('PROXY') or options.get('PROXY_FILE'):
        proxy_parsing(options)
    else:
        # just for the old config
        options['PROXY'] = "None"
        options['PROXY_FILE'] = "None"

    options = clean_up(options)

    return options
コード例 #19
0
ファイル: zoomeye.py プロジェクト: nhmichael/Metabigor
 def initial(self):
     """Build the first ZoomEye host-search URL and dispatch it."""
     encoded = utils.url_encode(self.options['zoomeye_query'])
     first_url = 'https://www.zoomeye.org/search?q={0}&t=host&p=1'.format(
         encoded)
     self.sending(first_url)