Code example #1
File: dnsdumpsterengine.py — Project: orleven/srcscan
    async def run(self):
        """Query the dnsdumpster engine for subdomains of the target.

        Checks engine availability, POSTs the prepared search form
        (``self.data``) to ``self.base_url``, then feeds the response body
        through the shared error-check / extract pipeline.
        """
        async with ClientSession() as session:
            flag = await self.check_engine_available(session, self.engine)
            if not flag:
                self.logger.error(
                    "{engine_name} is not available, skipping!".format(
                        engine_name=self.engine_name))
                return
            self.logger.debug("{engine_name} is available, starting!".format(
                engine_name=self.engine_name))

            self.logger.debug("{engine} {url}".format(engine=self.engine_name,
                                                      url=self.base_url))
            async with session.post(self.base_url,
                                    proxy=self.proxy,
                                    data=self.data) as res:
                if res is not None:
                    try:
                        content = await res.text()
                    except Exception:
                        # Best-effort decode: fall back to an empty body.
                        content = ""
                    ret = self.check_response_errors(content)
                    if not ret[0]:
                        self.deal_with_errors(ret[1])
                        # Stop on an engine error instead of extracting from
                        # a bad response, matching the other engines.
                        return

                    self.extract(content)

                    self.logger.debug(self.engine_name + " " +
                                      str(len(self.results['subdomain'])))
Code example #2
File: threatcrowdengine.py — Project: orleven/srcscan
    async def run(self):
        """Query the threatcrowd engine for subdomains of the target.

        Builds the per-target URL, checks it is reachable, rate-limits via
        ``should_sleep``, then GETs the page and runs the shared
        error-check / extract pipeline on the body.
        """
        async with ClientSession() as session:
            url = self.format_base_url(self.target)
            url = await self.check_engine_available(session, url)
            if not url:
                self.logger.error(
                    "{engine_name} is not available, skipping!".format(
                        engine_name=self.engine_name))
                return
            self.logger.debug("{engine_name} is available, starting!".format(
                engine_name=self.engine_name))
            await self.should_sleep()
            self.logger.debug("{engine} {url}".format(engine=self.engine_name,
                                                      url=url))
            async with session.get(url, proxy=self.proxy) as res:
                if res is not None:
                    try:
                        content = await res.text()
                    except Exception:
                        # Best-effort decode: fall back to an empty body.
                        content = ''

                    ret = self.check_response_errors(content)
                    if not ret[0]:
                        self.deal_with_errors(ret[1])
                        return

                    self.extract(content)
                    self.logger.debug(self.engine_name + " " +
                                      str(len(self.results['subdomain'])))
Code example #3
async def go_request(req_list, source):
    """Replay crawled requests through the configured HTTP proxy.

    Each entry of *req_list* is a dict with ``url``, ``method`` and
    ``headers``.  Responses are deliberately discarded: the point is only to
    push the traffic through the proxy (e.g. for Xray passive scanning).
    *source* is the originating URL, used for logging only.
    """
    # Proxy settings are invariant across requests -- read them once.
    proxy = conf['config']['crawlergo']['http_proxy']
    username = conf['config']['crawlergo']['username']
    password = conf['config']['crawlergo']['password']
    if username.strip() and password.strip():
        proxy_auth = BasicAuth(username, password)
    else:
        proxy_auth = None

    async with ClientSession() as session:
        for req in req_list:
            url = req['url']
            logger.debug("Curling %s..." % (url))
            try:
                logger.debug("Xray scan {}, from url {} ".format(url, source))
                async with session.request(req['method'],
                                           url=url,
                                           headers=req['headers'],
                                           proxy=proxy,
                                           proxy_auth=proxy_auth) as res:
                    pass
            except Exception:
                # Best-effort replay: network failures are expected and
                # intentionally ignored.
                pass
Code example #4
    async def run(self):
        """Page through a search engine, extracting subdomains per query.

        Checks availability, then drains ``self.queries`` (a deque of
        ``(query, page_number)`` pairs), clearing cookies before each page to
        avoid tracking, and sleeps between pages to avoid being blocked.
        """
        async with ClientSession() as session:
            flag = await self.check_engine_available(session, self.engine)
            if not flag:
                self.logger.error("{engine_name} is not available, skipping!" .format(engine_name=self.engine_name))
                return
            self.logger.debug("{engine_name} is available, starting!".format(engine_name=self.engine_name))

            self.generate_query()
            while len(self.queries):
                # Fresh cookies per page so the engine cannot correlate us.
                session.cookie_jar.clear()
                (query, self.pre_pageno) = self.queries.popleft()
                self.pre_query = query
                url = self.format_base_url(query, self.pre_pageno)

                self.logger.debug("{engine}: {url}".format(engine=self.engine_name, url=url))
                async with session.get(url, proxy=self.proxy) as res:
                    if res is not None:
                        try:
                            content = await res.text()
                        except Exception:
                            # Best-effort decode: fall back to an empty body.
                            content = ""

                        ret = self.check_response_errors(content)
                        if not ret[0]:
                            self.deal_with_errors(ret[1])
                            break
                        # A successful extraction may enqueue follow-up pages.
                        if self.extract(content):
                            self.generate_query()

                if len(self.queries) > 0:
                    await self.should_sleep()  # avoid being blocked
Code example #5
async def get_title(req_list):
    """Probe each subdomain over http/https and collect basic page info.

    For the first scheme that answers, appends
    ``[subdomain, url, title, status, length]`` to the result; a subdomain
    that answers on neither scheme yields ``[subdomain, "", "", 0, 0]``.
    """
    ret = []
    # Compile once, outside the loop.  The original pattern used a non-raw
    # string with '\/', which only worked because re tolerates the escape.
    title_pattern = re.compile(r'<title>(.*)</title>')
    async with ClientSession() as session:
        for subdomain in req_list:
            try:
                logger.debug("Curling %s..." % (subdomain))
                flag = False
                for pro in ['http://', "https://"]:
                    url = pro + subdomain + '/'
                    async with session.get(url=url) as response:
                        if response is not None:
                            try:
                                res = await response.read()
                            except Exception:
                                res = ""
                            status = response.status
                            # Decode: try UTF-8, then GBK, then give up.
                            try:
                                res = str(res, 'utf-8')
                            except UnicodeDecodeError:
                                res = str(res, 'gbk')
                            except Exception:
                                res = "网页编码错误"

                            m = title_pattern.search(res.lower())
                            if m is not None and m.group(1):
                                title = m.group(1)
                            else:
                                title = '网页没有标题'

                            # Prefer the declared length; fall back to a
                            # rough headers+body estimate.
                            try:
                                length = int(
                                    response.headers['content-length'])
                            except Exception:
                                length = len(str(response.headers)) + len(res)

                            ret.append([subdomain, url, title, status, length])
                            flag = True
                            break
                if not flag:
                    ret.append([subdomain, "", "", 0, 0])
            except Exception as e:
                logger.error(str(e))
    return ret
Code example #6
File: googleengine.py — Project: orleven/srcscan
    async def run(self):
        """Page through the Google Custom Search API for subdomains.

        Requires ``google_api.developer_key`` and ``google_api.search_enging``
        in the configuration; aborts with an error message if either is
        missing.  Drains ``self.queries`` page by page, sleeping between
        requests to avoid being blocked.
        """
        try:
            developer_key = conf['config']['google_api']['developer_key']
            search_enging = conf['config']['google_api']['search_enging']
        except KeyError:
            self.logger.error("Load srcscan config error: google_api, please check the config in tentacle.conf,skipping!")
            return
        async with ClientSession() as session:
            flag = await self.check_engine_available(session, self.engine)
            if not flag:
                self.logger.error("{engine_name} is not available, skipping!"
                                  .format(engine_name=self.engine_name))
                return
            self.logger.debug("{engine_name} is available, starting!"
                             .format(engine_name=self.engine_name))

            self.generate_query()
            while len(self.queries):
                # Fresh cookies per page so the engine cannot correlate us.
                session.cookie_jar.clear()
                (query, self.pre_pageno) = self.queries.popleft()
                self.pre_query = query
                url = self.format_base_url(query, self.pre_pageno, search_enging, developer_key)
                self.logger.debug("{engine} {url}".format(engine=self.engine_name, url=url))

                async with session.get(url, proxy=self.proxy) as res:
                    if res is not None:
                        try:
                            content = await res.text()
                        except Exception:
                            # Best-effort decode: fall back to an empty body.
                            content = ''
                        ret = self.check_response_errors(content)
                        if not ret[0]:
                            self.deal_with_errors(ret[1])
                            break

                        # A successful extraction may enqueue follow-up pages.
                        if self.extract(content):
                            self.generate_query()
                        if len(self.queries) > 0:
                            await self.should_sleep()  # avoid being blocked
                        self.logger.debug("%s for %s: %d" % (self.engine_name, self.target, len(self.results['subdomain'])))
Code example #7
File: netcraftengine.py — Project: orleven/srcscan
    async def run(self):
        """Query the netcraft engine, solving its JS verification first.

        Netcraft sets a ``netcraft_js_verification_challenge`` cookie and
        expects a matching response cookie computed by a JavaScript snippet;
        we fetch that snippet and evaluate it with execjs before paging
        through the search results.
        """
        async with ClientSession() as session:
            # Probe the engine once to obtain the anti-bot challenge cookie.
            async with session.get(self.engine, proxy=self.proxy) as res:
                # NOTE(review): the original had this check inverted
                # (`if res != None: ...error...return`), aborting on a
                # *successful* probe.  Bail out only when the request failed.
                if res is None:
                    self.logger.error(
                        "{engine_name} is not available, skipping!".format(
                            engine_name=self.engine_name))
                    return
                self.logger.debug(
                    "{engine_name} is available, starting!".format(
                        engine_name=self.engine_name))
            try:
                filtered = session.cookie_jar.filter_cookies(self.engine)
                netcraft_js_verification_challenge = filtered[
                    'netcraft_js_verification_challenge'].value

                async with session.get(self.js_url, proxy=self.proxy) as res:
                    if res is not None:
                        try:
                            _js = await res.text()
                        except Exception:
                            return
                        # Inject the challenge into the JS template and let
                        # execjs compute the expected response cookie.
                        cont_js = (_js + self.js_function.replace(
                            "{{netcraft_js_verification_challenge}}",
                            netcraft_js_verification_challenge))
                        s = execjs.compile(cont_js)
                        netcraft_js_verification_response = s.call(
                            'get_netcraft_js_verification_response')
                        cookies = {
                            'netcraft_js_verification_challenge':
                            netcraft_js_verification_challenge,
                            'netcraft_js_verification_response':
                            netcraft_js_verification_response
                        }
                        session.cookie_jar.update_cookies(
                            cookies, URL(self.engine))
            except Exception:
                # Any failure while solving the challenge means we cannot
                # authenticate to the engine -- skip it.
                self.logger.error(
                    "{engine_name} is not available, skipping!".format(
                        engine_name=self.engine_name))
                return
            self.generate_query()
            while len(self.queries):
                (query, self.pre_pageno) = self.queries.popleft()
                self.pre_query = query
                url = self.format_base_url(query, self.pre_pageno)
                self.logger.debug("{engine} {url}".format(
                    engine=self.engine_name, url=url))
                async with session.get(url, proxy=self.proxy) as res:
                    if res is not None:
                        try:
                            content = await res.text()
                        except Exception:
                            # Best-effort decode: fall back to an empty body.
                            content = ''

                        ret = self.check_response_errors(content)
                        if not ret[0]:
                            self.deal_with_errors(ret[1])
                            break

                        # A successful extraction may enqueue follow-up pages.
                        if self.extract(content):
                            self.generate_query()
                        if len(self.queries) > 0:
                            await self.should_sleep()  # avoid being blocked
                        self.logger.debug("%s for %s: %d" %
                                          (self.engine_name, self.target,
                                           len(self.results['subdomain'])))