Example #1
import copy
import json
import logging
import queue
import sys
import threading
import traceback

# BaseScan, CoroutinePool, BaseWebPlugin, BaseHttpPlugin, Crawler and
# gethosts are project-internal and come from the surrounding framework.

class HttpScan(BaseScan):
    def webscan(self):
        # Run every web-plugin payload against the crawled site, with at
        # most three payload verifications in flight at a time.
        pool = CoroutinePool(3)
        for payload in BaseWebPlugin.payloads():
            pool.spawn(self.payloadverify, payload, self.crawle.website)
        pool.join()

    def httpscan(self):
        # Drain the crawler's response queue while the crawler is still
        # running, then keep going until the queue is fully drained.
        while self.crawle.ISSTART or not self.crawle.ResQueue.empty():
            try:
                req, res = self.crawle.ResQueue.get(block=False)
                # print(res.status_code, req.url)
                # Work on copies so plugins cannot mutate the shared objects.
                req = copy.deepcopy(req)
                res = copy.deepcopy(res)
                for payload in BaseHttpPlugin.payloads():
                    try:
                        # Short-circuit chain: report a bug only when both
                        # the filter and verify steps succeed.
                        payload.filter(self.crawle, req, res) \
                            and payload.verify(self.crawle, req, res) \
                            and self.callback_bug(payload)
                    except Exception as e:
                        logging.error(str(e))
            except queue.Empty:
                pass
            except Exception:
                etype, value, tb = sys.exc_info()
                msg = '\n'.join(set(traceback.format_exception(etype, value, tb)))
                logging.error(msg)

    def scan(self):
        headers = json.loads(self.args.get('headers', "{}"))
        proxy = json.loads(self.args.get('proxy', "{}"))
        level = int(self.args.get('level', 1))  # POST scan
        threads = int(self.args.get('threads', 10))
        timeout = int(self.args.get('timeout', 60))
        sleep = int(self.args.get('sleep', 2))

        # Normalize the target into a full URL ending in '/'.
        if not self.target.startswith(('http', 'HTTP')):
            self.target = 'http://' + self.target
        if not self.target.endswith('/'):
            self.target += '/'

        for target in gethosts(self.target):
            self.portscan(target)
        self.crawle = Crawler(self.target, level=level, headers=headers,
                              proxy=proxy, threads=threads, timeout=timeout, sleep=sleep)
        #self.crawle.settings.update(self.args)
        #self.crawle.settings.update(proxy={'http':'http://127.0.0.1:1111','https':'http://127.0.0.1:1111'})
        self.crawle.start()
        th = []
        #th.append(threading.Thread(target=self.crawle.run1))
        th.append(threading.Thread(target=self.webscan))
        th.append(threading.Thread(target=self.httpscan))
        for t in th:
            #t.daemon = True
            t.start()
        for t in th:
            t.join()

        # Scan finished: write the httpret results
        self.writewebsite(self.crawle.website)
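
For context, a minimal sketch of how this scanner might be driven. BaseScan is not shown in the snippet, so the (target, args) constructor signature below is an assumption, illustrative only:

# Hypothetical driver -- BaseScan's constructor is not shown above, so
# this signature is an assumption, not the real API.
scanner = HttpScan(target='example.com',
                   args={'threads': '10', 'timeout': '60', 'level': '1'})
scanner.scan()  # crawls the target, then runs the web and HTTP plugin scans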
Example #2
    def scan(self):
        level = int(self.args.get('level', 1))  # POST scan
        headers = json.loads(self.args.get('headers', "{}"))
        proxy = json.loads(self.args.get('proxy', "{}"))

        # Normalize the target into a full URL ending in '/'.
        if not self.target.startswith(('http', 'HTTP')):
            self.target = 'http://' + self.target
        if not self.target.endswith('/'):
            self.target += '/'

        for target in gethosts(self.target):
            self.portscan(target)
        self.crawle = Crawler(self.target)
        # Pass all scan arguments (level, headers, proxy, ...) through to the crawler.
        self.crawle.settings.update(self.args)
        #self.crawle.settings.update(proxy={'http':'http://127.0.0.1:1111','https':'http://127.0.0.1:1111'})

        th = []
        th.append(threading.Thread(target=self.crawle.run1))
        th.append(threading.Thread(target=self.webscan))
        th.append(threading.Thread(target=self.httpscan))
        for t in th:
            #t.daemon = True
            t.start()
        for t in th:
            t.join()

        # Scan finished: write the httpret results
        self.writewebsite(self.crawle.website)
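
Both variants follow the same producer/consumer pattern: the crawler thread fills a shared response queue while httpscan drains it, stopping only when the crawler has finished and the queue is empty. A self-contained sketch of that pattern, with every name illustrative rather than taken from the snippets:

import queue
import threading
import time

res_queue = queue.Queue()   # stands in for crawler.ResQueue
crawler_running = True      # stands in for crawler.ISSTART

def producer():
    # Simulates the crawler pushing (request, response) pairs onto the queue.
    global crawler_running
    for i in range(5):
        res_queue.put((f'request-{i}', f'response-{i}'))
        time.sleep(0.1)
    crawler_running = False  # crawler done; consumer may finish draining

def consumer():
    # Mirrors httpscan's loop: poll while the producer runs, then drain
    # whatever is left in the queue before exiting.
    while crawler_running or not res_queue.empty():
        try:
            req, res = res_queue.get(block=False)
            print('scanning', req, res)
        except queue.Empty:
            time.sleep(0.05)  # avoid busy-waiting on an empty queue

threads = [threading.Thread(target=producer), threading.Thread(target=consumer)]
for t in threads:
    t.start()
for t in threads:
    t.join()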