Example #1
    def scan(self):
        headers = json.loads(self.args.get('headers', "{}"))
        proxy = json.loads(self.args.get('proxy', "{}"))
        level = int(self.args.get('level', 1))  # POST scanning
        threads = int(self.args.get('threads', 10))
        timeout = int(self.args.get('timeout', 60))
        sleep = int(self.args.get('sleep', 2))

        if not self.target.startswith(('http','HTTP')):
            self.target = 'http://' + self.target
        if not self.target.endswith('/'):
            self.target += '/'

        # port-scan every host derived from the normalized target URL
        for target in gethosts(self.target):
            self.portscan(target)
        self.crawle = Crawler(self.target, level=level, headers=headers,
            proxy=proxy, threads=threads, timeout=timeout, sleep=sleep)
        #self.crawle.settings.update(self.args)
        #self.crawle.settings.update(proxy={'http': 'http://127.0.0.1:1111', 'https': 'http://127.0.0.1:1111'})
        self.crawle.start()
        th = []
        # run the web and HTTP scanners concurrently and wait for both
        #th.append(threading.Thread(target=self.crawle.run1))
        th.append(threading.Thread(target=self.webscan))
        th.append(threading.Thread(target=self.httpscan))
        for t in th:
            #t.daemon = True
            t.start()
        for t in th:
            t.join()

        # scan finished: write the httpret results
        self.writewebsite(self.crawle.website)
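
Both examples rely on a gethosts() helper to expand the normalized target URL into the host(s) handed to self.portscan(). Its implementation is not shown on this page; a minimal sketch, assuming it simply yields the hostname parsed out of the URL:

    from urllib.parse import urlparse

    def gethosts(url):
        # Hypothetical sketch only: the real helper is not shown in these
        # examples. Assumes it yields the hostname extracted from the URL.
        host = urlparse(url).hostname
        if host:
            yield host
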
Example #2
    def scan(self):
        level = int(self.args.get('level', 1))  # POST scanning
        headers = json.loads(self.args.get('headers', "{}"))
        proxy = json.loads(self.args.get('proxy', "{}"))

        if not self.target.startswith(('http', 'HTTP')):
            self.target = 'http://' + self.target
        if not self.target.endswith('/'):
            self.target += '/'

        for target in gethosts(self.target):
            self.portscan(target)
        self.crawle = Crawler(self.target)
        # this variant merges the raw args into the crawler settings;
        # the values parsed above are not passed to the constructor
        self.crawle.settings.update(self.args)
        #self.crawle.settings.update(proxy={'http': 'http://127.0.0.1:1111', 'https': 'http://127.0.0.1:1111'})

        th = []
        # run the crawler, web scanner and HTTP scanner concurrently
        th.append(threading.Thread(target=self.crawle.run1))
        th.append(threading.Thread(target=self.webscan))
        th.append(threading.Thread(target=self.httpscan))
        for t in th:
            #t.daemon = True
            t.start()
        for t in th:
            t.join()

        # scan finished: write the httpret results
        self.writewebsite(self.crawle.website)
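
The two variants drive the same Crawler object differently: Example #1 passes the parsed options to the constructor and calls start() before launching the scanner threads, while Example #2 constructs a bare crawler, merges the raw args into its settings, and runs run1() on its own worker thread alongside the scanners. A minimal stub of the interface both snippets assume (method and attribute names are taken from the calls above; the real class is not shown):

    class Crawler:
        # Hypothetical stub; documents only the surface the scan() methods touch.
        def __init__(self, target, **options):
            self.target = target
            self.settings = dict(options)  # Example #2 merges the raw args in here
            self.website = {}              # filled during the crawl, later written out

        def start(self):
            # crawl invoked directly in Example #1
            raise NotImplementedError

        def run1(self):
            # crawl entry point Example #2 runs on a worker thread
            raise NotImplementedError
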