def __init__(self, info, bases, instance, parent=None):
    super(SQLiExploiter, self).__init__(parent)
    self._pages_to_exploit = info['links']
    self.exploited = []  # filled like this: [[index, url, type, successful_payload]]
    self.count = 0
    self.total = info['total_crawled']
    self.window = instance
    self.info = bases
    # Mirror the login state gathered during crawling.
    self._logged_in = bool(self.info['logged_in'])
    self._dynamic_urls = info['dynamic']
    self._elapsed = 0
    self.delay = 15
    self._requests = 0
    self.forms = []
    self._browser = browser.RoboBrowser(parser="html.parser", user_agent="WASecBot")
    self._browser.session.verify = False
    self.faults = []
    # Wire worker signals to the UI and the cancel button to pause().
    self.current.connect(self.window.lbltargetsql.setText)
    self.tested.connect(self._show_tested)
    self.update_tsp.connect(self.window.pbsqltotal.setValue)
    self.finish.connect(self.window.after_sqli)
    self.tested.emit(0)
    self.window.btnsqlcancel.clicked.connect(self.pause)
    self.finished = False
    self.running = True
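# A minimal usage sketch, assuming only what the constructor above reads:
# `crawl_result` stands in for the crawler's output dict (keys 'links',
# 'total_crawled', 'dynamic'), `session_info` for the login-state dict
# (key 'logged_in'), and `main_window` is a hypothetical UI object exposing
# lbltargetsql, pbsqltotal, btnsqlcancel, and after_sqli. The start() call
# assumes the class is a QThread-style worker, which the signal wiring suggests
# but the source does not confirm.
#
#     crawl_result = {
#         'links': ['http://testsite.local/page.php?id=1'],
#         'total_crawled': 1,
#         'dynamic': ['http://testsite.local/page.php?id=1'],
#     }
#     session_info = {'logged_in': False}
#     exploiter = SQLiExploiter(crawl_result, session_info, main_window)
#     exploiter.start()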
def __init__(self, info, instance, parent=None):
    super(CrawlerWorker, self).__init__(parent)
    self._instance = instance
    self.running = True
    self.base_url = info['base_url']    # main URL of the website
    self._links_to_crawl = []           # links yet to be opened
    self.crawled_links = {}             # dictionary of all links already opened
    self.__parsed_crawled = {}          # URLs mapped to their HTML pages
    self.total = 0                      # total number of links found
    self.total_crawled = 0              # valid links crawled in the website so far
    self.max_pages = info['max_crawl']  # maximum number of pages to crawl
    self.invalid_links_count = 0        # number of broken links found
    self.invalid_links_list = []        # broken links found
    self.dynamic = []                   # dynamic URLs found while crawling
    self.info = info
    self.login_url = info['login_url']  # login page URL, if available
    # Honor robots.txt when a robots URL was supplied.
    if info['robo_url']:
        self._rb_parser = RobotExclusionRulesParser()
        self._rb_parser.fetch(info['robo_url'])
        self._user_agent = 'WASecBot'
    else:
        self._rb_parser = None
    self.browser = browser.RoboBrowser(parser="html.parser", user_agent="WASecBot")
    self.browser.session.verify = False
    self._logged_in = False
    self._instance.btncrawlcancel.clicked.connect(self.pause)
    self._elapsed = 0
    self.delay = 15
    self._requests = 0
    self.start = None
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
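# A minimal sketch of the `info` dict this constructor expects, based solely on
# the keys read above. The URLs are placeholders, and `window` is a hypothetical
# UI object that must expose a btncrawlcancel button; start() again assumes a
# QThread-style worker.
#
#     info = {
#         'base_url': 'http://testsite.local/',
#         'max_crawl': 100,
#         'login_url': None,  # or 'http://testsite.local/login.php'
#         'robo_url': 'http://testsite.local/robots.txt',  # falsy value skips robots.txt
#     }
#     worker = CrawlerWorker(info, window)
#     worker.start()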
def __begin(self):
    if not self.__empty_fields() and self.__url_good() and not self.dialog_ui:
        self.lblstate.setText("")
        self.info = self.__set_luggage()
        if len(self.tbloginURL.text()) > 0:
            browse = browser.RoboBrowser(parser="html.parser", user_agent="WASecBot")
            self.handel = BeyondLogin(browse)
            self.dialog_ui = Work_LoginDialog(self, parent=self)
            self.after_login.connect(self.close_login)
            self.dialog_ui.show()
        else:
            self.are_you_sure()
def are_you_sure(self):
    choice = QtWidgets.QMessageBox.question(
        self,
        "You left the Login URL box empty!",
        "Are you sure you don't want to log in to the website?\n"
        "3W-Scanner may need to be authenticated to the website"
        " in order to scan it.",
        QtWidgets.QMessageBox.No | QtWidgets.QMessageBox.Yes)
    if choice == QtWidgets.QMessageBox.Yes:
        self.info['login_url'] = None
        self.info['logged_in'] = None
        self.close_login(self.info)
    else:
        browse = browser.RoboBrowser(parser="html.parser", user_agent="WASecBot")
        self.handel = BeyondLogin(browse)
        self.dialog_ui = Work_LoginDialog(self, parent=self)
        self.after_login.connect(self.close_login)
        self.dialog_ui.show()