def start(self, call):
    c = self.__check_call(call)
    if c == 1:  # Search option.
        if self.__is_valid_url():
            self.__check_modules()
            url_list = URLlist()
            db = DBAdapter()
            process = db.new_process(self.url, self.user, 1, 1)  # Status: 1, processing.
            db.close_connection()
            if process == 0:  # The user already has a search in progress.
                return {"response": False}
            url_list.put_url(self.url)
            for action in self.actions:  # Walk through the modules requested by the API.
                if action['module']:  # Run the module only if it is active.
                    if action['number'] == 1:
                        from backend.modules.crawler.module import main
                        url_list = main(process, self.url)
                    elif action['number'] == 2:
                        from backend.modules.sqlinjection.module import main
                        main(url_list, process, self.user)
                    elif action['number'] == 3:
                        from backend.modules.csrf.module import main
                        main(url_list, process, self.user)
                    else:
                        continue
            db = DBAdapter()
            db.update_process(process, 5)  # Status: 5, finished.
            db.close_connection()
    elif c == 2:  # Get status option.
        db = DBAdapter()
        process = db.get_current_process_status(self.user)
        db.close_connection()
        if process is None:
            return {"response": False}
        data = {
            "web": process[1],
            "date": process[2],
            "stype": process[0],
            "status": process[3]
        }
        return data
    else:  # Wrong call.
        return {"response": False}
    return True  # If we get here, everything went right.
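# Illustrative sketch (assumed, not taken from the codebase) of the payload that
# self.actions is expected to hold, inferred from the dispatch loop in start():
# each entry pairs a module id ('number': 1 crawler, 2 sqlinjection, 3 csrf)
# with a 'module' flag saying whether that module is active for this search.
example_actions = [
    {"number": 1, "module": True},   # crawler runs and replaces url_list
    {"number": 2, "module": True},   # sqlinjection runs against the crawled URLs
    {"number": 3, "module": False},  # csrf is skipped because 'module' is falsy
]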
class Crawler(object):
    def __init__(self, process, root):
        self.queue = URLlist()
        self.queue.put_url(root)
        self.final_list = URLlist()
        self.final_list.put_url(root)
        db = DBAdapter()
        db.update_process(process, 2)  # Status: 2, crawling.
        db.close_connection()

    def crawl(self):
        while True:
            url = self.queue.get_url()
            if url is None:  # Queue exhausted: the crawl is finished.
                break
            self.__fetch_urls(url.get_url())
        return self.final_list

    def __fetch_urls(self, url):
        # Fetch the page and enqueue every link found, keeping a copy in the final list.
        page = Fetcher(url)
        response = page.fetch()
        for link in response:
            self.queue.put_url(link)
            self.final_list.put_url(link)
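# Minimal sketch (an assumption, not the project's actual implementation) of the
# URLlist interface used above: put_url() enqueues a URL string wrapped in a small
# holder object, and get_url() dequeues it, returning None when the queue is empty,
# which is the sentinel crawl() relies on to stop. The _URL wrapper is hypothetical.
from collections import deque


class _URL(object):
    """Hypothetical wrapper so queued items expose get_url(), as crawl() expects."""
    def __init__(self, url):
        self._url = url

    def get_url(self):
        return self._url


class URLlist(object):
    def __init__(self):
        self._queue = deque()

    def put_url(self, url):
        # Wrap raw strings in _URL; already-wrapped items pass through unchanged.
        self._queue.append(url if isinstance(url, _URL) else _URL(url))

    def get_url(self):
        return self._queue.popleft() if self._queue else None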