def __get_whois(self):
    """
    Description:
    ------------
    This method is used to get the who-is information of the remote server

    :return: None
    """
    self.__thread_semaphore.acquire()
    print("[+] GETTING WHO-IS")
    whois = WhoIs(domain=self.__domain).get_whois()
    if whois is not None:
        file_location = "projects/project-" + str(self.__project_id) + "/whois.json"
        File.write(file_location=file_location, content=whois)
    self.__thread_semaphore.release()
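
# The WhoIs helper called above is not shown in this section. Below is a
# minimal, hypothetical sketch of how it could be implemented, assuming a
# plain RFC 3912 query over TCP port 43. The class and method names mirror
# the call site, but the registry server choice and the JSON shape are
# assumptions, not the project's actual implementation.
import json
import socket


class WhoIs:
    def __init__(self, domain):
        self.__domain = domain

    def get_whois(self, server="whois.iana.org"):
        """Return the raw whois response as a JSON string, or None on failure."""
        try:
            with socket.create_connection((server, 43), timeout=10) as sock:
                sock.sendall((self.__domain + "\r\n").encode())
                response = b""
                while True:
                    chunk = sock.recv(4096)
                    if not chunk:
                        break
                    response += chunk
            return json.dumps({"domain": self.__domain,
                               "raw": response.decode(errors="replace")})
        except OSError:
            return None
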
def __init__(self, project_id, url, thread_semaphore, database_semaphore, connection):
    """
    :param project_id: The id of the project
    :param url: The website for which the administrator page is to be found
    :param thread_semaphore: The semaphore that limits the number of concurrent threads
    :param database_semaphore: The semaphore that serialises writes to the database
    :param connection: The database connection used to record the results
    """
    self.__project_id = project_id
    self.__url = url
    self.__thread_semaphore = thread_semaphore
    self.__database_semaphore = database_semaphore
    self.__connection = connection
    self.__threads = []
    self.__admin_pages = []
    admin_contents = File.read_to_list("admin.txt")
    for admin_page in tqdm(admin_contents, ncols=100):
        self.__thread_semaphore.acquire()
        admin_url = URL.join_urls(self.__url, admin_page)
        t = Thread(target=self.add_if_page_found, args=(admin_url,))
        t.start()
        # Keep a handle on every worker so we can join them below
        self.__threads.append(t)
    print("[+] WAITING FOR THE THREADS TO COMPLETE THEIR TASKS")
    for thread in self.__threads:
        if thread.is_alive():
            thread.join()
    # Now display and add the admin pages to the database table named "admin_table"
    for admin_page in self.__admin_pages:
        print("[+] ADMIN PAGE: ", admin_page)
        self.update_admin_page(project_id=project_id,
                               url=admin_page,
                               connection=self.__connection,
                               database_semaphore=self.__database_semaphore)
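
# add_if_page_found is the per-URL worker started above but is not shown in
# this section. A minimal, hypothetical sketch (living inside the same class)
# could reuse the URL and UserAgent helpers seen elsewhere in this code:
# record any URL that answers 200 and release the thread semaphore that
# __init__ acquired before spawning the thread.
def add_if_page_found(self, admin_url):
    try:
        r = URL().get_head_request(url=admin_url,
                                   user_agent=UserAgent.get_user_agent())
        # list.append is atomic under CPython's GIL, so no extra lock is needed
        if r is not None and r.status_code == 200:
            self.__admin_pages.append(admin_url)
    finally:
        # Release even on request failure so the pool is never starved
        self.__thread_semaphore.release()
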
def __get_robots(self):
    """
    Description:
    ------------
    This method is used to get the robots.txt file from the remote server

    :return: None
    """
    self.__thread_semaphore.acquire()
    robots_url = URL.join_urls(self.__url, "/robots.txt")
    print("[+] GETTING ROBOTS.TXT AT ", robots_url)
    # Probe the robots.txt URL itself (not the site root) before downloading
    r = URL().get_head_request(url=robots_url, user_agent=UserAgent.get_user_agent())
    if r is not None:
        if r.status_code == 200:
            robots_file_location = "projects/project-" + str(self.__project_id) + "/robots.txt"
            File.download_file(local_file_location=robots_file_location,
                               remote_file_location=robots_url)
        else:
            print("[-] NO robots.txt FOUND IN THE SERVER")
    self.__thread_semaphore.release()
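
# File.download_file is called above but not shown in this section. A
# minimal, hypothetical sketch using requests could stream the remote file
# to disk; the chunk size and timeout below are arbitrary assumptions.
import requests


class File:
    @staticmethod
    def download_file(local_file_location, remote_file_location):
        try:
            with requests.get(remote_file_location, stream=True, timeout=10) as r:
                r.raise_for_status()
                with open(local_file_location, "wb") as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
        except requests.RequestException as e:
            print("[-] FAILED TO DOWNLOAD", remote_file_location, ":", e)
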
def __init__(self, project_id, thread_semaphore, database_semaphore, webserver_name, programming_language):
    """
    :param project_id: The id for the project
    :param thread_semaphore: The semaphore that limits the number of threads used for the project
    :param database_semaphore: The semaphore that serialises writes to the database
    :param webserver_name: The name of the webserver
    :param programming_language: The programming language used by the target website
    """
    print("[+] PHASE - 3 ANALYSIS HAS BEEN STARTED")
    self.__project_id = project_id
    self.__thread_semaphore = thread_semaphore
    self.__database_semaphore = database_semaphore
    self.__webserver_name = webserver_name
    self.__programming_language = programming_language
    # Now get the exploits
    self.__get_info_from_exploit_db()
    # and store a local copy of the exploits
    print("[+] SAVING A LOCAL COPY OF EXPLOITS PRESENT IN THE WEBSITE")
    exploit_copy_loc = "projects/project-" + str(project_id) + "/exploits.txt"
    File.write_to_list(exploit_copy_loc, self.__exploits)
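
# __get_info_from_exploit_db is invoked above but not shown in this section.
# A minimal, hypothetical sketch (inside the same class), assuming the
# Exploit-DB "searchsploit" CLI is installed locally and supports JSON output
# via -j (true of recent versions); the "RESULTS_EXPLOIT"/"Title" keys and
# the shape of self.__exploits are assumptions, not the project's actual code.
import json
import subprocess


def __get_info_from_exploit_db(self):
    self.__exploits = []
    # Search Exploit-DB for both fingerprints gathered in earlier phases
    for term in (self.__webserver_name, self.__programming_language):
        if not term:
            continue
        try:
            raw = subprocess.check_output(["searchsploit", "-j", term], text=True)
            for hit in json.loads(raw).get("RESULTS_EXPLOIT", []):
                self.__exploits.append(hit.get("Title", ""))
        except (OSError, subprocess.CalledProcessError, ValueError):
            print("[-] COULD NOT QUERY searchsploit FOR", term)
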