def do_use(self, line):
    """
        This command will define which of the framework's utilities will be
        loaded. The available options are the following:
            - domainfy
            - entify
            - mailfy
            - phonefy
            - searchfy
            - usufy
        For example, type 'use usufy' to load the usufy util. You can always
        use the <TAB> to be helped using the autocomplete options.
    """
    # Reject anything that is not a known utility name
    if line not in UTILS:
        print(general.warning("[!] Util is not correct. Try 'help use' to check the available options."))
        return False

    # Dispatch table mapping each utility name to its interactive console
    known_consoles = {
        "domainfy": OSRFConsoleDomainfy,
        "entify": OSRFConsoleEntify,
        "mailfy": OSRFConsoleMailfy,
        "phonefy": OSRFConsolePhonefy,
        "searchfy": OSRFConsoleSearchfy,
        "usufy": OSRFConsoleUsufy,
    }

    console_class = known_consoles.get(line)
    if console_class is not None:
        console_class().cmdloop()
    else:
        # A valid util name without an interactive console yet
        print(general.warning("[!] Not implemented yet. Try 'help use' to check the available options."))
def getConfiguration(configuration_file="server.cfg", package="osrframework_server", section="osrframework_server"):
    """
        Method that recovers the configuration information osrframework_server.

        If the configuration file does not exist yet in the user's
        configuration folder, the default file shipped inside the package is
        copied there first.

        TODO: Grab the default file from the package data instead of storing
        it in the main folder.

        Args:
        -----
            configuration_file: The configuration file to be read.
            package: The package name where the default files are stored.
            section: The section's name that will be read.

        Returns:
        --------
            A dictionary containing the default configuration.
    """
    VALUES = {}

    # If a api_keys.cfg has not been found, creating it by copying from default
    configPath = os.path.join(configuration.getConfigPath()["appPath"], configuration_file)

    # Checking if the configuration file exists
    if not os.path.exists(configPath):
        print(
            warning(
                "[*] No configuration files found for OSRFramework Server."))
        print(
            warning(
                "[*] Copying default configuration files to OSRFramework configuration folder at '" + configPath + "'."))

        # Copy the data from the default folder bundled with the package
        defaultConfigPath = pkg_resources.resource_filename(
            package, os.path.join('config', configuration_file))
        copyfile(defaultConfigPath, configPath)

    # Reading the configuration file
    config = ConfigParser.ConfigParser()
    config.read(configPath)

    # Iterating through all the sections looking for the requested one
    # (FIX: removed the unused `incomplete = False` local from the loop)
    for s in config.sections():
        if s.lower() == section.lower():
            # Iterating through parameters, keeping only non-empty values
            for (param, value) in config.items(s):
                if value != '':
                    VALUES[param] = value

    return VALUES
def weCanCheckTheseDomains(email):
    """
        Method that verifies if a domain can be safely verified.

        Args:
        -----
            email: the email whose domain will be verified.

        Returns:
        --------
            bool: it represents whether the domain can be verified.
    """
    # Known platform not to be working...
    # FIX: the original list was missing a comma after "@ukr.net", so Python
    # concatenated it with "@yahoo." into the bogus entry "@ukr.net@yahoo."
    # and neither domain was ever blacklisted.
    notWorking = [
        "@aol.com",
        "@bk.ru",
        "@breakthru.com",
        "@gmx.",
        "@hotmail.co",
        "@inbox.com",
        "@latinmail.com",
        "@libero.it",
        "@mail.ru",
        "@mail2tor.com",
        "@outlook.com",
        "@rambler.ru",
        "@rocketmail.com",
        "@starmedia.com",
        "@ukr.net",
        "@yahoo.",
        "@ymail."
    ]

    for n in notWorking:
        if n in email:
            print(general.warning("WARNING: the domain of '" + email + "' has been blacklisted by mailfy.py as it CANNOT BE VERIFIED."))
            return False

    # Warn (but still allow) addresses whose domain is not in the known list
    safe = any(e in email for e in EMAIL_DOMAINS)
    if not safe:
        print(general.warning("WARNING: the domain of '" + email + "' will not be safely verified."))

    return True
def pool_function(args):
    """
        A wrapper for being able to launch all the threads.

        We will use python-emailahoy library for the verification in
        non-Windows systems as it is faster than validate_email. In Windows
        systems the latter is preferred.

        Args:
        -----
            args: reception of the parameters for getPageWrapper as a tuple.

        Returns:
        --------
            A dictionary representing whether the verification was ended
            successfully. The format is as follows:
            ```
            {"platform": "str(domain["value"])", "status": "DONE", "data": aux}
            ```
    """
    # Python 2 code ("except Exception, e" syntax below).
    is_valid = True
    try:
        # validate_email is the only verifier that behaves on Windows;
        # emailahoy is faster elsewhere.
        if sys.platform == 'win32':
            is_valid = validate_email.validate_email(args, verify=True)
        else:
            is_valid = emailahoy.verify_email_address(args)
    except Exception, e:
        # Best-effort: any verification error is reported and treated as
        # "not valid" rather than aborting the whole pool.
        print(
            general.warning(
                "WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
        is_valid = False
    # NOTE(review): despite the docstring, no value is returned here — the
    # function body appears truncated in this view; confirm against the
    # original module before relying on its return value.
def pool_function(args):
    """
        A wrapper for being able to launch all the threads.

        We will use python-emailahoy library for the verification.

        Args:
        -----
            args: reception of the parameters for getPageWrapper as a tuple.

        Returns:
        --------
            A dictionary representing whether the verification was ended
            successfully. The format is as follows:
            ```
            {"platform": "str(domain["value"])", "status": "DONE", "data": aux}
            ```
    """
    # Python 2 code ("except Exception, e" syntax below).
    is_valid = True
    try:
        # SMTP-level verification; 250 is the SMTP "requested action OK"
        # reply code, used here as the success criterion.
        checker = emailahoy.VerifyEmail()
        status, message = checker.verify_email_smtp(args, from_host='gmail.com', from_email='*****@*****.**')
        if status == 250:
            print("\t[*] Verification of '{}' status: {}. Details:\n{}".format(general.success(args), general.success("SUCCESS ({})".format(str(status))), message))
            is_valid = True
        else:
            print("\t[*] Verification of '{}' status: {}. Details:\n{}".format(general.error(args), general.error("FAILED ({})".format(str(status))), message))
            is_valid = False
    except Exception, e:
        # Best-effort: report the error and mark the address as not valid.
        print(general.warning("WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
        is_valid = False
    # NOTE(review): despite the docstring, no value is returned here — the
    # function body appears truncated in this view; confirm against the
    # original module before relying on its return value.
def check_mailfy(self, query, **kwargs):
    """Verifying a mailfy query in this platform

    This might be redefined in any class inheriting from Platform. The only
    condition is that any of this should return a dictionary as defined.

    Args:
        query (str): The element to be searched.

    Returns:
        String. Returns the collected data if exists or None if not.
    """
    # FIX: removed the unused `import re` — nothing in this method uses it.
    import requests

    s = requests.Session()

    # Getting the first response to grab the csrf_token
    resp = s.get(f"http://keys.gnupg.net/pks/lookup?search={query}")
    if resp.status_code == 200 or resp.status_code == 404:
        # A literal " 0 keys found.." body means no key matches the query
        if ' 0 keys found..' in resp.text:
            return None
        else:
            return resp.text
    else:
        # Any other status code is treated as an error and reported
        print(
            general.warning(
                f"\t[*] Something happened. keyserver.io returned status '{resp.status_code}' for '{query}'."
            ))
        return None
def getInfo(self, query=None, process=False, mode="usufy"): ''' Method that checks the presence of a given query and recovers the first list of complains. :param query: Phone number to verify. :param proces: Calling the processing function. :param mode: Mode to be executed. :return: Python structure for the html processed. ''' # Defining variables for this process results = [] data = "" if not self.modeIsValid(mode=mode): # TO-DO: InvalidModeException return json.dumps(results) try: logger = logging.getLogger("osrframework.wrappers") # Verifying if the nick is a correct nick if self._isValidQuery(query, mode): logger.debug("Starting Skype client...") logger.warning( "A Skype client must be set up... Note that the program will need a valid session of Skype having been started. If you were performing too many searches, the server may block or ban your account depending on the ToS. Please run this program under your own responsibility." ) # Instantiate Skype object, all further actions are done # using this object. # Dealing with UTF8 import codecs import sys UTF8Writer = codecs.getwriter('utf8') sys.stdout = UTF8Writer(sys.stdout) # Search for users and display their Skype name, full name # and country. #print "In skype.py, before sending the query: '" + query + "'" data = skype.checkInSkype(query) #print "In skype.py, printing the 'data' variable:\n" + json.dumps(data, indent=2) except Exception as e: print( general.warning( "[!] In skype.py, exception caught when checking information in Skype!\n" )) # No information was found, then we return a null entity return json.dumps(results) # Verifying if the platform exists if mode == "usufy": for user in data: if user["value"] == "Skype - " + query.lower(): results.append(user) elif mode == "searchfy": results = data #print "In skype.py, printing the 'results' variable:\n" + json.dumps(results, indent=2) return json.dumps(results)
def weCanCheckTheseDomains(email):
    """
        Method that verifies if a domain can be safely verified.

        Args:
        -----
            email: the email whose domain will be verified.

        Returns:
        --------
            bool: it represents whether the domain can be verified.
    """
    # Known platform not to be working...
    # FIX: the original list was missing a comma after "@ukr.net", so Python
    # concatenated it with "@yahoo." into the bogus entry "@ukr.net@yahoo."
    # and neither domain was ever blacklisted.
    notWorking = [
        "@aol.com",
        "@bk.ru",
        "@breakthru.com",
        "@gmx.",
        "@hotmail.co",
        "@inbox.com",
        "@latinmail.com",
        "@libero.it",
        "@mail.ru",
        "@mail2tor.com",
        "@outlook.com",
        "@rambler.ru",
        "@rocketmail.com",
        "@starmedia.com",
        "@ukr.net",
        "@yahoo.",
        "@ymail."
    ]

    for n in notWorking:
        if n in email:
            print(
                "\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(
                    general.warning(email),
                    "This domain CANNOT be verified using mailfy."))
            return False

    # The address is only verifiable when its domain is in the known list
    # (FIX: dropped the dead `emailDomains` alias the original never used)
    safe = any(domain in email for domain in EMAIL_DOMAINS)
    if not safe:
        print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(
            general.warning(email),
            "This domain CANNOT be verified using mailfy."))
        return False
    return True
def __init__(self, platform, *args, **kwargs):
    """Exception raised when no valid credentials exist for a platform.

    Args:
        platform: Name of the platform whose credentials are missing.
    """
    # FIX: the original passed a fourth argument
    # (general.emphasis("-x " + platform)) to .format() although the template
    # only has three placeholders, so it was silently ignored; it has been
    # removed.
    msg = """
[*] Warning:\t{}. Details:

No valid credentials provided for '{}'.

Update the configuration file at: '{}'.
""".format(
        self.__class__.__name__,
        platform,
        os.path.join(configuration.getConfigPath()["appPath"], "accounts.cfg"))
    OSRFrameworkException.__init__(self, general.warning(msg))
    # FIX: corrected the "where NOT provided" typo in the generic message.
    self.generic = "The credentials for some platforms were NOT provided."
def createCommandLine(self):
    """
        Method to build the command line command

        This method will build the command to run the same actions defined
        in using this tool.

        Returns:
        --------
            String: the string to type in the terminal
    """
    # Without every required parameter set, fall back to the help option
    if not self._checkIfRequiredAreSet():
        return self.UNAME + " -h" + general.warning(
            " # Option '-h' shown since not all the required parameters are set. "
        )

    # Assemble the utility name followed by each configured parameter
    pieces = [self.UNAME]
    pieces.extend(self._getParams())
    return " ".join(pieces)
def performSearch(emails=[], nThreads=16, secondsBeforeTimeout=5):
    """
        Method to perform the mail verification process.

        Args:
        -----
            emails: list of emails to be verified.
            nThreads: the number of threads to be used. Default: 16 threads.
            secondsBeforeTimeout: number of seconds to wait before raising a
                timeout. Default: 5 seconds.

        Returns:
        --------
            The results collected.
    """
    # Getting starting time
    _startTime = time.time()

    def hasRunOutOfTime(oldEpoch):
        """
            Function that checks if a given time has passed.

            It verifies whether the oldEpoch has passed or not by checking
            if the seconds passed are greater.

            Arguments
            ---------
                oldEpoch: Seconds passed since 1970 as returned by
                    `time.time()`.

            Returns
            -------
                A boolean representing whether it has run out of time.
        """
        now = time.time()
        return now - oldEpoch >= secondsBeforeTimeout

    results = []
    args = []

    # Grabbing all the emails that would be validated
    for e in emails:
        if weCanCheckTheseDomains(e):
            args.append((e))

    # Returning an empty list if no valid domain has been returned
    if len(args) == 0:
        return results

    # If the process is executed by the current app, we use the Processes. It is faster than pools.
    if nThreads <= 0 or nThreads > len(args):
        nThreads = len(args)

    # Launching the Pool
    # ------------------
    # Example catched from: https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
    try:
        original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
        pool = Pool(nThreads)
        signal.signal(signal.SIGINT, original_sigint_handler)
    except ValueError:
        # To avoid: ValueError: signal only works in main thread
        pool = Pool(nThreads)

    poolResults = []

    try:
        def log_result(result):
            # This is called whenever pool_function returns a result.
            # poolResults is modified only by the main process, not the workers.
            poolResults.append(result)

        for m in emails:
            # We need to create all the arguments that will be needed
            parameters = (m, )
            pool.apply_async(pool_function, args=parameters, callback=log_result)

        # Waiting for results to be finished or time to pass
        while len(poolResults) < len(emails) and not hasRunOutOfTime(_startTime):
            pass

        # Closing normal termination
        pool.close()
    except KeyboardInterrupt:
        print(
            general.warning(
                "\n[!] Process manually stopped by the user. Terminating workers.\n"
            ))
        pool.terminate()

        pending = ""
        print(general.warning("[!] The following platforms were not processed:"))
        for m in emails:
            processed = False
            for result in poolResults:
                if str(m) in json.dumps(result["data"]):
                    processed = True
                    break
            if not processed:
                # FIX: the original printed `str(p)`, an undefined name that
                # raised a NameError while handling the interrupt; `m` is the
                # email being reported.
                print("\t- " + str(m))
                pending += " " + str(m)

        print("\n")
        print(
            general.warning(
                "If you want to relaunch the app with these platforms you can always run the command with: "
            ))
        print("\t mailfy.py ... -p " + general.emphasis(pending))
        print("\n")
        print(
            general.warning(
                "If you prefer to avoid these platforms you can manually evade them for whatever reason with: "
            ))
        print("\t mailfy.py ... -x " + general.emphasis(pending))
        print("\n")

    pool.join()

    # Processing the results
    # ----------------------
    for serArray in poolResults:
        data = serArray["data"]
        # We need to recover the results and check if they are not an empty json or None
        if data != None and data != {}:
            results.append(data)

    pool.close()
    return results
def pool_function(args):
    """A wrapper for being able to launch all the threads

    We will use python-emailahoy library for the verification.

    Args:
        args: reception of the parameters for getPageWrapper as a tuple.

    Returns:
        dict. A dictionary representing whether the verification was ended
        successfully. The format is as follows:
        ```
        {"platform": "str(domain["value"])", "status": "DONE", "data": aux}
        ```
    """
    def confirm_found(email, status):
        """Method that confirms the existence of an email

        This method is needed since different email providers may behave
        differently.

        Args:
            email (str): The email to search.
            status (int): emailahoy3 returned status.

        Returns:
            bool.
        """
        # Providers where a status of 1 confirms the address exists
        ok_is_1 = [
            "protonmail.ch",
            "protonmail.com",
        ]
        for domain in ok_is_1:
            if domain in email and status == 1:
                return True

        # Providers where a status of 0 confirms the address exists.
        # FIX: the original tested `args in ok_is_0` (never true for a full
        # email address) and then reused the stale `domain` variable left
        # over from the previous loop, so these providers could never be
        # confirmed. Iterate the list like the ok_is_1 case.
        ok_is_0 = [
            "ya.ru",
            "yandex.com",
        ]
        for domain in ok_is_0:
            if domain in email and status == 0:
                return True

        return False

    try:
        status = verify_email_address(args)
        is_valid = confirm_found(args, status)
        if is_valid:
            print(
                f"\t[*] Verification of '{general.success(args)}' status: {general.success(f'Email found ({status})')}"
            )
        else:
            print(
                f"\t[*] Verification of '{general.error(args)}' status: {general.error(f'Email not found ({status})')}"
            )
    except Exception as e:
        print(
            general.warning(
                "WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
        is_valid = False

    # Build the i3visio entity for the email and its platform
    entities = general.expand_entities_from_email(args)
    platform = entities[2]["value"].title()

    if is_valid:
        aux = {}
        aux["type"] = "com.i3visio.Profile"
        aux["value"] = "Email - " + args
        aux["attributes"] = entities
        aux["attributes"].append({
            "type": "com.i3visio.Platform",
            "value": platform,
            "attributes": []
        })
        return {"platform": platform, "status": "DONE", "data": aux}
    else:
        return {"platform": platform, "status": "ERROR", "data": {}}
def processNickList(nicks, platforms=None, rutaDescarga="./", avoidProcessing=True, avoidDownload=True, nThreads=12, maltego=False, verbosity=1, logFolder="./logs"):
    '''
        Method that receives as a parameter a series of nicks and verifies
        whether those nicks have a profile associated in different social
        networks.

        :param nicks: list of nicks to process.
        :param platforms: list of <Platform> objects to be processed.
        :param rutaDescarga: local file where saving the obtained information.
        :param avoidProcessing: boolean var that defines whether the profiles
            will NOT be processed.
        :param avoidDownload: boolean var that defines whether the profiles
            will NOT be downloaded (stored in this version).
        :param nThreads: number of threads to be used.
        :param maltego: parameter to tell usufy.py that it has been invoked
            by Maltego.
        :param verbosity: the level of verbosity to be used.
        :param logFolder: the path to the log folder.

        :return: Returns a dictionary where the key is the nick and the value
            another dictionary where the keys are the social networks and the
            value is the corresponding URL.
    '''
    osrframework.utils.logger.setupLogger(loggerName="osrframework.usufy", verbosity=verbosity, logFolder=logFolder)
    logger = logging.getLogger("osrframework.usufy")

    if platforms == None:
        platforms = platform_selection.getAllPlatformNames("usufy")

    # Defining the output results variable
    res = []

    # Processing the whole list of terms...
    for nick in nicks:
        logger.info("Looking for '" + nick + "' in " + str(len(platforms)) + " different platforms:\n" + str([str(plat) for plat in platforms]))

        # If the process is executed by the current app, we use the Processes. It is faster than pools.
        if nThreads <= 0 or nThreads > len(platforms):
            nThreads = len(platforms)

        logger.info("Launching " + str(nThreads) + " different threads...")

        # Using threads in a pool if we are not running the program in main
        # Example catched from: https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
        try:
            original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
            pool = Pool(nThreads)
            signal.signal(signal.SIGINT, original_sigint_handler)
        except ValueError:
            # To avoid: ValueError: signal only works in main thread
            pool = Pool(nThreads)

        poolResults = []
        try:
            def log_result(result):
                # This is called whenever pool_function returns a result.
                # poolResults is modified only by the main process, not the workers.
                poolResults.append(result)

            for plat in platforms:
                # We need to create all the arguments that will be needed
                parameters = (plat, nick, rutaDescarga, avoidProcessing, avoidDownload, )
                pool.apply_async(pool_function, args=parameters, callback=log_result)

            # Waiting for results to be finished
            while len(poolResults) < len(platforms):
                pass

            # Closing normal termination
            pool.close()
        except KeyboardInterrupt:
            print(general.warning("\n[!] Process manually stopped by the user. Terminating workers.\n"))
            pool.terminate()
            print(general.warning("[!] The following platforms were not processed:"))
            pending = ""
            for p in platforms:
                processed = False
                for processedPlatform in poolResults:
                    if str(p) == processedPlatform["platform"]:
                        processed = True
                        break
                if not processed:
                    # FIX: the original read `print()"\t- " + str(p))`, a
                    # syntax error caused by a stray closing parenthesis.
                    print("\t- " + str(p))
                    pending += " " + str(p).lower()

            print("\n")
            print("[!] If you want to relaunch the app with these platforms you can always run the command with: ")
            print("\t usufy.py ... -p " + pending)
            print("\n")
            print("[!] If you prefer to avoid these platforms you can manually evade them for whatever reason with: ")
            print("\t usufy.py ... -x " + pending)
            print("\n")

        pool.join()

        profiles = []

        # Processing the results
        # ----------------------
        for serArray in poolResults:
            data = serArray["data"]
            # We need to recover the results and check if they are not an empty json or None
            if data != None:
                array = json.loads(data)
                for r in array:
                    if r != "{}":
                        profiles.append(r)
        res += profiles
    return res
def verify_with_emailahoy_step_1(emails=[], num_threads=16, seconds_before_timeout=5):
    """Method to perform the mail verification process

    Args:
        emails (list): list of emails to be verified.
        num_threads (int): the number of threads to be used. Default: 16
            threads.
        seconds_before_timeout (int): number of seconds to wait before raising
            a timeout. Default: 5 seconds.

    Returns:
        The results collected.
    """
    results = []
    args = []

    # Grabbing all the emails that would be validated
    for email in emails:
        if email_is_verifiable(email):
            args.append((email))

    # Returning an empty list if no valid domain has been returned
    if len(args) == 0:
        return results

    # If the process is executed by the current app, we use the Processes. It is faster than pools.
    if num_threads <= 0 or num_threads > len(args):
        num_threads = len(args)

    # Launching the Pool
    # ------------------
    # Example catched from: https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
    try:
        original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
        pool = Pool(num_threads)
        signal.signal(signal.SIGINT, original_sigint_handler)
    except ValueError:
        # To avoid: ValueError: signal only works in main thread
        pool = Pool(num_threads)

    pool_results = []

    def log_result(result):
        """Callback to log the results by apply_async"""
        pool_results.append(result)

    for email in emails:
        parameters = (email, )
        res = pool.apply_async(pool_function, args=parameters, callback=log_result)
        try:
            res.get(3)
        except TimeoutError:
            # FIX: the original built the warning string with general.warning
            # but never printed it, so timeouts were silent.
            print(general.warning(f"\n[!] Process timeouted for '{parameters}'.\n"))

    pool.close()
    pool.join()

    # Processing the results
    # ----------------------
    # (FIX: removed the redundant second pool.close() the original issued
    # after pool.join(); the pool is already closed at that point.)
    for ser_array in pool_results:
        data = ser_array["data"]
        # We need to recover the results and check if they are not an empty json or None
        if data is not None and data != {}:
            results.append(data)

    return results
def __init__(self, msg, *args, **kwargs):
    # Base constructor for OSRFramework exceptions: wraps the given message
    # with the framework's warning styling before delegating to Exception.
    Exception.__init__(self, general.warning(msg))
    # Fallback human-readable description available to all subclasses.
    self.generic = "Generic OSRFramework exception."
def main(params=None):
    """Main function to launch domainfy

    (NOTE: the original docstring said "phonefy", but this function prints the
    Domainfy banner and works over TLDs/domains.)

    The function is created in this way so as to let other applications make
    use of the full configuration capabilities of the application. The
    parameters received are used as parsed by this modules `get_parser()`.

    Args:
        params: A list with the parameters as grabbed by the terminal. It is
            None when this is called by an entry_point. If it is called by
            osrf the data is already parsed.

    Returns:
        list: Returns a list with i3visio entities.
    """
    if params is None:
        parser = get_parser()
        # parse_args(None) falls back to sys.argv when run as an entry_point
        args = parser.parse_args(params)
    else:
        # Caller (osrf) already parsed the arguments
        args = params

    results = []

    if not args.quiet:
        print(general.title(banner.text))

        saying_hello = f"""
     Domainfy | Copyright (C) Yaiza Rubio & Félix Brezo (i3visio) 2014-2020

This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit <{general.LICENSE_URL}>.
"""
        print(general.info(saying_hello))

    if args.license:
        general.showLicense()
    else:
        # Processing the options returned to remove the "all" option
        tlds = []
        if "all" in args.tlds:
            for type_tld in TLD.keys():
                for tld in TLD[type_tld]:
                    if tld not in args.exclude:
                        tlds.append({"tld": tld, "type": type_tld})
        elif "none" in args.tlds:
            pass
        else:
            for type_tld in TLD.keys():
                if type_tld in args.tlds:
                    for tld in TLD[type_tld]:
                        if tld not in args.exclude:
                            tlds.append({"tld": tld, "type": type_tld})

        # User-defined TLDs are normalised so that they start with a dot
        for new in args.user_defined:
            if new not in args.exclude:
                if new[0] == ".":
                    tlds.append({"tld": new, "type": "user_defined"})
                else:
                    tlds.append({"tld": "." + new, "type": "user_defined"})

        if args.nicks:
            domains = create_domains(tlds, nicks=args.nicks)
        else:
            # nicks_file
            domains = create_domains(tlds, nicks_file=args.nicks_file)

        # Showing the execution time...
        if not args.quiet:
            startTime = dt.datetime.now()
            print(
                f"{startTime}\tTrying to get information about {general.emphasis(str(len(domains)))} domain(s)…\n"
            )
            if len(domains) > 200:
                print(
                    """        Note that a full '-t all' search may take around 3.5 mins. If that's too
        long for you, try narrowing the search using '-t cc' or similar arguments.
        Otherwise, just wait and keep calm!
""")
            print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

        # Perform searches, using different Threads
        results = perform_search(domains, args.threads, args.whois)

        # Trying to store the information recovered
        if args.output_folder is not None:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)

            # Grabbing the results
            file_header = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files
                general.export_usufy(results, ext, file_header)

        # Showing the information gathered if requested
        if not args.quiet:
            now = dt.datetime.now()
            print(
                f"\n{now}\t{general.success(len(results))} results obtained:\n"
            )
            try:
                print(general.success(general.osrf_to_text_export(results)))
            except Exception:
                print(
                    general.warning(
                        "\nSomething happened when exporting the results. The Json will be shown instead:\n"
                    ))
                print(general.warning(json.dumps(results, indent=2)))

            now = dt.datetime.now()
            print(
                f"\n{now}\tYou can find all the information collected in the following files:"
            )
            # NOTE(review): file_header is only defined when output_folder is
            # set — this loop looks like it would raise a NameError when run
            # with --quiet off and no output folder; confirm against callers.
            for ext in args.extension:
                # Showing the output files
                print(f"\t{general.emphasis(file_header + '.' + ext)}")

        # Showing the execution time...
        if not args.quiet:
            # Showing the execution time...
            endTime = dt.datetime.now()
            print("\n{}\tFinishing execution...\n".format(endTime))
            print("Total time used:\t" + general.emphasis(str(endTime - startTime)))
            print("Average seconds/query:\t" + general.emphasis(
                str((endTime - startTime).total_seconds() / len(domains))) +
                " seconds\n")

    # Urging users to place an issue on Github...
    print(banner.footer)

    if params:
        return results
def main(params=None):
    """
        Main function to launch domainfy.

        (NOTE: the original docstring said "phonefy", but this function prints
        the Domainfy banner and works over TLDs/domains.)

        The function is created in this way so as to let other applications
        make use of the full configuration capabilities of the application.
        The parameters received are used as parsed by this modules
        `getParser()`.

        Args:
        -----
            params: A list with the parameters as grabbed by the terminal.
                It is None when this is called by an entry_point.

        Results:
        --------
            list: Returns a list with i3visio entities.
    """
    # Grabbing the parser
    parser = getParser()

    if params != None:
        args = parser.parse_args(params)
    else:
        # Entry-point usage: parse sys.argv
        args = parser.parse_args()

    results = []

    if not args.quiet:
        print(general.title(banner.text))

    sayingHello = """
    Domainfy | Copyright (C) F. Brezo and Y. Rubio (i3visio) 2016-2018

This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit """ + general.LICENSE_URL + "\n"
    print(general.title(sayingHello))

    if args.license:
        general.showLicense()
    else:
        # Processing the options returned to remove the "all" option
        tlds = []
        if "all" in args.tlds:
            for typeTld in TLD.keys():
                for tld in TLD[typeTld]:
                    if tld not in args.exclude:
                        tlds.append({"tld": tld, "type": typeTld})
        elif "none" in args.tlds:
            pass
        else:
            for typeTld in TLD.keys():
                if typeTld in args.tlds:
                    for tld in TLD[typeTld]:
                        if tld not in args.exclude:
                            tlds.append({"tld": tld, "type": typeTld})

        # User-provided TLDs are appended verbatim (no dot normalisation here)
        for new in args.user_defined:
            if new not in args.exclude:
                tlds.append({"tld": new, "type": "user_defined"})

        if args.nicks:
            domains = createDomains(tlds, nicks=args.nicks)
        else:
            # nicks_file
            domains = createDomains(tlds, nicksFile=args.nicks_file)

        # Showing the execution time...
        if not args.quiet:
            startTime = dt.datetime.now()
            print(
                str(startTime) + "\tTrying to identify the existence of " +
                general.emphasis(str(len(domains))) +
                " domain(s)... Relax!\n")
            print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

        # Perform searches, using different Threads
        results = performSearch(domains, args.threads, args.whois)

        # Trying to store the information recovered
        if args.output_folder != None:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)
            # Grabbing the results
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files
                general.exportUsufy(results, ext, fileHeader)

        # Showing the information gathered if requested
        if not args.quiet:
            print(
                "A summary of the results obtained are shown in the following table:\n"
            )
            try:
                print(general.success(general.usufyToTextExport(results)))
            except:
                print(
                    general.warning(
                        "\nSomething happened when exporting the results. The Json will be shown instead:\n"
                    ))
                print(general.warning(json.dumps(results, indent=2)))

            now = dt.datetime.now()
            print(
                "\n" + str(now) +
                "\tYou can find all the information collected in the following files:"
            )
            # NOTE(review): fileHeader is only defined when output_folder is
            # set — this loop looks like it would raise a NameError when run
            # without an output folder; confirm against callers.
            for ext in args.extension:
                # Showing the output files
                print("\t" + general.emphasis(fileHeader + "." + ext))

        # Showing the execution time...
        if not args.quiet:
            # Showing the execution time...
            endTime = dt.datetime.now()
            print("\n" + str(endTime) + "\tFinishing execution...\n")
            print("Total time used:\t" + general.emphasis(str(endTime - startTime)))
            print("Average seconds/query:\t" + general.emphasis(
                str((endTime - startTime).total_seconds() / len(domains))) +
                " seconds\n")

    # Urging users to place an issue on Github...
    print(banner.footer)

    if params:
        return results
def main(args):
    """
        Main function to launch mailfy.

        (NOTE: the original docstring said "phonefy", but this function prints
        the mailfy banner and verifies email addresses.)

        The function is created in this way so as to let other applications
        make use of the full configuration capabilities of the application.
        The parameters received are used as parsed by this modules
        `getParser()`.

        Args:
        -----
            args: The parameters as processed by this modules `getParser()`.

        Results:
        --------
            Returns a list with i3visio entities.
    """
    results = []

    if not args.quiet:
        print(general.title(banner.text))

    sayingHello = """
    mailfy.py Copyright (C) F. Brezo and Y. Rubio (i3visio) 2016-2017

This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit """ + general.LICENSE_URL + "\n"
    print(general.title(sayingHello))

    # Displaying a warning if this is being run in a windows system
    if sys.platform == 'win32':
        print(
            general.warning(
                """OSRFramework has detected that you are running mailfy.py in a Windows system. As the "emailahoy" library is NOT working properly there, "validate_email" will be used instead. Verification may be slower though."""))

    if args.license:
        general.showLicense()
    else:
        # Grabbing the list of global domains
        if args.is_leaked:
            domains = LEAKED_DOMAINS
        # Processing the options returned to remove the "all" option
        elif "all" in args.domains:
            domains = EMAIL_DOMAINS
        else:
            # processing only the given domains and excluding the ones provided
            domains = []
            for d in args.domains:
                if d not in args.exclude:
                    domains.append(d)

        # Either building candidate emails from nicks, or collecting given ones
        if args.create_emails:
            emails = grabEmails(nicksFile=args.create_emails,
                                domains=domains,
                                excludeDomains=args.exclude)
        else:
            emails = grabEmails(emails=args.emails,
                                emailsFile=args.emails_file,
                                nicks=args.nicks,
                                nicksFile=args.nicks_file,
                                domains=domains,
                                excludeDomains=args.exclude)

        startTime = dt.datetime.now()

        if not args.is_leaked:
            # Showing the execution time...
            if not args.quiet:
                print(
                    str(startTime) + "\tStarting search in " +
                    general.emphasis(str(len(emails))) +
                    " different emails:\n" +
                    json.dumps(emails, indent=2, sort_keys=True) + "\n")
                print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

            # Perform searches, using different Threads
            tmp = performSearch(emails, args.threads)

            # We make a strict copy of the object
            results = list(tmp)

            if not args.quiet:
                now = dt.datetime.now()
                print(
                    str(now) + "\tMailfy has found " +
                    general.emphasis(str(len(results))) +
                    " existing email(s). Has it been leaked somewhere?")

            # Verify the existence of the mails found as leaked emails.
            for r in tmp:
                # We assume that the first attribute is always the email
                query = r["attributes"][0]["value"]
                leaks = hibp.checkIfEmailWasHacked(query)

                if len(leaks) > 0:
                    if not args.quiet:
                        print(
                            general.success("\t" + query +
                                            " has been found in at least " +
                                            str(len(leaks)) +
                                            " different leaks."))

                    email, alias, domain = getMoreInfo(query)
                    for leak in leaks:
                        # Creating a new full entity from scratch
                        new = {}
                        new["type"] = "i3visio.profile"
                        new["value"] = leak["value"] + " - " + alias["value"]
                        new["attributes"] = []
                        new["attributes"].append(email)
                        new["attributes"].append(alias)
                        new["attributes"].append(domain)
                        # leak contains a i3visio.platform built by HIBP
                        new["attributes"].append(leak)
                        results.append(new)
                else:
                    if not args.quiet:
                        print(
                            general.warning(
                                "\t" + query +
                                " has NOT been found on any leak yet."))
        else:
            if not args.quiet:
                print(
                    "\n" + str(startTime) + "\tStarting search of " +
                    general.emphasis(str(len(emails))) +
                    " different emails in leaked databases.\nNote that this will take between 1 and 2 seconds per query due to HIBP API restrictions:\n"
                    + json.dumps(emails, indent=2, sort_keys=True) + "\n")
                print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

            # Perform is_leaked function
            results = []
            for i, e in enumerate(emails):
                if not args.quiet:
                    print("\t" + str(i + 1) + "/" + str(len(emails)) +
                          " - Searching if " + e +
                          " has been leaked somewhere...")
                leaks = hibp.checkIfEmailWasHacked(e)

                if len(leaks) > 0:
                    if not args.quiet:
                        print(
                            general.success("\t" + e +
                                            " has been found in at least " +
                                            str(len(leaks)) +
                                            " different leaks."))

                    email, alias, domain = getMoreInfo(e)
                    for leak in leaks:
                        # Creating a new full entity from scratch
                        new = {}
                        new["type"] = "i3visio.profile"
                        new["value"] = leak["value"] + " - " + alias["value"]
                        new["attributes"] = []
                        new["attributes"].append(email)
                        new["attributes"].append(alias)
                        new["attributes"].append(domain)
                        # leak contains a i3visio.platform built by HIBP
                        new["attributes"].append(leak)
                        results.append(new)

        # Trying to store the information recovered
        if args.output_folder != None:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)
            # Grabbing the results
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files
                general.exportUsufy(results, ext, fileHeader)

        # Showing the information gathered if requested
        if not args.quiet:
            now = dt.datetime.now()
            print(
                "\n" + str(now) +
                "\tA summary of the results obtained are shown in the following table:\n"
            )
            print(general.success(general.usufyToTextExport(results)))

            now = dt.datetime.now()
            print(
                "\n" + str(now) +
                "\tYou can find all the information collected in the following files:"
            )
            # NOTE(review): fileHeader is only defined when output_folder is
            # set — this loop looks like it would raise a NameError when run
            # without an output folder; confirm against callers.
            for ext in args.extension:
                # Showing the output files
                print(general.emphasis("\t" + fileHeader + "." + ext))

        # Showing the execution time...
        if not args.quiet:
            endTime = dt.datetime.now()
            print("\n" + str(endTime) + "\tFinishing execution...\n")
            print("Total time used:\t" + general.emphasis(str(endTime - startTime)))
            print("Average seconds/query:\t" + general.emphasis(
                str((endTime - startTime).total_seconds() / len(emails))) +
                " seconds\n")

    if not args.quiet:
        # Urging users to place an issue on Github...
        print(banner.footer)

    return results
def main(params=None):
    """Main function to launch mailfy.

    The function is created in this way so as to let other applications make
    use of the full configuration capabilities of the application. The
    parameters received are used as parsed by this modules `get_parser()`.

    Args:
        params: A list with the parameters as grabbed by the terminal. It is
            None when this is called by an entry_point. If it is called by osrf
            the data is already parsed.

    Returns:
        list. A list of i3visio entities.
    """
    if params is None:
        parser = get_parser()
        args = parser.parse_args(params)
    else:
        args = params

    results = []

    if not args.quiet:
        print(general.title(banner.text))

        saying_hello = f"""
     Mailfy | Copyright (C) Yaiza Rubio & Félix Brezo (i3visio) 2014-2020

This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit <{general.LICENSE_URL}>.
"""
        print(general.info(saying_hello))

    # Displaying a warning if this is being run in a windows system
    if sys.platform == 'win32':
        print(
            general.warning(
                """OSRFramework has detected that you are running mailfy in a Windows system.
As the "emailahoy" library is NOT working properly there, "validate_email" will be used instead.
Verification may be slower though."""))

    if args.license:
        general.showLicense()
    else:
        # processing only the given domains and excluding the ones provided
        extra_domains = []
        for d in args.domains:
            if d not in args.exclude and not d == "all":
                extra_domains.append(d)

        # Two different arrays are mantained since there are some domains that
        # cannot be safely verified
        if args.create_emails:
            potentially_existing_emails = grab_emails(
                nicks_file=args.create_emails,
                domains=EMAIL_DOMAINS + extra_domains,
                exclude_domains=args.exclude)
            potentially_leaked_emails = grab_emails(
                nicks_file=args.create_emails,
                domains=LEAKED_DOMAINS + extra_domains,
                exclude_domains=args.exclude)
        else:
            potentially_existing_emails = grab_emails(
                emails=args.emails,
                emails_file=args.emails_file,
                nicks=args.nicks,
                nicks_file=args.nicks_file,
                domains=EMAIL_DOMAINS + extra_domains,
                exclude_domains=args.exclude)
            potentially_leaked_emails = grab_emails(
                emails=args.emails,
                emails_file=args.emails_file,
                nicks=args.nicks,
                nicks_file=args.nicks_file,
                domains=LEAKED_DOMAINS + extra_domains,
                exclude_domains=args.exclude)

        emails = list(
            set(potentially_leaked_emails + potentially_existing_emails))

        if not args.quiet:
            start_time = dt.datetime.now()
            print(
                f"\n{start_time}\t{general.emphasis('Step 1/5')}. Trying to determine if any of the following {general.emphasis(str(len(potentially_existing_emails)))} emails exist using emailahoy3...\n{general.emphasis(json.dumps(potentially_existing_emails, indent=2))}\n"
            )
            print(
                general.emphasis("\tPress <Ctrl + C> to skip this step...\n"))

        # Perform searches, using different Threads
        try:
            results = verify_with_emailahoy_step_1(potentially_existing_emails,
                                                   num_threads=args.threads)
        except KeyboardInterrupt:
            print(
                general.warning("\tStep 1 manually skipped by the user...\n"))
            results = []

        # Grabbing the <Platform> objects
        platforms = platform_selection.get_platforms_by_name(args.platforms,
                                                             mode="mailfy")
        names = [p.platformName for p in platforms]

        if not args.quiet:
            now = dt.datetime.now()
            print(
                f"\n{now}\t{general.emphasis('Step 2/5')}. Checking if the emails have been used to register accounts in {general.emphasis(str(len(platforms)))} platforms...\n{general.emphasis(json.dumps(names, indent=2))}\n"
            )
            print(
                general.emphasis("\tPress <Ctrl + C> to skip this step...\n"))

        try:
            registered = process_mail_list_step_2(platforms=platforms,
                                                  emails=emails)
        except KeyboardInterrupt:
            print(
                general.warning("\tStep 2 manually skipped by the user...\n"))
            registered = []

        results += registered

        if not args.quiet:
            # Fix: test the step-2 hits themselves. The original tested
            # `len(results)`, so "No account found." was silently suppressed
            # whenever step 1 had already produced results.
            if len(registered) > 0:
                for r in registered:
                    print(
                        f"\t[*] Linked account found: {general.success(r['value'])}"
                    )
            else:
                print("\t[*] No account found.")

            now = dt.datetime.now()
            print(
                f"\n{now}\t{general.emphasis('Step 3/5')}. Verifying if the provided emails have been leaked somewhere using HaveIBeenPwned.com...\n"
            )
            print(
                general.emphasis("\tPress <Ctrl + C> to skip this step...\n"))

        all_keys = config_api_keys.get_list_of_api_keys()

        try:
            # Verify the existence of the mails found as leaked emails.
            for query in potentially_leaked_emails:
                # Iterate through the different leak platforms
                leaks = hibp.check_if_email_was_hacked(
                    query,
                    api_key=all_keys["haveibeenpwned_com"]["api_key"])
                if len(leaks) > 0:
                    if not args.quiet:
                        print(
                            f"\t[*] '{general.success(query)}' has been found in at least {general.success(len(leaks))} different leaks."
                        )
                else:
                    if not args.quiet:
                        print(
                            f"\t[*] '{general.error(query)}' has NOT been found on any leak yet."
                        )
                results += leaks
        except KeyError:
            # API_Key not found
            config_path = os.path.join(
                configuration.get_config_path()["appPath"], "api_keys.cfg")
            print(
                "\t[*] " + general.warning("No API found for HaveIBeenPwned") +
                f". Request one at <https://haveibeenpwned.com/API/Key> and add it to '{config_path}'."
            )
        except KeyboardInterrupt:
            print(
                general.warning("\tStep 3 manually skipped by the user...\n"))

        if not args.quiet:
            now = dt.datetime.now()
            print(
                f"\n{now}\t{general.emphasis('Step 4/5')}. Verifying if the provided emails have been leaked somewhere using Dehashed.com...\n"
            )
            print(
                general.emphasis("\tPress <Ctrl + C> to skip this step...\n"))

        try:
            # Verify the existence of the mails found as leaked emails.
            for query in emails:
                try:
                    # Iterate through the different leak platforms
                    leaks = dehashed.check_if_email_was_hacked(query)
                    if len(leaks) > 0:
                        if not args.quiet:
                            print(
                                f"\t[*] '{general.success(query)}' has been found in at least {general.success(len(leaks))} different leaks as shown by Dehashed.com."
                            )
                    else:
                        if not args.quiet:
                            print(
                                f"\t[*] '{general.error(query)}' has NOT been found on any leak yet."
                            )
                    results += leaks
                except Exception as e:
                    # Fix: the loop variable is `query`; the original referenced
                    # an undefined name `email` here, raising NameError instead
                    # of printing the warning.
                    print(
                        general.warning(
                            f"Something happened when querying Dehashed.com about '{query}'. Omitting..."
                        ))
        except KeyboardInterrupt:
            print(
                general.warning("\tStep 4 manually skipped by the user...\n"))

        if not args.quiet:
            now = dt.datetime.now()
            print(
                f"\n{now}\t{general.emphasis('Step 5/5')}. Verifying if the provided emails have registered a domain using ViewDNS.info...\n"
            )
            print(
                general.emphasis("\tPress <Ctrl + C> to skip this step...\n"))

        try:
            # Verify the existence of the mails found as leaked emails.
            for query in potentially_leaked_emails:
                try:
                    # Iterate through the different leak platforms
                    domains = viewdns.check_reverse_whois(query)
                    if len(domains) > 0:
                        if not args.quiet:
                            print(
                                f"\t[*] '{general.success(query)}' has registered at least {general.success(len(domains))} different domains as shown by ViewDNS.info."
                            )
                    else:
                        if not args.quiet:
                            print(
                                f"\t[*] '{general.error(query)}' has NOT registered a domain yet."
                            )
                    results += domains
                except Exception as e:
                    print(
                        general.warning(
                            f"Something happened when querying Viewdns.info about '{query}'. Omitting..."
                        ))
        except KeyboardInterrupt:
            print(
                general.warning("\tStep 5 manually skipped by the user...\n"))

        # Trying to store the information recovered
        if args.output_folder is not None:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)

            # Grabbing the results
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files
                general.export_usufy(results, ext, fileHeader)

        # Showing the information gathered if requested
        if not args.quiet:
            now = dt.datetime.now()
            print(f"\n{now}\tResults obtained:\n")
            print(general.success(general.osrf_to_text_export(results)))

            # Fix: only list output files when they were actually written;
            # `fileHeader` is undefined when no output folder was given.
            if args.output_folder is not None:
                now = dt.datetime.now()
                print(
                    f"\n{now}\tYou can find all the information collected in the following files:"
                )
                for ext in args.extension:
                    # Showing the output files
                    print(general.emphasis("\t" + fileHeader + "." + ext))

        # Showing the execution time...
        if not args.quiet:
            end_time = dt.datetime.now()
            # Fix: the original string was missing the f-prefix and printed
            # the literal text "{end_time}".
            print(f"\n{end_time}\tFinishing execution...\n")
            print("Total time used:\t" +
                  general.emphasis(str(end_time - start_time)))

    if not args.quiet:
        # Urging users to place an issue on Github...
        print(banner.footer)

    if params:
        return results
def performSearch(domains=None, nThreads=16, launchWhois=False):
    """Perform the domain resolution process for a list of candidates.

    Each candidate dict is handed to `pool_function` in a worker pool and the
    results are collected asynchronously via a callback.

    Arguments
    ---------
        domains: List of domain dicts to check. Defaults to an empty list
            (a `None` default is used to avoid the mutable-default pitfall).
        nThreads: Number of workers to use. Values <= 0 or larger than the
            number of domains are capped to `len(domains)`.
        launchWhois: Sets if whois queries will be launched.

    Returns
    -------
        list: A list containing the results as i3visio entities.
    """
    # Local import: only needed for the polling sleep below.
    import time

    results = []

    if domains is None:
        domains = []

    # Returning an empty list if no valid domain has been provided
    if len(domains) == 0:
        return results

    # If the process is executed by the current app, we use the Processes.
    # It is faster than pools.
    if nThreads <= 0 or nThreads > len(domains):
        nThreads = len(domains)

    # Launching the Pool
    # ------------------
    # Example catched from: https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
    try:
        original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
        pool = Pool(nThreads)
        signal.signal(signal.SIGINT, original_sigint_handler)
    except ValueError:
        # To avoid: ValueError: signal only works in main thread
        pool = Pool(nThreads)

    poolResults = []
    try:
        def log_result(result):
            # Called by the pool whenever pool_function returns a result.
            # poolResults is modified only by the main process, not the workers.
            poolResults.append(result)

        for d in domains:
            # We need to create all the arguments that will be needed
            parameters = (
                d,
                launchWhois,
            )
            pool.apply_async(pool_function,
                             args=parameters,
                             callback=log_result)

        # Waiting for results to be finished. Fix: sleep briefly instead of
        # busy-spinning (`pass`), which pinned a CPU core at 100%.
        while len(poolResults) < len(domains):
            time.sleep(0.2)

        # Closing normal termination
        pool.close()
    except KeyboardInterrupt:
        print(
            general.warning(
                "\nProcess manually stopped by the user. Terminating workers.\n"
            ))
        pool.terminate()

        print(general.warning("The following domains were not processed:"))
        pending_tld = ""
        for d in domains:
            processed = False
            for processedDomain in poolResults:
                if str(d) == processedDomain["platform"]:
                    processed = True
                    break
            if not processed:
                print(general.warning("\t- " + str(d["domain"])))
                pending_tld += " " + str(d["tld"])

        print(
            general.warning(
                "[!] If you want to relaunch the app with these domains you can always run the command with: "
            ))
        print(general.warning("\t domainfy ... -t none -u " + pending_tld))
        print(
            general.warning(
                "[!] If you prefer to avoid these platforms you can manually evade them for whatever reason with: "
            ))
        print(general.warning("\t domainfy ... -x " + pending_tld))
    pool.join()

    # Processing the results
    # ----------------------
    for serArray in poolResults:
        data = serArray["data"]
        # Keep only meaningful payloads: skip None and empty dicts
        if data is not None and data != {}:
            results.append(data)
    return results
def main(params=None):
    """
    Main function to launch the legacy mailfy flow.

    NOTE(review): the original docstring said "phonefy", but every banner,
    helper (`grabEmails`, `processMailList`, `hibp`) and message in the body
    is mailfy-specific — the docstring appears to have been copy-pasted.

    The function is created in this way so as to let other applications make
    use of the full configuration capabilities of the application. The
    parameters received are used as parsed by this modules `getParser()`.

    Args:
    -----
        params: A list with the parameters as grabbed by the terminal. It is
            None when this is called by an entry_point. If it is called by osrf
            the data is already parsed.

    Returns:
    --------
        A list of i3visio entities.
    """
    if params is None:
        parser = getParser()
        args = parser.parse_args(params)
    else:
        args = params

    results = []

    if not args.quiet:
        print(general.title(banner.text))

        sayingHello = """
     Mailfy | Copyright (C) Yaiza Rubio & Félix Brezo (i3visio) 2014-2018

This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
are welcome to redistribute it under certain conditions. For additional info,
visit <{}>.
""".format(general.LICENSE_URL)
        print(general.info(sayingHello))

    # Displaying a warning if this is being run in a windows system
    if sys.platform == 'win32':
        print(
            general.warning(
                """OSRFramework has detected that you are running mailfy.py in a Windows system.
As the "emailahoy" library is NOT working properly there, "validate_email" will be used instead.
Verification may be slower though."""))

    if args.license:
        general.showLicense()
    else:
        # processing only the given domains and excluding the ones provided
        extra_domains = []
        for d in args.domains:
            if d not in args.exclude and not d == "all":
                extra_domains.append(d)

        # Two different arrays are mantained since there are some domains that
        # cannot be safely verified
        if args.create_emails:
            potentially_existing_emails = grabEmails(
                nicksFile=args.create_emails,
                domains=EMAIL_DOMAINS + extra_domains,
                excludeDomains=args.exclude)
            potentially_leaked_emails = grabEmails(
                nicksFile=args.create_emails,
                domains=LEAKED_DOMAINS + extra_domains,
                excludeDomains=args.exclude)
        else:
            potentially_existing_emails = grabEmails(
                emails=args.emails,
                emailsFile=args.emails_file,
                nicks=args.nicks,
                nicksFile=args.nicks_file,
                domains=EMAIL_DOMAINS + extra_domains,
                excludeDomains=args.exclude)
            potentially_leaked_emails = grabEmails(
                emails=args.emails,
                emailsFile=args.emails_file,
                nicks=args.nicks,
                nicksFile=args.nicks_file,
                domains=LEAKED_DOMAINS + extra_domains,
                excludeDomains=args.exclude)

        emails = list(
            set(potentially_leaked_emails + potentially_existing_emails))

        # Showing the execution time...
        if not args.quiet:
            startTime = dt.datetime.now()
            print("{}\tStarting search of {} different emails:\n{}\n".format(
                str(startTime), general.emphasis(str(len(emails))),
                json.dumps(emails, indent=2, sort_keys=True)))

        if not args.quiet:
            now = dt.datetime.now()
            print(
                "\n{}\tStep 1. Trying to determine if the emails provided do exist...\n"
                .format(str(now)))
            print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

        # Perform searches, using different Threads
        results = performSearch(potentially_existing_emails,
                                nThreads=args.threads)

        if not args.quiet:
            now = dt.datetime.now()
            print(
                "\n{}\tStep 2. Checking if the emails have been used to register socialmedia accounts...\n"
                .format(str(now)))
            print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

        registered = processMailList(platformNames=args.platforms,
                                     emails=potentially_existing_emails)

        results += registered

        if not args.quiet:
            # Fix: test the step-2 hits themselves. The original tested
            # `len(results)`, so the "None" notice was suppressed whenever
            # step 1 had already produced results.
            if len(registered) > 0:
                for r in registered:
                    print("\t[*] Registered account found: {}".format(
                        general.success(r["value"])))
            else:
                print("\t[*] Registered account found: {}".format(
                    general.error("None")))

            now = dt.datetime.now()
            print(
                "\n{}\tStep 3. Verifying if the provided emails have been leaked somewhere?\n"
                .format(str(now)))
            print(general.emphasis("\tPress <Ctrl + C> to stop...\n"))

        # Verify the existence of the mails found as leaked emails.
        for query in potentially_leaked_emails:
            # Iterate through the different leak platforms
            leaks = hibp.checkIfEmailWasHacked(query)
            # Fix: the original re-tested `len(leaks) > 0` inside this branch,
            # making its "has NOT been found in any leak." message unreachable
            # dead code. The flattened structure below is behaviorally equal.
            if len(leaks) > 0:
                if not args.quiet:
                    print(
                        "\t[*] '{}' has been found in at least {} different leaks."
                        .format(general.success(query),
                                general.success(str(len(leaks)))))
            else:
                if not args.quiet:
                    print("\t[*] '{}' has NOT been found on any leak yet.".
                          format(general.error(query)))
            results += leaks

        # Trying to store the information recovered
        if args.output_folder is not None:
            if not os.path.exists(args.output_folder):
                os.makedirs(args.output_folder)

            # Grabbing the results
            fileHeader = os.path.join(args.output_folder, args.file_header)
            for ext in args.extension:
                # Generating output files
                general.exportUsufy(results, ext, fileHeader)

        # Showing the information gathered if requested
        if not args.quiet:
            now = dt.datetime.now()
            print("\n{}\tResults obtained:\n".format(str(now)))
            print(general.success(general.usufyToTextExport(results)))

            # Fix: only list output files when they were actually written;
            # `fileHeader` is undefined when no output folder was given.
            if args.output_folder is not None:
                now = dt.datetime.now()
                print(
                    "\n" + str(now) +
                    "\tYou can find all the information collected in the following files:"
                )
                for ext in args.extension:
                    # Showing the output files
                    print(general.emphasis("\t" + fileHeader + "." + ext))

        # Showing the execution time...
        if not args.quiet:
            endTime = dt.datetime.now()
            print("\n" + str(endTime) + "\tFinishing execution...\n")
            print("Total time used:\t" +
                  general.emphasis(str(endTime - startTime)))
            # Fix: guard against ZeroDivisionError when no email was built
            if len(emails) > 0:
                print("Average seconds/query:\t" + general.emphasis(
                    str((endTime - startTime).total_seconds() / len(emails)))
                      + " seconds\n")

    if not args.quiet:
        # Urging users to place an issue on Github...
        print(banner.footer)

    if params:
        return results
def process_nick_list(nicks,
                      platforms=None,
                      rutaDescarga="./",
                      avoidProcessing=True,
                      avoidDownload=True,
                      nThreads=12,
                      verbosity=1,
                      logFolder="./logs"):
    """
    Process a list of nicks to check whether they exist.

    This method receives as a parameter a series of nicks and verifies whether
    those nicks have a profile associated in different social networks. Each
    nick is fanned out to one worker per platform through a multiprocessing
    Pool; a Ctrl+C during the wait prints the list of pending platforms and
    the command-line flags needed to resume or skip them.

    Args:
        nicks: List of nicks to process.
        platforms: List of <Platform> objects to be processed. When None, all
            usufy-capable platform names are loaded.
        rutaDescarga: Local file where saving the obtained information.
        avoidProcessing: A boolean var that defines whether the profiles will
            NOT be processed.
        avoidDownload: A boolean var that defines whether the profiles will
            NOT be downloaded.
        nThreads: Number of pool workers; values <= 0 or greater than the
            number of platforms are capped to len(platforms).
        verbosity: The level of verbosity to be used. Warnings are only
            printed when verbosity >= 2.
        logFolder: The path to the log folder.

    Returns:
        A dictionary where the key is the nick and the value another
        dictionary where the keys are the social networks and the value is the
        corresponding URL.
        NOTE(review): the code actually builds and returns a *list* of profile
        entries (`res`); the dictionary description above looks stale —
        confirm against callers.
    """
    if platforms is None:
        platforms = platform_selection.get_all_platform_names("usufy")

    # Defining the output results variable
    res = []

    # Processing the whole list of terms...
    for nick in nicks:
        # If the process is executed by the current app, we use the Processes. It is faster than pools.
        # NOTE(review): nThreads is clamped once and keeps its reduced value
        # for subsequent nicks — presumably harmless since platforms does not
        # change inside the loop.
        if nThreads <= 0 or nThreads > len(platforms):
            nThreads = len(platforms)

        # Using threads in a pool if we are not running the program in main
        # Example catched from: https://stackoverflow.com/questions/11312525/catch-ctrlc-sigint-and-exit-multiprocesses-gracefully-in-python
        try:
            # Ignore SIGINT while forking so workers inherit the "ignore"
            # disposition; the handler is restored for the main process only.
            original_sigint_handler = signal.signal(signal.SIGINT,
                                                    signal.SIG_IGN)
            pool = Pool(nThreads)
            signal.signal(signal.SIGINT, original_sigint_handler)
        except ValueError:
            # To avoid: ValueError: signal only works in main thread
            pool = Pool(nThreads)

        pool_results = []
        try:
            def log_result(result):
                # This is called whenever foo_pool(i) returns a result.
                # result_list is modified only by the main process, not the pool workers.
                pool_results.append(result)

            for plat in platforms:
                # We need to create all the arguments that will be needed
                parameters = (plat, nick, rutaDescarga, avoidProcessing,
                              avoidDownload, verbosity)
                pool.apply_async(
                    pool_function,
                    args=parameters,
                    callback=log_result,
                )

            # Waiting for results to be finished
            # (polled via the callback-filled list; one-second granularity)
            while len(pool_results) < len(platforms):
                time.sleep(1)

            # Closing normal termination
            pool.close()
        except KeyboardInterrupt:
            print(
                general.warning(
                    "\n[!] Process manually stopped by the user. Terminating workers.\n"
                ))
            pool.terminate()

            # Report which platforms never produced a result so the user can
            # resume (-p) or exclude (-x) them on the next run.
            print(
                general.warning(
                    "[!] The following platforms were not processed:"))
            pending = ""
            for p in platforms:
                processed = False
                for processedPlatform in pool_results:
                    if str(p) == processedPlatform["platform"]:
                        processed = True
                        break
                if not processed:
                    print("\t- " + str(p))
                    pending += " " + str(p).lower()
            print("\n")
            print(
                general.warning(
                    "If you want to relaunch the app with these platforms you can always run the command with: "
                ))
            print("\t usufy ... -p " + general.emphasis(pending))
            print("\n")
            print(
                general.warning(
                    "If you prefer to avoid these platforms you can manually evade them for whatever reason with: "
                ))
            print("\t usufy ... -x " + general.emphasis(pending))
            print("\n")
        pool.join()

        # Collecting the results
        profiles = []
        errors = {}
        warnings = {}
        for info in pool_results:
            if info["status"] == "Ok":
                # Successful workers serialise their profiles as JSON text.
                array = json.loads(info["data"])
                for r in array:
                    if r != "{}":
                        profiles.append(r)
            else:
                # NOTE(review): `info["status"]` is compared to the string
                # "Ok" above, yet treated as an exception *instance* here
                # (isinstance / __class__.__name__). This only classifies
                # correctly if workers store the exception object itself in
                # "status" on failure — confirm against pool_function.
                e = info["status"]
                if isinstance(e, OSRFrameworkError):
                    # Aggregate one entry per error class with a hit counter.
                    aux = errors.get(e.__class__.__name__, {})
                    aux["info"] = info["data"]
                    aux["counter"] = aux.get("counter", 0) + 1
                    errors[e.__class__.__name__] = aux
                else:
                    aux = warnings.get(e.__class__.__name__, {})
                    aux["info"] = info["data"]
                    aux["counter"] = aux.get("counter", 0) + 1
                    warnings[e.__class__.__name__] = aux
        res += profiles

        if errors:
            now = dt.datetime.now()
            print(f"\n{now}\tSome errors where found in the process:")
            for key, value in errors.items():
                print(
                    textwrap.fill("- {} (found: {}). Details:".format(
                        general.error(key), general.error(value["counter"])),
                                  90,
                                  initial_indent="\t"))
                print(
                    textwrap.fill("\t{}".format(value["info"]),
                                  80,
                                  initial_indent="\t"))

        if warnings and verbosity >= 2:
            now = dt.datetime.now()
            print(
                "\n{}\tSome warnings where found in the process:".format(now))
            for key, value in warnings.items():
                print(
                    textwrap.fill("- {} (found: {}). Details:".format(
                        general.warning(key),
                        general.warning(value["counter"])),
                                  90,
                                  initial_indent="\t"))
                print(
                    textwrap.fill("\t{}".format(value["info"]),
                                  80,
                                  initial_indent="\t"))

    return res
def pool_function(domain, launch_whois=False):
    """Worker that checks a single domain candidate and builds its entity.

    Args:
        domain: Dictionary describing the candidate, e.g.
            ```
            {
                "domain" : ".com",
                "type" : "global"
            }
            ```
        launch_whois: Whether the whois info will be launched.

    Returns:
        dict: A dictionary of the form
            `{"platform" : str(domain), "status": "DONE", "data": entity}`
            where `data` is `{}` when the check failed.
    """
    # Optional whois lookup. Any failure is logged and degrades to None.
    whois_info = None
    try:
        if domain["type"] != "other" and launch_whois:
            whois_info = get_whois_info(domain["domain"])
            print(
                f"[i] Whois data retrieved from '{general.info(domain['domain'])}'."
            )
    except Exception:
        # If something happened... Log the answer
        whois_info = None
        print(
            general.warning(
                f"[!] Something happened when running whois of '{domain['domain']}'."
            ))

    try:
        # Base entity; whois attributes (when present) seed the list.
        entity = {
            "type": "com.i3visio.Result",
            "value": "Domain Info - " + domain["domain"],
            "attributes": whois_info if whois_info else [],
        }

        entity["attributes"].append({
            "type": "com.i3visio.Domain",
            "value": domain["domain"],
            "attributes": [],
        })
        entity["attributes"].append({
            "type": "com.i3visio.Domain.TLD.Type",
            "value": domain["type"],
            "attributes": [],
        })

        ipv4 = socket.gethostbyname(domain["domain"])

        # Check if this ipv4 normally throws false positives
        if is_blackListed(ipv4) and not whois_info:
            return {"platform": str(domain), "status": "ERROR", "data": {}}

        # If we arrive here... The domain resolves so we add the info:
        entity["attributes"].append({
            "type": "com.i3visio.IPv4",
            "value": ipv4,
            "attributes": [],
        })
        return {"platform": str(domain), "status": "DONE", "data": entity}
    except Exception:
        # Resolution failed; whois-only data is still worth returning.
        if whois_info:
            return {"platform": str(domain), "status": "DONE", "data": entity}
        return {"platform": str(domain), "status": "ERROR", "data": {}}