def Data_Type_Discovery(Data_to_Search): # Function responsible for determining the type of data found. Examples: Hash_Type, Credentials, Email, or URL.

    try:
        Dump_Types = []
        Hash_Types = ["MD5", "SHA1", "SHA256"]
        Hash_Type_Dict = {}

        for Hash_Type in Hash_Types:
            Hash_Type_Dict[Hash_Type] = Common.Regex_Handler(Data_to_Search, Type=Hash_Type)

        for Hash_Key, Hash_Value in Hash_Type_Dict.items(): # Hash type identification.

            if Hash_Value:
                Hash_Type_Line = f"{Hash_Key} hash"

                if Hash_Type_Line not in Dump_Types:
                    Dump_Types.append(Hash_Type_Line)

        if Common.Regex_Handler(Data_to_Search, Type="Credentials"): # Credentials identification.

            if "Credentials" not in Dump_Types:
                Dump_Types.append("Credentials")

        else: # Only check for plain email addresses when no credential pairs were found.

            if Common.Regex_Handler(Data_to_Search, Type="Email"): # Email identification.

                if "Email" not in Dump_Types:
                    Dump_Types.append("Email")

        if Common.Regex_Handler(Data_to_Search, Type="URL"): # URL identification.

            if "URL" not in Dump_Types:
                Dump_Types.append("URL")

        return Dump_Types

    except Exception:
        logging.warning(f"{Common.Date()} - General Library - Failed to determine data type.")
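# A minimal, self-contained sketch of the classification idea above, using the
# standard re module directly. Common.Regex_Handler is assumed to wrap patterns
# similar to these; its exact patterns are not shown in this code, so treat the
# ones below as illustrative only.
import re

ILLUSTRATIVE_PATTERNS = {
    "MD5 hash": r"\b[a-fA-F0-9]{32}\b",
    "SHA1 hash": r"\b[a-fA-F0-9]{40}\b",
    "SHA256 hash": r"\b[a-fA-F0-9]{64}\b",
    "Email": r"\b[\w.+-]+@[\w-]+\.[\w.-]+\b",
    "URL": r"\bhttps?://\S+\b",
}

def Illustrative_Data_Type_Discovery(Data_to_Search):
    # Return the names of all patterns that match somewhere in the input.
    return [Name for Name, Pattern in ILLUSTRATIVE_PATTERNS.items() if re.search(Pattern, Data_to_Search)]

# Example: Illustrative_Data_Type_Discovery("admin@example.com d41d8cd98f00b204e9800998ecf8427e")
# returns ["MD5 hash", "Email"].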
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            BSB_Search_URL = f"https://www.{self.Domain}/{Query}.html"
            Responses = Common.Request_Handler(BSB_Search_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Filtered"]
            Error_Regex = Common.Regex_Handler(Response, Custom_Regex=r"Correct\sthe\sfollowing\serrors")
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

            if not Error_Regex:

                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, Query, self.The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], BSB_Search_URL, General.Get_Title(BSB_Search_URL), self.Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query returned error, probably does not exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "IP Address Information", Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Output_Response, Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(JSON_Response, JSON_Output_Response, f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, HTML_Output_File_Data, Title.replace(" ", "-"), The_File_Extensions["Main_Converted"])

                    if Output_file and HTML_Output_File: # Attach both the JSON output and its HTML conversion to the result.
                        Output_Connections.Output([Output_file, HTML_Output_File], Result_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        # str.strip removes a character set, not a prefix, so __name__.strip('plugins.') would mangle the module name; replace() is used instead.
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def __init__(self, Directory, Plugin_Name):
    Cache_File = f"{Plugin_Name}-cache.txt"
    # Plugin output directories are dated (.../YYYY/MM/DD); the cache file lives in the undated parent directory.
    General_Directory_Search = Common.Regex_Handler(Directory, Custom_Regex=r"(.*)\/\d{4}\/\d{2}\/\d{2}")

    if General_Directory_Search:
        self.Complete_File = os.path.join(General_Directory_Search.group(1), Cache_File)
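# The plugins above call Get_Cache() and Write_Cache() on this object. The sketch
# below shows what those methods plausibly do, assuming a plain newline-delimited
# cache file; the real implementations are not shown in this section.
import os

class Illustrative_Cache:

    def __init__(self, Complete_File):
        self.Complete_File = Complete_File # Path resolved as in __init__ above.

    def Get_Cache(self):
        # Return previously cached entries, one per line; an empty list if no cache exists yet.
        if os.path.isfile(self.Complete_File):
            with open(self.Complete_File, "r") as Cache_Handle:
                return Cache_Handle.read().splitlines()
        return []

    def Write_Cache(self, Data_to_Cache):
        # Append this run's new entries so later runs skip them.
        with open(self.Complete_File, "a") as Cache_Handle:
            for Item in Data_to_Cache:
                Cache_Handle.write(Item + "\n")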
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if Common.Regex_Handler(Query, Type="Email"):
                Link = f"https://{self.Domain}/home/verify-as-guest/{Query}"
                JSON_Response = Common.Request_Handler(Link)
                JSON_Object = Common.JSON_Handler(JSON_Response)

                if JSON_Object.Is_JSON():
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():

                        if Key != "response":
                            Table_JSON[Key] = Value

                        else:

                            for Det_Key, Det_Val in JSON_Response["response"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Title = f"Email Verification | {Query}"
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, JSON_Output_Response, Title, self.The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(Filter_JSON, JSON_Output_Response, f"Email Verification Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, HTML_Output_File_Data, Title, self.The_File_Extensions["Main_Converted"])

                        if Output_file and HTML_Output_File:
                            Output_Connections.Output([Output_file, HTML_Output_File], Link, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Convert_to_JSON(Data):
    Data = str(Data)
    Flickr_Regex = Common.Regex_Handler(Data, Custom_Regex=r"\[(.+)\]")

    if Flickr_Regex:
        # Rewrite the Flickr library's "[Photo(id=b'...', title=b'...')]" repr into a Python literal list of dicts.
        New_Data = Flickr_Regex.group(1).replace("...", "").replace("id=b", "'id': ").replace("title=b", "'title': ").replace("(", "{").replace(")", "}").replace("\'}", "}").replace("}", "\'}")
        New_Data = New_Data.replace("Photo", "")
        New_Data = f"[{New_Data}]"
        # ast.literal_eval only accepts Python literals; eval would execute arbitrary expressions embedded in the response. Requires "import ast" alongside the module's other imports.
        New_Data = ast.literal_eval(New_Data)
        JSON_Object = Common.JSON_Handler(New_Data)
        New_Data = JSON_Object.Dump_JSON()
        return New_Data
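# A quick illustration of the transformation above on a representative input,
# assuming the Flickr library renders results roughly as "Photo(id=b'...', title=b'...')".
# The sample string below is fabricated for demonstration only.
import ast, json

Sample = "[Photo(id=b'49012345', title=b'Some photo')]"
Inner = Sample[1:-1] # Equivalent to the \[(.+)\] capture group.
Literal = Inner.replace("...", "").replace("id=b", "'id': ").replace("title=b", "'title': ").replace("(", "{").replace(")", "}").replace("'}", "}").replace("}", "'}")
Literal = Literal.replace("Photo", "")
Parsed = ast.literal_eval(f"[{Literal}]") # Safe literal parsing; eval would be risky here.
print(json.dumps(Parsed)) # [{"id": "49012345", "title": "Some photo"}]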
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            Main_URL = f"https://www.{self.Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Win_Store_Response, Query, self.The_File_Extension)
            # Note: this pattern only matches en-au item links, regardless of the configured location.
            Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.{self.Domain}/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Win_Store_Response, Regex_Group_1, self.The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List: # Query can be a Title or an ISBN.
            Main_URL = f"http://{self.Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = Common.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Lib_Gen_Response, Query, self.The_File_Extension)
            Lib_Gen_Regex = Common.Regex_Handler(Lib_Gen_Response, Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Findall=True)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{self.Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                    Lib_Item_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"http://{self.Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Lib_Item_Response, Regex, self.The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Logging(Directory, Plugin_Name):

    try:
        Main_File = f"{Plugin_Name}-log-file.log"
        # As with the cache file, the log file sits in the undated parent of the .../YYYY/MM/DD output directory.
        General_Directory_Search = Common.Regex_Handler(Directory, Custom_Regex=r"(.*)\/\d{4}\/\d{2}\/\d{2}")

        if General_Directory_Search:
            Complete_File = os.path.join(General_Directory_Search.group(1), Main_File)
            return Complete_File

    except Exception:
        logging.warning(f"{Common.Date()} - General Library - Failed to initialise logging.")
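# Every Search method in this section repeats the same five lines of logging
# setup around the path returned by Logging(). A small helper like the sketch
# below could consolidate that boilerplate; this is a suggested refactor, not an
# existing function in the library.
import logging, os

def Setup_Plugin_Logging(Directory, Log_File):
    # Route this plugin's log records to its own file, mirroring the inline setup used by the plugins.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
    logger.addHandler(handler)
    return handler # Returned so callers can remove the handler when the plugin finishes.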
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:

                if self.Type == "CIK":
                    Main_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={Query}&owner=exclude&count=40&hidefilings=0'
                    Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
                    Response = Responses["Regular"]

                    try:

                        if 'No matching CIK.' not in Response:
                            Query = str(int(Query))
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, f"edgar-american-business-search-{Query.lower()}", self.The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)
                                    Output_Connections.Output([Output_file], Main_URL, f"American Business Number (EDGAR) {Query}", self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for CIK Search.")

                elif self.Type == "ACN":
                    Main_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?company={Query}&owner=exclude&action=getcompany'
                    Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
                    Response = Responses["Regular"]
                    Filtered_Response = Responses["Filtered"]

                    try:
                        ACN = Common.Regex_Handler(Query, Type="Company_Name")

                        if ACN:
                            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extensions["Main"])
                            Current_Step = 0
                            CIKs_Regex = Common.Regex_Handler(Response, Custom_Regex=r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>", Findall=True)

                            if CIKs_Regex:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)

                                for CIK_URL, ACN in CIKs_Regex:
                                    Full_CIK_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={CIK_URL}&owner=exclude&count=40&hidefilings=0'

                                    if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                                        Current_Responses = Common.Request_Handler(Full_CIK_URL, Filter=True, Host=f"https://www.{self.Domain}")
                                        Current_Response = Current_Responses["Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), self.The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output([Main_File, Output_file], Full_CIK_URL, f"American Business Number (EDGAR) {CIK_URL} for Query {Query}", self.Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_CIK_URL)

                                        else:
                                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                        Current_Step += 1

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Response did not match regular expression.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query did not match regular expression.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for ACN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type.")

            except Exception:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            CRT_Regex = Common.Regex_Handler(Query, Type="Domain")

            if CRT_Regex:
                Request = f"https://{self.Domain}/?q={Query}"
                Responses = Common.Request_Handler(Request, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{self.Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]

                if "<TD class=\"outer\"><I>None found</I></TD>" not in Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Filtered_Response, CRT_Regex.group(1), self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", self.Plugin_Name.lower())
                                Data_to_Cache.append(Request)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        except Exception:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create file.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query does not exist.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
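# crt.sh also exposes a JSON interface, which avoids scraping the HTML table the
# plugin above parses. A minimal sketch using the requests library; the
# output=json parameter is part of crt.sh's public interface, but the response
# handling below is kept deliberately simple.
import requests

def Illustrative_CRT_Subdomains(Domain_Query):
    # Query certificate transparency logs for certificates matching the domain.
    Response = requests.get("https://crt.sh/", params={"q": Domain_Query, "output": "json"}, timeout=30)
    Response.raise_for_status()
    # Each entry's name_value field may hold several names separated by newlines.
    Names = set()
    for Entry in Response.json():
        Names.update(Entry.get("name_value", "").splitlines())
    return sorted(Names)

# Example: Illustrative_CRT_Subdomains("%.example.com") lists certificate subject names under example.com.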
def All_Extensions(self):

    try:
        Local_Plugin_Name = self.Plugin_Name + "-All-Extensions"
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        self.Cached_Data = Cached_Data_Object.Get_Cache()
        logging.info(f"{Common.Date()} {__name__.replace('plugins.', '')} - All Extensions Selected.")
        self.Query_List = General.Convert_to_List(self.Query_List)

        for Query in self.Query_List:
            URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True)

            if URL_Components:
                self.URL_Prefix = URL_Components["Prefix"]
                self.URL_Body = URL_Components["Body"]
                self.URL_Extension = URL_Components["Extension"]

            else:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query, please provide a valid URL.")

            Pool = mpool.ThreadPool(int(multiprocessing.cpu_count()) * int(multiprocessing.cpu_count()))
            Pool_Threads = []

            for Extension in self.Generic_Extensions:

                for suffix in self.Global_Domain_Suffixes:
                    suffix = suffix.replace(".com", "").replace(".co", "")

                    if not self.URL_Extension == suffix:
                        Thread = Pool.apply_async(self.Query_URL, args=(self.URL_Body, Extension + suffix,))
                        Pool_Threads.append(Thread)

            [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
            URL_Domain = self.URL_Body + self.URL_Extension
            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, "\n".join(self.Valid_Results), self.URL_Body, self.The_File_Extensions["Main"])

            if Main_File:

                for Host in self.Valid_Hosts:
                    # str.strip takes a character set, so strip('https://') would also eat leading letters of the hostname; replace() removes the scheme safely.
                    Current_Domain = Host[0].replace("https://", "").replace("http://", "")

                    try:
                        Current_Responses = Common.Request_Handler(Host[0], Filter=True, Host=Host[0], Risky_Plugin=True)
                        Current_Response = Current_Responses["Filtered"]
                        Output_File = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Current_Response, Current_Domain, self.The_File_Extensions["Query"])

                        if Output_File:
                            Output_File_List = [Main_File, Output_File]
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                    except requests.exceptions.ConnectionError:
                        Output_File_List = [Main_File]
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower())
                        Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name)

        Cached_Data_Object.Write_Cache(self.Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Character_Switch(self, Alphabets, Comprehensive_Search=False):

    try:
        Local_Plugin_Name = self.Plugin_Name + "-Character-Switch"
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        self.Cached_Data = Cached_Data_Object.Get_Cache()
        logging.info(f"{Common.Date()} {__name__.replace('plugins.', '')} - Character Switching Selected.")
        self.Query_List = General.Convert_to_List(self.Query_List)

        for Query in self.Query_List:
            URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True)

            if URL_Components:
                self.URL_Prefix = URL_Components["Prefix"]
                self.URL_Body = URL_Components["Body"]
                self.URL_Extension = URL_Components["Extension"]

            else:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query, please provide a valid URL.")

            logging.info(f'{Common.Date()} - Provided domain body - {self.URL_Body}')
            URL_List = list(self.URL_Body.lower())
            Local_Plugin_Name = f"{Local_Plugin_Name}-{Alphabets}"
            Non_Comprehensive_Latin_Limit = 15
            Other_Limit = 10

            if Alphabets == "Latin":

                if not Comprehensive_Search:

                    if len(self.URL_Body) > Non_Comprehensive_Latin_Limit:
                        logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Non_Comprehensive_Latin_Limit)} characters. Condensed punycode domain fuzzing only allows a maximum of {str(Non_Comprehensive_Latin_Limit)} characters.")
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List, Latin=True, Latin_Alternatives=True)

                else:

                    if len(self.URL_Body) > Other_Limit:
                        logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters. Comprehensive punycode domain fuzzing only allows a maximum of {str(Other_Limit)} characters.")
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List, Latin=True, Latin_Alternatives=True, Comprehensive=True)

            elif Alphabets == "Asian":

                if len(self.URL_Body) > Other_Limit:
                    logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters. Punycode domain fuzzing for Asian alphabets only allows a maximum of {str(Other_Limit)} characters.")
                    return None

                else:
                    Altered_URLs = Rotor.Search(URL_List, Asian=True)

            elif Alphabets == "Middle Eastern":

                if len(self.URL_Body) > Other_Limit:
                    logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters. Punycode domain fuzzing for Middle Eastern alphabets only allows a maximum of {str(Other_Limit)} characters.")
                    return None

                else:
                    Altered_URLs = Rotor.Search(URL_List, Middle_Eastern=True)

            elif Alphabets == "Native American":

                if len(self.URL_Body) > Other_Limit:
                    logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters. Punycode domain fuzzing for Native American alphabets only allows a maximum of {str(Other_Limit)} characters.")
                    return None

                else:
                    Altered_URLs = Rotor.Search(URL_List, Native_American=True)

            elif Alphabets == "North African":

                if len(self.URL_Body) > Other_Limit:
                    logging.error(f"{Common.Date()} - {__name__.replace('plugins.', '')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters. Punycode domain fuzzing for North African alphabets only allows a maximum of {str(Other_Limit)} characters.")
                    return None

                else:
                    Altered_URLs = Rotor.Search(URL_List, North_African=True)

            logging.info(f'{Common.Date()} - Generated domain combinations - {", ".join(Altered_URLs)}')
            Pool = mpool.ThreadPool(int(multiprocessing.cpu_count()) * int(multiprocessing.cpu_count()))
            Pool_Threads = []

            for Altered_URL in Altered_URLs:

                if not Altered_URL == self.URL_Body:
                    Thread = Pool.apply_async(self.Query_URL, args=(Altered_URL, self.URL_Extension,))
                    Pool_Threads.append(Thread)

            [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
            logging.info(f'{Common.Date()} {Directory}')
            URL_Domain = self.URL_Body + self.URL_Extension
            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, "\n".join(self.Valid_Results), self.URL_Body, self.The_File_Extensions["Main"])
            Main_File_JSON_Data = General.CSV_to_JSON(Query, self.Valid_Results)
            Main_File_HTML_Data = General.CSV_to_HTML(self.Valid_Results, f"Domain Spoof Results for Query {Query}")
            Main_File_JSON = General.Main_File_Create(Directory, Local_Plugin_Name, Main_File_JSON_Data, self.URL_Body, self.The_File_Extensions["Main_Alternative"])
            Main_File_HTML = General.Main_File_Create(Directory, Local_Plugin_Name, Main_File_HTML_Data, self.URL_Body, self.The_File_Extensions["Query"])

            if Main_File and Main_File_HTML and Main_File_JSON:

                for Host in self.Valid_Hosts:
                    # replace() rather than strip(), which treats its argument as a character set and would mangle hostnames.
                    Current_Domain = Host[0].replace("https://", "").replace("http://", "")

                    try:
                        Current_Responses = Common.Request_Handler(Host[0], Filter=True, Host=Host[0], Risky_Plugin=True)
                        Current_Response = Current_Responses["Filtered"]
                        Output_File = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Current_Response, Current_Domain, self.The_File_Extensions["Query"])

                        if Output_File:
                            Output_File_List = [Main_File, Main_File_HTML, Main_File_JSON, Output_File]
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                    except requests.exceptions.ConnectionError:
                        Output_File_List = [Main_File, Main_File_HTML, Main_File_JSON]
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower())
                        Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name)

        Cached_Data_Object.Write_Cache(self.Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
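# Rotor.Search is not shown in this section; the sketch below illustrates the
# underlying homoglyph idea on a reduced scale. The substitution table is a tiny,
# fabricated sample, and Python's idna codec renders each candidate as punycode.
ILLUSTRATIVE_HOMOGLYPHS = {"a": ["à", "á", "α"], "e": ["è", "é"], "o": ["ο", "ö"]}

def Illustrative_Character_Switch(Domain_Body):
    # Yield domain bodies with exactly one character swapped for a look-alike.
    for Index, Character in enumerate(Domain_Body):
        for Substitute in ILLUSTRATIVE_HOMOGLYPHS.get(Character, []):
            Candidate = Domain_Body[:Index] + Substitute + Domain_Body[Index + 1:]
            yield Candidate, Candidate.encode("idna").decode("ascii") # Unicode form and its xn-- punycode encoding.

# Example: each pair yielded by Illustrative_Character_Switch("example") holds a
# spoofed Unicode label such as "exàmple" together with its xn-- form.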
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True)

            if URL_Components:
                BW_Info = builtwith(Query)

                if BW_Info:
                    BW_JSON_Output = Common.JSON_Handler(BW_Info).Dump_JSON()
                    Query_Domain = URL_Components["Body"] + URL_Components["Extension"]
                    Title = f"Built With | {Query_Domain}"
                    Main_File = General.Main_File_Create(Directory, self.Plugin_Name, BW_JSON_Output, Query_Domain, self.The_File_Extensions["Main"])
                    BW_Search_URL = f"https://{self.Domain}/{Query_Domain}"
                    Responses = Common.Request_Handler(BW_Search_URL, Filter=True, Host=f"https://{self.Domain}")
                    Response = Responses["Filtered"]
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

                    if BW_Search_URL not in Cached_Data and BW_Search_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, Query, self.The_File_Extensions['Query'])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], BW_Search_URL, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(BW_Search_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to get result for provided query.")

            else:
                logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
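# For reference, a minimal standalone use of the builtwith package the plugin
# above relies on. builtwith.parse returns a dict mapping technology categories
# to lists of technology names; the URL below is just an example.
import builtwith, json

Technology_Profile = builtwith.parse("https://example.com")
print(json.dumps(Technology_Profile, indent=4)) # e.g. {"web-servers": ["..."], ...}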
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            URL_Body = f'https://{self.Domain}'
            Main_URL = URL_Body + '/' + Query.lower().replace(' ', '-')
            Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extension)
            Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<tr\>\s+\<td\sclass\=\"name\"\>\s+\<a\shref\=\"([\/\d\w\-\+\?\.]+)\"\>([\/\d\w\-\+\?\.\(\)\s\,\;\:\~\`\!\@\#\$\%\^\&\*\[\]\{\}]+)\<\/a\>\s+\<\/td\>", Findall=True)

            if Regex:
                Current_Step = 0
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                for URL, Title in Regex:
                    Item_URL = URL_Body + URL
                    Current_Response = Common.Request_Handler(Item_URL)
                    Current_Item_Regex = Common.Regex_Handler(Current_Response, Custom_Regex=r"\<button\sclass\=\"btn\sbtn\-primary\spassword\"\s+data\-data\=\"([\-\d\w\?\/]+)\"\s+data\-toggle\=\"modal\"\s+data\-target\=\"\#modal\"\s+\>show\sme\!\<\/button\>")

                    if Current_Item_Regex:

                        try:
                            Detailed_Item_URL = URL_Body + Current_Item_Regex.group(1)
                            Detailed_Responses = Common.Request_Handler(Detailed_Item_URL, Filter=True, Host=f"https://www.{self.Domain}")
                            Detailed_Response = Detailed_Responses["Regular"]
                            JSON_Object = Common.JSON_Handler(Detailed_Response)

                            if JSON_Object.Is_JSON():
                                JSON_Response = JSON_Object.To_JSON_Loads()
                                Output_Response = "<head><title>" + JSON_Response["title"] + "</title></head>\n"
                                Output_Response = Output_Response + JSON_Response["data"]

                            else:
                                Output_Response = Detailed_Responses["Filtered"]

                            if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Output_Response, Title, self.The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Item_URL, General.Get_Title(Item_URL), self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Item_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                        except Exception:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to generate output, may have a blank detailed response.")

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for current result.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for provided query.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            Main_URL = f"http://{self.Domain}/{Query}"
            Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Kik_Item_Regex = Common.Regex_Handler(Response, Custom_Regex=rf"\<h1\sclass\=\"display\-name\"\>(.+)\<\/h1>\s+\<h2\sclass\=\"username\"\>{Query}\<\/h2\>")

            if Kik_Item_Regex and Kik_Item_Regex.group(1) != " ":
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                Title = f"Kik | {Kik_Item_Regex.group(1)}"

                if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                    Output_file = General.Main_File_Create(Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], Main_URL, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(Main_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            else:
                logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query didn't match regex pattern.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if self.Type == "Email":

                if Common.Regex_Handler(Query, Type=self.Type):
                    Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}"
                    Response = Common.Request_Handler(URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = Common.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title(Permalink, Requests=True).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, "Account", self.Task_ID, Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to email regular expression.")

            elif self.Type == "Domain":

                if Common.Regex_Handler(Query, Type=self.Type):
                    Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={Query}"
                    Response = Common.Request_Handler(URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = Common.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title(Permalink, Requests=True).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to domain regular expression.")

            elif self.Type == "IP Address":

                if Common.Regex_Handler(Query, Type="IP"):
                    Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}"
                    Response = Common.Request_Handler(URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = Common.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title(Permalink, Requests=True).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to IP address regular expression.")

            elif self.Type == "AV":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}"
                Response = Common.Request_Handler(URL)
                JSON_Object = Common.JSON_Handler(Response)
                JSON_Response = JSON_Object.To_JSON_Loads()

                if int(JSON_Response.get("response_code")) != 0:
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Permalink = JSON_Response.get("permalink")
                    Permalink_Responses = Common.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                    Permalink_Response = Permalink_Responses["Filtered"]
                    Title = "Threat Crowd | " + General.Get_Title(Permalink, Requests=True).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, "Virus", self.Task_ID, Local_Plugin_Name.lower())

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results.")

            elif self.Type == "Virus Report":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}"
                Response = Common.Request_Handler(URL)
                JSON_Object = Common.JSON_Handler(Response)
                JSON_Response = JSON_Object.To_JSON_Loads()

                if int(JSON_Response.get("response_code")) != 0:
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Permalink = JSON_Response.get("permalink")
                    Permalink_Responses = Common.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                    Permalink_Response = Permalink_Responses["Filtered"]
                    Title = "Threat Crowd | " + General.Get_Title(Permalink, Requests=True).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, "Virus Report", self.Task_ID, Local_Plugin_Name.lower())

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
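# The five branches above differ only in endpoint and query parameter. A compact
# sketch of the same ThreatCrowd v2 API calls, using the endpoints from the code
# above; the response handling is reduced to the response_code/permalink fields
# the plugin checks, and is not the plugin's full output pipeline.
import requests

THREATCROWD_ENDPOINTS = {
    "Email": ("email/report/", "email"),
    "Domain": ("domain/report/", "domain"),
    "IP Address": ("ip/report/", "ip"),
    "AV": ("antivirus/report/", "antivirus"),
    "Virus Report": ("file/report/", "resource"),
}

def Illustrative_ThreatCrowd_Report(Search_Type, Query):
    Endpoint, Parameter = THREATCROWD_ENDPOINTS[Search_Type]
    Response = requests.get(f"https://www.threatcrowd.org/searchApi/v2/{Endpoint}", params={Parameter: Query}, timeout=30)
    Report = Response.json()
    # A response_code of 0 means no results; otherwise the permalink points at the human-readable report.
    return Report.get("permalink") if int(Report.get("response_code", 0)) != 0 else None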
def Search(self):

    try:
        Data_to_Cache = []
        Results = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Reddit_Details = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:
                Reddit_Connection = praw.Reddit(client_id=Reddit_Details[0], client_secret=Reddit_Details[1], user_agent=Reddit_Details[2], username=Reddit_Details[3], password=Reddit_Details[4])
                All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

                for Subreddit in All_Subreddits.search(Query, limit=self.Limit): # The limit, subreddit, and search query are controlled by the web app.
                    Results.append(Subreddit.url)

            except Exception:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to get results. Are you connected to the internet?")

            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

            for Result in Results: # Each Result is a submission URL string, so it is matched and cached as-is.

                if Result not in Cached_Data and Result not in Data_to_Cache:

                    try:
                        Reddit_Regex = Common.Regex_Handler(Result, Custom_Regex=r"https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/")

                        if Reddit_Regex:
                            Reddit_Responses = Common.Request_Handler(Result, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                            Reddit_Response = Reddit_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Reddit_Response, Reddit_Regex.group(3), self.The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Output_file], Result, General.Get_Title(Result), self.Plugin_Name.lower())
                                Data_to_Cache.append(Result)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create file.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
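# Load_Configuration is not shown; from the indexing above it must return at
# least six values: client_id, client_secret, user_agent, username, password,
# and the subreddit(s) to search. A minimal sketch, assuming a JSON config file;
# the path and key names here are illustrative, not the project's actual ones.
import json

def Illustrative_Load_Reddit_Configuration(Config_Path="config.json"):
    with open(Config_Path) as Config_File:
        Reddit_Config = json.load(Config_File)["reddit"]
    return (Reddit_Config["client_id"], Reddit_Config["client_secret"],
            Reddit_Config["user_agent"], Reddit_Config["username"],
            Reddit_Config["password"], Reddit_Config["subreddits"])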
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "ABN":
                    Main_URL = f'https://{Domain}/ABN/View?id=' + Query
                    Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{Domain}")
                    Response = Responses["Regular"]

                    try:

                        if 'Error searching ABN Lookup' not in Response:
                            Query = str(int(Query))
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name)
                                    # replace() rather than strip(): strip(" | ABN Lookup") would remove a character set from both ends, not the suffix.
                                    Output_Connections.Output([Output_file], Main_URL, General.Get_Title(Main_URL).replace(" | ABN Lookup", ""), Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - ABN Lookup returned error.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for ABN Search.")

                elif Type == "ACN":
                    Main_URL = f'https://{Domain}/Search/Run'
                    Data = {
                        'SearchParameters.SearchText': Query,
                        'SearchParameters.AllNames': 'true',
                        'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton': 'Search'
                    }
                    Responses = Common.Request_Handler(Main_URL, Method="POST", Filter=True, Host=f"https://www.{Domain}", Data=Data)
                    Response = Responses["Regular"]
                    Filtered_Response = Responses["Filtered"]
                    Limit = General.Get_Limit(Limit)

                    try:
                        ACN_Regex = Common.Regex_Handler(Query, Type="Company_Name")

                        if ACN_Regex:
                            Main_File = General.Main_File_Create(Directory, Plugin_Name, Filtered_Response, Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            ABNs_Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,", Findall=True)

                            if ABNs_Regex:
                                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name)

                                for ABN_URL, ACN in ABNs_Regex:
                                    Full_ABN_URL = f'https://{Domain}/ABN/View?abn={ABN_URL}'

                                    if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                        ACN = ACN.rstrip()
                                        Current_Responses = Common.Request_Handler(Full_ABN_URL, Filter=True, Host=f"https://www.{Domain}")
                                        Current_Response = Current_Responses["Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output([Main_File, Output_file], Full_ABN_URL, General.Get_Title(Full_ABN_URL).replace(" | ABN Lookup", ""), Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_ABN_URL)

                                        else:
                                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                        Current_Step += 1

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Response did not match regular expression.")

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Query did not match regular expression.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for ACN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid request type.")

            except Exception:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Hunter_API_Key = Load_Configuration() # The configuration provides the Hunter.io API key consumed by PyHunter.
        API_Session = PyHunter(Hunter_API_Key)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:

                if Type == "Domain":

                    if Common.Regex_Handler(Query, Type="Domain"):
                        Local_Plugin_Name = Plugin_Name + "-Domain"
                        API_Response = API_Session.domain_search(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["domain"] and API_Response['emails']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["emails"]:
                                Current_Email_Address = Hunter_Item["value"]
                                Current_Hunter_Item_Host = f"https://{Domain}/verify/{Current_Email_Address}"
                                Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Domain}")
                                Filtered_Response = Current_Hunter_Item_Responses["Filtered"]
                                Title = "Hunter | " + Current_Email_Address

                                if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Email_Address)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                elif Type == "Email":

                    if Common.Regex_Handler(Query, Type="Email"):
                        Local_Plugin_Name = Plugin_Name + "-Email"
                        API_Response = API_Session.email_verifier(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["email"] and API_Response['sources']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account Source", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["sources"]:
                                Current_Hunter_Item_Host = Hunter_Item["uri"]
                                Current_Hunter_Item_Domain = Hunter_Item["domain"]

                                if 'http://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"http://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                elif 'https://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                else:
                                    Filtered_Response = Common.Request_Handler(Current_Hunter_Item_Host)

                                Title = "Hunter | " + Current_Hunter_Item_Host

                                if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Hunter_Item_Host)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

            except Exception as e:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to complete task - {str(e)}")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
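# A standalone reference for the two pyhunter calls used above; the API key is a
# placeholder. domain_search returns the "domain"/"emails" structure and
# email_verifier the "email"/"sources" structure that the branches above unpack.
from pyhunter import PyHunter

Hunter_Session = PyHunter("YOUR_HUNTER_API_KEY") # Placeholder key.
Domain_Report = Hunter_Session.domain_search("example.com") # {"domain": ..., "emails": [...], ...}
Email_Report = Hunter_Session.email_verifier("user@example.com") # {"email": ..., "sources": [...], ...}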
def Address_Search(self):

    try:
        Local_Plugin_Name = self.Plugin_Name + "-Address-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if self.Type in ("btc", "bch"):
                Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"([\d\w]{26,34})")

            elif self.Type == "eth":
                Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"(0x[\w\d]{40})")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid type provided.")
                continue  # Query_Regex would otherwise be unbound below.

            if Query_Regex:
                Main_URL = f"https://www.{self.Domain}/{self.Type}/address/{Query}"
                Main_Response = Common.Request_Handler(Main_URL)

                if self.Type == "btc":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"\/btc\/tx\/([\d\w]{64})", Findall=True)

                elif self.Type == "bch":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"([\d\w]{64})", Findall=True)

                elif self.Type == "eth":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"(0x[\d\w]{64})", Findall=True)

                if Transaction_Regex:
                    Current_Step = 0
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

                    for Transaction in Transaction_Regex:
                        Query_URL = f"https://www.{self.Domain}/{self.Type}/tx/{Transaction}"

                        if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://www.{self.Domain}")
                            Transaction_Response = Transaction_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, self.The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), self.Plugin_Name.lower())
                                Data_to_Cache.append(Query_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
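# A standalone sketch of the address patterns used in Address_Search, runnable
# with only the standard library. Note that the broad [\d\w]{26,34} pattern is
# looser than strict Base58 validation; it is shown here as the plugin uses it.
import re

BTC_BCH_Pattern = re.compile(r"([\d\w]{26,34})")
ETH_Pattern = re.compile(r"(0x[\w\d]{40})")

print(bool(BTC_BCH_Pattern.fullmatch("1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa")))  # True - 34-character BTC address.
print(bool(ETH_Pattern.fullmatch("0x" + "ab12" * 10)))  # True - "0x" plus 40 hex characters.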
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Ebay_API_Key = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:
                API_Request = Connection(appid=Ebay_API_Key, config_file=None)
                API_Response = API_Request.execute('findItemsAdvanced', {'keywords': Query})
                JSON_Object = Common.JSON_Handler(API_Response.dict())  # One handler; the duplicate handler and discarded Dump_JSON call were removed.
                JSON_Output_Response = JSON_Object.Dump_JSON()
                JSON_Response = JSON_Object.To_JSON_Loads()
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])

                if JSON_Response["ack"] == "Success":
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                    Current_Step = 0

                    for JSON_Line in JSON_Response['searchResult']['item']:
                        Ebay_Item_URL = JSON_Line['viewItemURL']
                        Title = "Ebay | " + General.Get_Title(Ebay_Item_URL)

                        if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Ebay_Item_Regex = Common.Regex_Handler(Ebay_Item_URL, Custom_Regex=r"https\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+")
                            Ebay_Item_Responses = Common.Request_Handler(Ebay_Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                            Ebay_Item_Response = Ebay_Item_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Ebay_Item_Response, Ebay_Item_Regex.group(1).rstrip("-"), self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Ebay_Item_URL, Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Ebay_Item_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found.")

            except Exception as e:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make API call - {str(e)}")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
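# A minimal sketch of the ebaysdk Finding API call made above, assuming the
# third-party "ebaysdk" package (pip install ebaysdk) and a valid eBay
# application ID. The app ID and keywords are placeholders.
from ebaysdk.finding import Connection

api = Connection(appid="YOUR-EBAY-APP-ID", config_file=None)  # Hypothetical application ID.
response = api.execute("findItemsAdvanced", {"keywords": "raspberry pi"})
result = response.dict()

if result.get("ack") == "Success":
    for item in result["searchResult"]["item"]:
        print(item["viewItemURL"])  # The same per-item field the plugin caches.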
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:

                if self.Type == "NZBN":
                    Main_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/search?q={Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{self.Domain}")
                    Response = Responses["Filtered"]

                    try:

                        if 'An error has occurred and the requested action cannot be performed.' not in Response:
                            Query = str(int(Query))  # Raises ValueError for non-numeric NZBNs, caught below.

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, f"new-zealand-business-number-{Query.lower()}", self.The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)
                                    Output_Connections.Output([Output_file], Main_URL, f"New Zealand Business Number {Query}", self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for NZBN Search.")

                elif self.Type == "NZCN":

                    try:
                        URL_Query = urllib.parse.quote(Query)
                        Main_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/search?q={URL_Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit={str(self.Limit)}&sf=&sd=&advancedPanel=true&mode=advanced#results'
                        Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{self.Domain}")
                        Response = Responses["Filtered"]
                        NZCN_Regex = Common.Regex_Handler(Query, Type="Company_Name")

                        if NZCN_Regex:
                            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Response, Query, self.The_File_Extension)
                            NZBNs_Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d+)\)\s\(NZBN\:\s(\d+)\)", Findall=True)

                            if NZBNs_Regex:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)

                                for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                    Full_NZBN_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/{NZ_ID}?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                    if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                        Current_Response = Common.Request_Handler(Full_NZBN_URL)
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Current_Response), NZCN.replace(' ', '-'), self.The_File_Extension)

                                        if Output_file:
                                            Output_Connections.Output([Main_File, Output_file], Full_NZBN_URL, f"New Zealand Business Number {NZ_ID} for Query {Query}", self.Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_NZBN_URL)

                                        else:
                                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Response did not match regular expression.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query did not match regular expression.")

                    except:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for NZCN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type.")

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
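# A small sketch of the query encoding used in the NZCN branch above: company
# names are percent-encoded before being interpolated into the search URL.
# Standard library only; the host is an assumption for illustration.
import urllib.parse

Domain = "app.companiesoffice.govt.nz"  # Assumed host; the plugin reads this from self.Domain.
Query = "Example Company & Sons"
URL_Query = urllib.parse.quote(Query)
print(URL_Query)  # Example%20Company%20%26%20Sons
print(f"https://{Domain}/companies/app/ui/pages/companies/search?q={URL_Query}")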
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if self.Query_List.index(Query) != 0:
                time.sleep(5)  # Pause between consecutive queries to avoid hammering the host.

            Main_URL = f"https://{self.Domain}/results_normal.php"
            body = {"ran": "", "username": Query}
            Responses = Common.Request_Handler(Main_URL, Method="POST", Data=body, Filter=True, Host=f"https://{self.Domain}", Optional_Headers={"Content-Type": "application/x-www-form-urlencoded"})
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extension)
            Link_Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<a\sclass\=\"pretty-button results-button\"\shref\=\"(https?:\/\/(www\.)?[-a-zA-Z0-9@:%_\+~#=\.\/\?]+)\"\starget\=\"\_blank\"\>View Profile\<\/a\>", Findall=True)
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

            if Link_Regex:
                Current_Step = 0

                for Item_URL, WWW in Link_Regex:
                    Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"https://{self.Domain}")
                    Response = Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, Item_URL, self.The_File_Extension)

                        if Output_file:
                            Title = f"Username Search | {Item_URL}"
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
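# A rough, standalone equivalent of the POST that Common.Request_Handler issues
# above, written with the requests library. The host is hypothetical and the
# form fields mirror the plugin's body dict; treat this as an assumption about
# what the wrapper does, not its actual implementation.
import requests

Domain = "usersearch.org"  # Hypothetical host for illustration.
body = {"ran": "", "username": "example_user"}
response = requests.post(
    f"https://{Domain}/results_normal.php",
    data=body,  # requests form-encodes the dict; the header below mirrors the plugin's Optional_Headers.
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
print(response.status_code, len(response.text))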
def Search(Query_List, Task_ID, Limit=10):

    try:
        from urllib.parse import urlparse  # Local import so the domain-extraction fix below is self-contained.
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Pull_URL = f"https://{Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={Limit}"
                JSON_Object = Common.JSON_Handler(Common.Request_Handler(Pull_URL))
                Results = JSON_Object.To_JSON_Loads()
                Indented_Results = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Phishing", Task_ID, Plugin_Name.lower())
                Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_Results, Query, The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    Current_Domain = urlparse(Current_Link).netloc  # str.strip("https://") removed characters, not the scheme prefix; parse the URL instead.
                    Current_Title = Result["title"]

                    try:
                        Current_Result = Common.Request_Handler(Current_Link, Filter=True, Risky_Plugin=True, Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        Response_Regex = Common.Regex_Handler(Current_Result["Regular"], Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")  # Search the raw HTML, not the response dict.
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Output_file_Query, Plugin_Name, Current_Result_Filtered, Current_Domain, The_File_Extensions["Query"])

                            if Output_file:

                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1).strip()
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower())

                                elif "Phishstats" not in Current_Title:
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower())

                                else:
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, General.Get_Title(Current_Link), Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request for result, link may no longer be available.")

            except:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
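# Why the urlparse fix above matters: str.strip("https://") removes any of the
# characters h, t, p, s, ":" and "/" from both ends of the string, so it can
# also eat leading letters of the hostname. Parsing the URL is the reliable way
# to extract the domain. Standard library only.
from urllib.parse import urlparse

link = "https://shady-site.example/login"
print(link.strip("https://"))  # "ady-site.example/login" - the leading "sh" was stripped as well.
print(urlparse(link).netloc)   # "shady-site.example"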
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        VT_API_Key = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if self.Type == "Domain":

                if Common.Regex_Handler(Query, Type=self.Type):
                    Response = Common.Request_Handler(f"https://www.{self.Domain}/api/v3/domains/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        JSON_Object = Common.JSON_Handler(Response.text)
                        JSON_Object.To_JSON_Loads()
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "Domain Information", self.Task_ID, self.Plugin_Name.lower())
                        Link = f"https://www.{self.Domain}/gui/domain/{Query}/detection"  # Fixed: the path segment is the literal "domain"; "self.Domain" had been pasted into the URL.
                        Main_URL_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://www.{self.Domain}")
                        Main_URL_Response = Main_URL_Responses["Filtered"]
                        Title = f"Virus Total Domain | {Query}"

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Main_URL_Response, Link, self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            elif self.Type == "IP":

                if Common.Regex_Handler(Query, Type=self.Type):
                    Response = Common.Request_Handler(f"https://www.{self.Domain}/api/v3/ip_addresses/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        JSON_Object = Common.JSON_Handler(Response.text)
                        JSON_Object.To_JSON_Loads()
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "IP Address Information", self.Task_ID, self.Plugin_Name.lower())
                        Link = f"https://www.{self.Domain}/gui/ip-address/{Query}/detection"
                        Main_URL_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://www.{self.Domain}")
                        Main_URL_Response = Main_URL_Responses["Filtered"]
                        Title = f"Virus Total IP Address | {Query}"

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Main_URL_Response, Link, self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            elif self.Type == "URL":

                if Common.Regex_Handler(Query, Type=self.Type):
                    import base64  # Local import kept from the original; VT v3 identifies URLs by unpadded URL-safe base64.
                    Query_Encoded = base64.urlsafe_b64encode(Query.encode()).decode().strip("=")
                    Response = Common.Request_Handler(f"https://www.{self.Domain}/api/v3/urls/{Query_Encoded}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        JSON_Object = Common.JSON_Handler(Response.text)
                        JSON_Object.To_JSON_Loads()
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "Domain Information", self.Task_ID, self.Plugin_Name.lower())
                        Link = f"https://www.{self.Domain}/gui/url/{Query_Encoded}/detection"
                        Main_URL_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://www.{self.Domain}")
                        Main_URL_Response = Main_URL_Responses["Filtered"]
                        Title = f"Virus Total URL | {Query}"

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Main_URL_Response, Link, self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            elif self.Type == "Hash":
                Response = Common.Request_Handler(f"https://www.{self.Domain}/api/v3/files/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                if Response.status_code == 200:
                    JSON_Object = Common.JSON_Handler(Response.text)
                    JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "Virus", self.Task_ID, self.Plugin_Name.lower())
                    Link = f"https://www.{self.Domain}/gui/file/{Query}/detection"
                    Main_URL_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://www.{self.Domain}")
                    Main_URL_Response = Main_URL_Responses["Filtered"]
                    Title = f"Virus Total File | {Query}"

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Main_URL_Response, Link, self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
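# The VirusTotal v3 API identifies a URL by its unpadded URL-safe base64 form,
# which is exactly what the "URL" branch above computes. Standard library only;
# the example URL is a placeholder.
import base64

url = "http://example.com/path"
url_id = base64.urlsafe_b64encode(url.encode()).decode().strip("=")
print(url_id)  # aHR0cDovL2V4YW1wbGUuY29tL3BhdGg
print(f"https://www.virustotal.com/api/v3/urls/{url_id}")  # GET this with an "x-apikey" header.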
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Email_Rep_API_Key = Load_Configuration()

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="Email"):
                API = EmailRep(Email_Rep_API_Key)
                JSON_Output_Response = API.query(Query)
                Link = f"https://{Domain}/{Query}"
                JSON_Object = Common.JSON_Handler(JSON_Output_Response)
                JSON_Output_Response = JSON_Object.Dump_JSON()
                JSON_Response = JSON_Object.To_JSON_Loads()

                if JSON_Response["reputation"] != "none":
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():

                        if Key != "details":
                            Table_JSON[Key] = Value

                        else:

                            for Det_Key, Det_Val in JSON_Response["details"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Email Information", Task_ID, Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{Domain}")
                        Filtered_Response = Responses["Filtered"]
                        Title = f"Email Reputation | {Query}"
                        Main_File = General.Main_File_Create(Directory, Concat_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Concat_Plugin_Name, Filtered_Response, Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
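# The table-flattening step above in isolation: EmailRep returns a nested
# "details" object, and the plugin hoists its keys up beside the top-level
# fields so everything renders as one flat row. The response values here are
# fabricated for illustration.
Example_Response = {
    "email": "user@example.com",
    "reputation": "low",
    "details": {"blacklisted": True, "malicious_activity": False},
}

Table_JSON = {}

for Key, Value in Example_Response.items():

    if Key != "details":
        Table_JSON[Key] = Value

    else:

        for Det_Key, Det_Val in Value.items():
            Table_JSON[Det_Key] = Det_Val

print(Table_JSON)  # {'email': ..., 'reputation': 'low', 'blacklisted': True, 'malicious_activity': False}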
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:
                Pull_URL = f"https://{self.Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={self.Limit}"
                JSON_Object = Common.JSON_Handler(Common.Request_Handler(Pull_URL))
                Results = JSON_Object.To_JSON_Loads()
                Indented_Results = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_Results, Query, self.The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    Current_Domain = urlparse(Current_Link).netloc
                    Current_Title = Result["title"]

                    try:
                        Response = socket.gethostbyname(Current_Domain)

                    except:
                        logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve hostname {Current_Domain} to an IP address. Skipping.")
                        Response = None

                    if Response:
                        Current_Result = Common.Request_Handler(Current_Link, Filter=True, Risky_Plugin=True, Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        Response_Regex = Common.Regex_Handler(Current_Result["Regular"], Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")  # Search the raw HTML, not the response dict.
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Output_file_Query, self.Plugin_Name, Current_Result_Filtered, Current_Domain, self.The_File_Extensions["Query"])

                            if Output_file:

                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1).strip()
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, self.Plugin_Name.lower())

                                elif "Phishstats" not in Current_Title:
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, self.Plugin_Name.lower())

                                else:
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, General.Get_Title(Current_Link), self.Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve DNS, this link probably isn't live.")

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
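# The DNS liveness pre-check used above, in isolation: resolving the hostname
# with socket.gethostbyname before fetching avoids long HTTP timeouts on dead
# phishing domains. Standard library only; the hostnames are placeholders.
import socket

for Hostname in ("example.com", "definitely-not-a-real-domain.invalid"):
    try:
        print(Hostname, "->", socket.gethostbyname(Hostname))
    except socket.gaierror:
        print(Hostname, "-> does not resolve, skipping fetch")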
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?")
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Object.Dump_JSON(), Query, The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Application", Task_ID, Concat_Plugin_Name)

                    for JSON_Item in JSON_Response['results']:  # Renamed from JSON_Object to avoid shadowing the JSON handler above.
                        JSON_Item_Responses = Common.Request_Handler(JSON_Item['artistViewUrl'], Filter=True, Host=f"https://{Domain}")
                        JSON_Item_Response = JSON_Item_Responses["Filtered"]

                        if JSON_Item['artistViewUrl'] not in Cached_Data and JSON_Item['artistViewUrl'] not in Data_to_Cache:
                            Apple_Store_Regex = Common.Regex_Handler(JSON_Item['artistViewUrl'], Custom_Regex=r"https\:\/\/apps\.apple\.com\/" + rf"{Location}" + r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Item_Response, Apple_Store_Regex.group(1), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], JSON_Item['artistViewUrl'], General.Get_Title(JSON_Item['artistViewUrl']), Concat_Plugin_Name)
                                    Data_to_Cache.append(JSON_Item['artistViewUrl'])

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Query returned no results.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Response did not contain a result count.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
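# A direct sketch of the iTunes Search API request built above, against the
# public endpoint at itunes.apple.com. Uses the requests library for brevity;
# the parameters mirror the plugin's term/country/entity/limit query string.
import requests

params = {"term": "weather", "country": "us", "entity": "software", "limit": 10}
data = requests.get("https://itunes.apple.com/search", params=params).json()

if data.get("resultCount", 0) > 0:
    for item in data["results"]:
        print(item.get("artistViewUrl"))  # The developer-page URL the plugin follows and caches.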
def Transaction_Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type != "monero":

                if Type in ("btc", "bch"):
                    Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"[\d\w]{64}")

                elif Type == "eth":
                    Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"(0x[\d\w]{64})")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")
                    continue  # Query_Regex would otherwise be unbound below.

                if Query_Regex:
                    Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}"
                    Main_Response = Common.Request_Handler(Main_URL)

                    if Type == "btc":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"\/btc\/address\/([\d\w]{26,34})", Findall=True)

                    elif Type == "bch":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"([\d\w]{42})", Findall=True)

                    elif Type == "eth":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"(0x[\w\d]{40})", Findall=True)

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://www.{Domain}")
                                Transaction_Response = Transaction_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

            else:
                Alt_Domain = "localmonero.co"
                Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}"
                Transaction_Response = Common.Request_Handler(Query_URL)

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://{Alt_Domain}")
                    Transaction_Response = Transaction_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Alt_Domain, "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL, Requests=True), Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
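# The transaction-to-address pivot above in miniature: given the HTML of a
# blockchair-style transaction page, pull out the /btc/address/... links. The
# HTML snippet is fabricated for illustration.
import re

Sample_HTML = '<a href="/btc/address/1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa">output</a>'
Addresses = re.findall(r"\/btc\/address\/([\d\w]{26,34})", Sample_HTML)
print(Addresses)  # ['1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa']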