def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            BSB_Search_URL = f"https://www.{self.Domain}/{Query}.html"
            Responses = Common.Request_Handler(BSB_Search_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Filtered"]
            Error_Regex = Common.Regex_Handler(Response, Custom_Regex=r"Correct\sthe\sfollowing\serrors")
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

            if not Error_Regex:

                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, Query, self.The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], BSB_Search_URL, General.Get_Title(BSB_Search_URL), self.Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query returned error, probably does not exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "IP Address Information", Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Output_Response, Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(JSON_Response, JSON_Output_Response, f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, HTML_Output_File_Data, Title.replace(" ", "-"), The_File_Extensions["Main_Converted"])

                    if Output_file and HTML_Output_File:
                        Output_Connections.Output([Output_file, HTML_Output_File], Result_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
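# Load_Configuration() above is defined elsewhere in this plugin and is not shown
# in this section. The sketch below illustrates one plausible implementation,
# assuming the access key lives in a JSON configuration file; the file name
# ("config.json") and the "ipstack"/"api_key" layout are hypothetical, not the
# project's verified schema.
def Load_Configuration_Sketch():
    import json  # Local import keeps the sketch self-contained.

    with open("config.json") as Config_File:
        Configuration_Data = json.load(Config_File)

    # Return the access_key string used in the ipstack request above.
    return Configuration_Data["ipstack"]["api_key"]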
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if Common.Regex_Handler(Query, Type="Email"):
                Link = f"https://{self.Domain}/home/verify-as-guest/{Query}"
                JSON_Response = Common.Request_Handler(Link)
                JSON_Object = Common.JSON_Handler(JSON_Response)

                if JSON_Object.Is_JSON():
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():

                        if Key != "response":
                            Table_JSON[Key] = Value

                        else:

                            for Det_Key, Det_Val in JSON_Response["response"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Title = f"Email Verification | {Query}"
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, JSON_Output_Response, Title, self.The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(Filter_JSON, JSON_Output_Response, f"Email Verification Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, HTML_Output_File_Data, Title, self.The_File_Extensions["Main_Converted"])

                        if Output_file and HTML_Output_File:
                            Output_Connections.Output([Output_file, HTML_Output_File], Link, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def General_Pull(self, Handle, Directory, API):

    try:
        Data_to_Cache = []
        JSON_Response = []
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Latest_Tweets = API.user_timeline(screen_name=Handle, count=self.Limit)

        for Tweet in Latest_Tweets:

            try:
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name,
                    'url': Tweet.entities['urls'][0]["expanded_url"]
                })

            except (IndexError, KeyError):  # The tweet carries no URL entity.
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name
                })

        JSON_Output = Common.JSON_Handler(JSON_Response).Dump_JSON()
        Output_Connections = General.Connections(Handle, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
        Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output, Handle, self.The_File_Extensions["Main"])

        for JSON_Item in JSON_Response:

            if all(Item in JSON_Item for Item in ['id', 'url', 'text']):
                Link = JSON_Item['url']

                if Link not in Cached_Data and Link not in Data_to_Cache:
                    Title = "Twitter | " + JSON_Item['text']
                    Item_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{self.Domain}")
                    Item_Response = Item_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(Directory, Handle, self.Plugin_Name, Item_Response, str(JSON_Item['id']), self.The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(Link)

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Output file not returned.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Insufficient parameters provided.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
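# General_Pull() above expects an authenticated Tweepy API object and a handle to
# pull. A minimal usage sketch, assuming Tweepy v3-style OAuth credentials (the
# credential placeholders and the Plugin_Search instance are illustrative only,
# not part of this plugin):
#
#   import tweepy
#   auth = tweepy.OAuthHandler("CONSUMER_KEY", "CONSUMER_SECRET")
#   auth.set_access_token("ACCESS_TOKEN", "ACCESS_TOKEN_SECRET")
#   API = tweepy.API(auth)
#   Plugin_Search.General_Pull("twitter", Directory, API)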
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            Main_URL = f"https://www.{self.Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Win_Store_Response, Query, self.The_File_Extension)
            # Match product links for the configured locale rather than a hard-coded "en-au".
            Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=rf"\/en\-{Location}\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.{self.Domain}/en-{Location}/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Win_Store_Response, Regex_Group_1, self.The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:  # Query can be a title or an ISBN.
            Main_URL = f"http://{self.Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = Common.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Lib_Gen_Response, Query, self.The_File_Extension)
            Lib_Gen_Regex = Common.Regex_Handler(Lib_Gen_Response, Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Findall=True)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{self.Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                    Lib_Item_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"http://{self.Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Lib_Item_Response, Regex, self.The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Headers_Custom = {"Referer": f"https://www.doingbusiness.org/en/data/exploreeconomies/{Query}"}
            Main_URL = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{Query}"
            Doing_Business_Response = Common.Request_Handler(Main_URL, Optional_Headers=Headers_Custom)
            JSON_Object = Common.JSON_Handler(Doing_Business_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()

            if 'message' not in JSON_Response:
                Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                Item_URL = f"https://www.{Domain}/en/data/exploreeconomies/{Query}"
                Title = f"Doing Business | {Query}"
                Current_Doing_Business_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"https://www.{Domain}")
                Current_Doing_Business_Response = Current_Doing_Business_Responses["Filtered"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Current_Doing_Business_Response, Query, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Economic Details", Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

            else:
                logging.info(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Provided query returned no results.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Response = Common.Request_Handler(f"https://tpbc.herokuapp.com/search/{Query.replace(' ', '+')}/?sort=seeds_desc")
            JSON_Object = Common.JSON_Handler(Response)
            Response = JSON_Object.To_JSON_Loads()
            JSON_Response = JSON_Object.Dump_JSON()
            Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Response, Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Hunter_API_Key = Load_Configuration()
        API_Session = PyHunter(Hunter_API_Key)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:

                if Type == "Domain":

                    if Common.Regex_Handler(Query, Type="Domain"):
                        Local_Plugin_Name = Plugin_Name + "-Domain"
                        API_Response = API_Session.domain_search(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["domain"] and API_Response['emails']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["emails"]:
                                Current_Email_Address = Hunter_Item["value"]
                                Current_Hunter_Item_Host = f"https://{Domain}/verify/{Current_Email_Address}"
                                Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Domain}")
                                Filtered_Response = Current_Hunter_Item_Responses["Filtered"]
                                Title = "Hunter | " + Current_Email_Address

                                if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Email_Address)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                elif Type == "Email":

                    if Common.Regex_Handler(Query, Type="Email"):
                        Local_Plugin_Name = Plugin_Name + "-Email"
                        API_Response = API_Session.email_verifier(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["email"] and API_Response['sources']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account Source", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["sources"]:
                                Current_Hunter_Item_Host = Hunter_Item["uri"]
                                Current_Hunter_Item_Domain = Hunter_Item["domain"]

                                if 'http://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"http://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                elif 'https://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                else:
                                    Filtered_Response = Common.Request_Handler(Current_Hunter_Item_Host)

                                Title = "Hunter | " + Current_Hunter_Item_Host

                                if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Hunter_Item_Host)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

            except Exception as e:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to complete task - {str(e)}")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            CRT_Regex = Common.Regex_Handler(Query, Type="Domain")

            if CRT_Regex:
                Request = f"https://{self.Domain}/?q={Query}"
                Responses = Common.Request_Handler(Request, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{self.Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]

                if "<TD class=\"outer\"><I>None found</I></TD>" not in Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name.lower(), Filtered_Response, CRT_Regex.group(1), self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", self.Plugin_Name.lower())
                                Data_to_Cache.append(Request)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        except Exception:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create file.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query does not exist.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Transaction_Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type != "monero":

                if Type == "btc" or Type == "bch":
                    Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"[\d\w]{64}")

                elif Type == "eth":
                    Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"(0x[\d\w]{64})")

                else:
                    Query_Regex = None
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid type provided.")

                if Query_Regex:
                    Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}"
                    Main_Response = Common.Request_Handler(Main_URL)

                    if Type == "btc":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"\/btc\/address\/([\d\w]{26,34})", Findall=True)

                    elif Type == "bch":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"([\d\w]{42})", Findall=True)

                    elif Type == "eth":
                        Address_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"(0x[\w\d]{40})", Findall=True)

                    else:
                        Address_Regex = None
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid type provided.")

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://www.{Domain}")
                                Transaction_Response = Transaction_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to match regular expression.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to match regular expression.")

            else:
                Alt_Domain = "localmonero.co"
                Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}"
                Transaction_Response = Common.Request_Handler(Query_URL)

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://{Alt_Domain}")
                    Transaction_Response = Transaction_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Alt_Domain, "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL, Requests=True), Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "pin":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = f"https://api.{Domain}/v1/pins/{Query}/?access_token={Load_Configuration()}&fields=id%2Clink%2Cnote%2Curl%2Ccreated_at%2Cmedia%2Coriginal_link%2Cmetadata%2Ccounts%2Ccolor%2Cboard%2Cattribution"
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Result_Title = "Pinterest | " + Search_Response["data"]["metadata"]["link"]["title"]
                    Result_URL = Search_Response["data"]["url"]
                    Search_Result_Response = Common.Request_Handler(Result_URL)

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Media", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Rate limit exceeded, try again later.")

            elif Type == "board":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = f"https://api.pinterest.com/v1/boards/{Query}/pins/?access_token={Load_Configuration()}&fields=id%2Clink%2Cnote%2Curl%2Coriginal_link%2Cmetadata%2Cmedia%2Cimage%2Ccreator%2Ccreated_at%2Ccounts%2Ccolor%2Cboard%2Cattribution&limit={str(Limit)}"
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, "pinterest.com", "Social Media - Page", Task_ID, Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Response in Search_Response["data"]:
                        Result_Title = "Pinterest | " + Response["note"]
                        Result_URL = Response["url"]
                        Search_Result_Response = Common.Request_Handler(Result_URL)

                        if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                                Data_to_Cache.append(Result_URL)
                                Current_Step += 1

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Rate limit exceeded, try again later.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            vulners_api = vulners.Vulners(api_key=Load_Configuration())
            Search_Response = vulners_api.search(Query, limit=int(self.Limit))
            JSON_Response = Common.JSON_Handler(Search_Response).Dump_JSON()
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Response, Query, self.The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

            for Search_Result in Search_Response:

                if Search_Result["bulletinFamily"] not in self.Unacceptable_Bulletins:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["vhref"]
                    Search_Result_Responses = Common.Request_Handler(Result_URL, Filter=True, Host=f"https://{self.Domain}")
                    Search_Result_Response = Search_Result_Responses["Filtered"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Search_Result_Response, Result_Title, self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Skipping as bulletin type is not supported.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Vehicle Details", Task_ID, Concat_Plugin_Name)

            for State in States:
                Post_URL = f'https://{Domain}/bin/wesfarmers/search/vehiclerego'
                data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
                headers = {
                    'Content-Type': 'text/plain;charset=UTF-8',
                    'Accept': '*/*',
                    'Accept-Encoding': 'gzip, deflate, br',
                    'Referer': f'https://{Domain}/motor/get-quote',
                    'Origin': f'https://{Domain}',
                    'Host': Domain
                }
                Registration_Response = Common.Request_Handler(Post_URL, Method="POST", Data=data, Optional_Headers=headers)
                JSON_Object = Common.JSON_Handler(Registration_Response)
                Registration_Response = JSON_Object.To_JSON_Loads()
                Indented_JSON_Response = JSON_Object.Dump_JSON()

                try:
                    Title = "Vehicle Registration | " + Registration_Response['vehicles'][0]['make'] + " " + Registration_Response['vehicles'][0]['model']
                    Item_URL = Post_URL + "?" + Query

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Indented_JSON_Response, Title.replace(" ", "-"), The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(Registration_Response["vehicles"], Indented_JSON_Response, f"Vehicle Registration Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, HTML_Output_File_Data, Title.replace(" ", "-"), The_File_Extensions["Main_Converted"])

                        if Output_file and HTML_Output_File:
                            Output_Connections.Output([Output_file, HTML_Output_File], Item_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                except Exception:
                    logging.info(f"{Common.Date()} - {__name__.replace('plugins.', '')} - No result found for given query {Query} for state {State}.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
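# The States iterable used in the registration search above is defined elsewhere
# in this plugin. A plausible definition, assuming the quote form accepts the
# standard Australian state and territory abbreviations (an inferred sketch, not
# the project's verified list):
States = ["ACT", "NSW", "NT", "QLD", "SA", "TAS", "VIC", "WA"]  # Inferred; confirm against the plugin's actual configuration.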
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            URL_Body = f'https://{self.Domain}'
            Main_URL = URL_Body + '/' + Query.lower().replace(' ', '-')
            Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extension)
            Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<tr\>\s+\<td\sclass\=\"name\"\>\s+\<a\shref\=\"([\/\d\w\-\+\?\.]+)\"\>([\/\d\w\-\+\?\.\(\)\s\,\;\:\~\`\!\@\#\$\%\^\&\*\[\]\{\}]+)\<\/a\>\s+\<\/td\>", Findall=True)

            if Regex:
                Current_Step = 0
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                for URL, Title in Regex:
                    Item_URL = URL_Body + URL
                    Current_Response = Common.Request_Handler(Item_URL)
                    Current_Item_Regex = Common.Regex_Handler(Current_Response, Custom_Regex=r"\<button\sclass\=\"btn\sbtn\-primary\spassword\"\s+data\-data\=\"([\-\d\w\?\/]+)\"\s+data\-toggle\=\"modal\"\s+data\-target\=\"\#modal\"\s+\>show\sme\!\<\/button\>")

                    if Current_Item_Regex:

                        try:
                            Detailed_Item_URL = URL_Body + Current_Item_Regex.group(1)
                            Detailed_Responses = Common.Request_Handler(Detailed_Item_URL, Filter=True, Host=f"https://www.{self.Domain}")
                            Detailed_Response = Detailed_Responses["Regular"]
                            JSON_Object = Common.JSON_Handler(Detailed_Response)

                            if JSON_Object.Is_JSON():
                                JSON_Response = JSON_Object.To_JSON_Loads()
                                Output_Response = "<head><title>" + JSON_Response["title"] + "</title></head>\n"
                                Output_Response = Output_Response + JSON_Response["data"]

                            else:
                                Output_Response = Detailed_Responses["Filtered"]

                            if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Output_Response, Title, self.The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Item_URL, General.Get_Title(Item_URL), self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Item_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                        except Exception:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to generate output, may have a blank detailed response.")

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for current result.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for provided query.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True)

            if URL_Components:
                BW_Info = builtwith(Query)

                if BW_Info:
                    BW_JSON_Output = Common.JSON_Handler(BW_Info).Dump_JSON()
                    Query_Domain = URL_Components["Body"] + URL_Components["Extension"]
                    Title = f"Built With | {Query_Domain}"
                    Main_File = General.Main_File_Create(Directory, self.Plugin_Name, BW_JSON_Output, Query_Domain, self.The_File_Extensions["Main"])
                    BW_Search_URL = f"https://{self.Domain}/{Query_Domain}"
                    Responses = Common.Request_Handler(BW_Search_URL, Filter=True, Host=f"https://{self.Domain}")
                    Response = Responses["Filtered"]
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

                    if BW_Search_URL not in Cached_Data and BW_Search_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, Query, self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], BW_Search_URL, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(BW_Search_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to get result for provided query.")

            else:
                logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Yandex_Details = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Yandex_Response = Common.Request_Handler(f"https://{Domain}/search/xml?user={Yandex_Details[0]}&key={Yandex_Details[1]}&query={Query}&l10n=en&sortby=rlv&filter=none&maxpassages=five&groupby=attr%3D%22%22.mode%3Dflat.groups-on-page%3D{str(Limit)}.docs-in-group%3D1")
            JSON_Response = xmltodict.parse(Yandex_Response)
            JSON_Object = Common.JSON_Handler(JSON_Response)
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Search Result", Task_ID, Plugin_Name.lower())
            New_JSON_Response = Recursive_Dict_Check(["yandexsearch", "response", "results", "grouping", "group"], JSON_Response)

            if New_JSON_Response:

                for Yandex_Item_Line in New_JSON_Response:

                    try:

                        if Recursive_Dict_Check(["doc", "url"], Yandex_Item_Line):
                            Yandex_Item_Line = Yandex_Item_Line['doc']
                            Yandex_URL = Yandex_Item_Line['url']
                            Title = Recursive_Dict_Check(["title", "#text"], Yandex_Item_Line)

                            if Title:
                                Title = f"Yandex | {Title}"

                            else:
                                Title = General.Get_Title(Yandex_URL)
                                Title = f"Yandex | {Title}"

                            if Yandex_URL not in Cached_Data and Yandex_URL not in Data_to_Cache:
                                Yandex_Item_Responses = Common.Request_Handler(Yandex_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{Domain}")
                                Yandex_Item_Response = Yandex_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Yandex_Item_Response, Yandex_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Yandex_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(Yandex_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                    except Exception as e:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")

            else:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
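# Recursive_Dict_Check() above is a helper defined elsewhere in this plugin. A
# minimal sketch consistent with how it is called: walk a list of keys down into
# nested dictionaries and return the value at the end of the path, or False when
# any key is missing. The "_Sketch" name and exact behaviour are inferred, not
# the project's verified implementation.
def Recursive_Dict_Check_Sketch(Keys, Dictionary):

    for Key in Keys:

        if isinstance(Dictionary, dict) and Key in Dictionary:
            Dictionary = Dictionary[Key]  # Descend one level.

        else:
            return False  # Path broken; signal a miss.

    return Dictionary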
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: if self.Type == "Email": if Common.Regex_Handler(Query, Type=self.Type): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}" Response = Common.Request_Handler(URL) Search_Response = Common.Request_Handler(Request_URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses[ "Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Account", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to email regular expression." 
) elif self.Type == "Domain": if Common.Regex_Handler(Query, Type=self.Type): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/self.Domain/report/?self.Domain={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses[ "Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to self.Domain regular expression." ) elif self.Type == "IP Address": if Common.Regex_Handler(Query, Type="IP"): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses[ "Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." 
) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to IP address regular expression." ) elif self.Type == "AV": Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Virus", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) elif self.Type == "Virus Report": Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Virus Report", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided." 
") Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
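# Editor's note: a minimal standalone sketch of the ThreatCrowd v2 lookup pattern used above.
# It assumes only the JSON shape this plugin already relies on ("response_code" and "permalink");
# "requests" and the helper name "threatcrowd_report" are stand-ins, not part of the project.
import requests

# Maps each report type to the query-string parameter the v2 API expects.
PARAM_BY_TYPE = {"domain": "domain", "ip": "ip", "antivirus": "antivirus", "file": "resource"}

def threatcrowd_report(report_type, query):
    url = f"https://www.threatcrowd.org/searchApi/v2/{report_type}/report/"
    data = requests.get(url, params={PARAM_BY_TYPE[report_type]: query}, timeout=30).json()
    if int(data.get("response_code", 0)) == 0:  # "0" signals no results, as checked above.
        return None
    return data

# Example: threatcrowd_report("domain", "example.com") -> dict with a "permalink" key, or None.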
def Search(Query_List, Task_ID): try: Data_to_Cache = [] Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Email_Rep_API_Key = Load_Configuration() for Query in Query_List: if Common.Regex_Handler(Query, Type="Email"): API = EmailRep(Email_Rep_API_Key) JSON_Output_Response = API.query(Query) Link = f"https://{Domain}/{Query}" JSON_Object = Common.JSON_Handler(JSON_Output_Response) JSON_Output_Response = JSON_Object.Dump_JSON() JSON_Response = JSON_Object.To_JSON_Loads() if JSON_Response["reputation"] != "none": Table_JSON = {} for Key, Value in JSON_Response.items(): if Key != "details": Table_JSON[Key] = Value else: for Det_Key, Det_Val in JSON_Response["details"].items(): Table_JSON[Det_Key] = Det_Val Filter_JSON = [Table_JSON] Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Email Information", Task_ID, Concat_Plugin_Name) if Query not in Cached_Data and Query not in Data_to_Cache: Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{Domain}") Filtered_Response = Responses["Filtered"] Title = f"Email Reputation | {Query}" Main_File = General.Main_File_Create(Directory, Concat_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File(Directory, Query, Concat_Plugin_Name, Filtered_Response, Title, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name) Data_to_Cache.append(Link) else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
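# Editor's note: the EmailRep branch above folds the nested "details" object into one flat row
# before rendering. A sketch of just that flattening step; the function name is invented here
# and the input layout is the one the plugin already consumes.
def flatten_emailrep(json_response):
    table = {}
    for key, value in json_response.items():
        if key != "details":
            table[key] = value
        else:
            for det_key, det_val in value.items():
                table[det_key] = det_val  # promote nested details to top-level columns
    return [table]  # a one-row table, matching Filter_JSON above

# Example: flatten_emailrep({"reputation": "high", "details": {"spam": False}})
# -> [{"reputation": "high", "spam": False}]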
def Search(Query_List, Task_ID, Type, Limit=10): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(Limit) for Query in Query_List: if Type == "User": from instagramy import InstagramUser Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = InstagramUser(Query) JSON_Object = Common.JSON_Handler(vars(CSE_Response)) CSE_JSON_Output_Response = JSON_Object.Dump_JSON() Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"]) if not CSE_Response.is_private: Posts = CSE_Response.posts Output_Connections = General.Connections( Query, Local_Plugin_Name, Domain, "Social Media - Person", Task_ID, Local_Plugin_Name.lower()) Current_Step = 0 for Post in Posts: URL = Post['post_url'] Shortcode = URL.replace(f"https://www.{Domain}/p/", "").replace("/", "") Title = "IG | " + General.Get_Title(URL, Requests=True) if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int( Limit): Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}") Response = Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Response, Shortcode, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], URL, Title, Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 else: logging.info( f"{Common.Date()} - {__name__.strip('plugins.')} - The provided user's profile is private and cannot be scraped."
) elif Type == "Tag": from instagramy import InstagramHashTag Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = InstagramHashTag(Query) JSON_Object = Common.JSON_Handler(vars(CSE_Response)) CSE_JSON_Output_Response = JSON_Object.Dump_JSON() Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"]) Posts = vars( CSE_Response)['tag_data']['edge_hashtag_to_media']['edges'] Output_Connections = General.Connections( Query, Local_Plugin_Name, Domain, "Social Media - Person", Task_ID, Local_Plugin_Name.lower()) Current_Step = 0 for Post in Posts: Shortcode = Post['node']['shortcode'] URL = f"https://www.{Domain}/p/{Shortcode}/" Title = "IG | " + General.Get_Title(URL, Requests=True) if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int( Limit): Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}") Response = Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Response, Shortcode, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File, Output_file], URL, Title, Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 elif Type == "Post": from instagramy import InstagramPost Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = InstagramPost(Query) JSON_Object = Common.JSON_Handler(vars(CSE_Response)) CSE_JSON_Output_Response = JSON_Object.Dump_JSON() Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, Domain, "Social Media - Place", Task_ID, Local_Plugin_Name.lower()) URL = CSE_Response.url Shortcode = URL.replace(f"https://www.{Domain}/p/", "").replace("/", "") Title = "IG | " + General.Get_Title(URL, Requests=True) if URL not in Cached_Data and URL not in Data_to_Cache: Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}") Response = Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Response, Shortcode, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File, Output_file], URL, Title, Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
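# Editor's note: both Instagram branches walk a post list under the same three guards: not in
# the cache, not already queued for caching, and below the limit. A compact sketch of that
# dedup-and-limit loop with invented names; the counter placement mirrors Current_Step above.
def select_new_posts(post_urls, cached, to_cache, limit):
    selected, step = [], 0
    for url in post_urls:
        if url not in cached and url not in to_cache and step < int(limit):
            selected.append(url)
            to_cache.append(url)
            step += 1  # only freshly processed posts count toward the limit
    return selected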
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: try: if self.Type == "UKBN": Authorization_Key = self.Load_Configuration() if Authorization_Key: Authorization_Key = "Basic " + Authorization_Key.decode('ascii') headers_auth = {"Authorization": Authorization_Key} Main_URL = f'https://api.{self.Domain}/company/{Query}' Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() Indented_JSON_Response = JSON_Object.Dump_JSON() try: Query = str(int(Query)) if Response and '{"errors":[{"error":"company-profile-not-found","type":"ch:service"}]}' not in Response: if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache: Current_Company_Number = str(JSON_Response["company_number"]) Result_URL = f'https://beta.{self.Domain}/company/{Current_Company_Number}' Result_Responses = Common.Request_Handler(Result_URL, Filter=True, Host=f"https://beta.{self.Domain}") Result_Response = Result_Responses["Filtered"] UKCN = str(JSON_Response["company_name"]) Main_Output_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_JSON_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Result_Response, UKCN, self.The_File_Extensions["Query"]) if Output_file: Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name) Output_Connections.Output([Main_Output_File, Output_file], Result_URL, f"UK Business Number {Query}", self.Concat_Plugin_Name) Data_to_Cache.append(Main_URL) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")
except: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for UKBN Search.") else: logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to retrieve API key.") elif self.Type == "UKCN": Authorization_Key = self.Load_Configuration() if Authorization_Key: Authorization_Key = "Basic " + Authorization_Key.decode('ascii') try: Main_URL = f'https://api.{self.Domain}/search/companies?q={Query}&items_per_page={self.Limit}' headers_auth = {"Authorization": Authorization_Key} Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() Indented_JSON_Response = JSON_Object.Dump_JSON() try: if JSON_Response['total_results'] > 0: Main_Output_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_JSON_Response, Query, self.The_File_Extensions["Main"]) Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name) for Item in JSON_Response['items']: UKBN_URL = Item['links']['self'] Full_UKBN_URL = f'https://beta.{self.Domain}{str(UKBN_URL)}' UKBN = UKBN_URL.strip("/company/") if Full_UKBN_URL not in Cached_Data and Full_UKBN_URL not in Data_to_Cache: UKCN = Item['title'] Current_Responses = Common.Request_Handler(Full_UKBN_URL, Filter=True, Host=f"https://beta.{self.Domain}") Current_Response = Current_Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Current_Response), UKCN, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_Output_File, Output_file], Full_UKBN_URL, f"UK Business Number {UKBN} for Query {Query}", self.Concat_Plugin_Name) Data_to_Cache.append(Full_UKBN_URL) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.") except: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Error during UKCN Search, perhaps the rate limit has been exceeded.") except: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for UKCN Search.") else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to retrieve API key.") else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type.") except: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.") Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
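# Editor's note: the UKBN/UKCN branches send the stored key as an HTTP Basic credential
# ("Basic " + base64 bytes). A sketch of deriving that header from a raw API key, assuming
# the usual key-as-username, empty-password convention; the function name is invented.
import base64

def basic_auth_header(api_key):
    token = base64.b64encode(f"{api_key}:".encode("ascii")).decode("ascii")
    return {"Authorization": f"Basic {token}"}

# Usage (hypothetical host): requests.get(f"https://api.<domain>/company/{number}",
# headers=basic_auth_header(api_key))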
def Search(Query_List, Task_ID, Type, Limit=10): try: Data_to_Cache = [] Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) for Query in Query_List: try: if Type == "CBN": Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc' Response = Common.Request_Handler(Main_API_URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() Indented_JSON_Response = JSON_Object.Dump_JSON() Main_Output_File = General.Main_File_Create( Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"]) try: if JSON_Response['count'] != 0: Query = str(int(Query)) Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active' Responses = Common.Request_Handler( Main_URL, Filter=True, Host=f"https://{Domain}") Response = Responses["Filtered"] if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"]) if Output_file: Output_Connections = General.Connections( Query, Plugin_Name, Domain.strip("beta."), "Company Details", Task_ID, Plugin_Name) Output_Connections.Output( [Main_Output_File, Output_file], Main_URL, f"Canadian Business Number {Query}", Concat_Plugin_Name) Data_to_Cache.append(Main_URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search."
) elif Type == "CCN": Total_Results = 0 Iterator = "page=0" while Limit > Total_Results and Iterator is not None: Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote( Query ) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}' Response = Common.Request_Handler(Main_URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() Total_Results += len(JSON_Response["docs"]) if "paging" in JSON_Response and "next" in JSON_Response.get( "paging"): Iterator = JSON_Response["paging"]["next"] else: Iterator = None Indented_JSON_Response = JSON_Object.Dump_JSON() Limit = General.Get_Limit(Limit) try: Main_File = General.Main_File_Create( Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"]) Current_Step = 0 Output_Connections = General.Connections( Query, Plugin_Name, Domain.strip("beta."), "Company Details", Task_ID, Plugin_Name) for JSON_Item in JSON_Response['docs']: if JSON_Item.get('BN'): CCN = JSON_Item['Company_Name'] CBN = str(int(JSON_Item['BN'])) Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active' if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int( Limit): Current_Responses = Common.Request_Handler( Full_CCN_URL, Filter=True, Host=f"https://{Domain}") Current_Response = Current_Responses[ "Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, str(Current_Response), CCN.replace(' ', '-'), The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Full_CCN_URL, f"Canadian Business Number {CBN} for Query {Query}", Concat_Plugin_Name) Data_to_Cache.append(Full_CCN_URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response." ) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid request type." ) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
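# Editor's note: the CCN branch pages through results by following JSON_Response["paging"]["next"]
# until it runs out or the limit is met. A condensed sketch of that loop, assuming the same
# "docs"/"paging" response shape; requests stands in for Common.Request_Handler.
import requests

def fetch_paged_docs(base_url, limit):
    docs, iterator = [], "page=0"
    while len(docs) < limit and iterator is not None:
        data = requests.get(f"{base_url}&{iterator}", timeout=30).json()
        docs.extend(data.get("docs", []))
        iterator = (data.get("paging") or {}).get("next")  # None ends the loop
    return docs[:limit]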
def All_Extensions(self): try: Local_Plugin_Name = self.Plugin_Name + "-All-Extensions" Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Local_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) self.Cached_Data = Cached_Data_Object.Get_Cache() logging.info(f"{Common.Date()} {__name__.strip('plugins.')} - All Extensions Selected.") self.Query_List = General.Convert_to_List(self.Query_List) for Query in self.Query_List: URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True) if URL_Components: self.URL_Prefix = URL_Components["Prefix"] self.URL_Body = URL_Components["Body"] self.URL_Extension = URL_Components["Extension"] else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query, please provide a valid URL.") Pool = mpool.ThreadPool(int(multiprocessing.cpu_count()) * int(multiprocessing.cpu_count())) Pool_Threads = [] for Extension in self.Generic_Extensions: for suffix in self.Global_Domain_Suffixes: suffix = suffix.replace(".com", "") suffix = suffix.replace(".co", "") if not self.URL_Extension == suffix: Thread = Pool.apply_async(self.Query_URL, args=(self.URL_Body, Extension + suffix,)) Pool_Threads.append(Thread) [Pool_Thread.wait() for Pool_Thread in Pool_Threads] URL_Domain = self.URL_Body + self.URL_Extension Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, "\n".join(self.Valid_Results), self.URL_Body, self.The_File_Extensions["Main"]) if Main_File: for Host in self.Valid_Hosts: Current_Domain = Host[0].strip('https://').strip('http://') try: Current_Responses = Common.Request_Handler(Host[0], Filter=True, Host=Host[0], Risky_Plugin=True) Current_Response = Current_Responses["Filtered"] Output_File = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Current_Response, Current_Domain, self.The_File_Extensions["Query"]) if Output_File: Output_File_List = [Main_File, Output_File] Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name) else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") except requests.exceptions.ConnectionError: Output_File_List = [Main_File] Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name) Cached_Data_Object.Write_Cache(self.Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
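# Editor's note: All_Extensions fans each candidate domain out over a thread pool, then blocks
# on every AsyncResult. A self-contained sketch of that fan-out/join pattern; "probe" is a
# stand-in for self.Query_URL, and the pool sizing (cpu_count squared) follows the plugin.
import multiprocessing
from multiprocessing import pool as mpool

def probe(host):
    return host  # placeholder worker

if __name__ == "__main__":
    thread_pool = mpool.ThreadPool(int(multiprocessing.cpu_count()) ** 2)
    tasks = [thread_pool.apply_async(probe, args=(h,)) for h in ("a.example", "b.example")]
    [task.wait() for task in tasks]  # join step, as in the plugin
    thread_pool.close()
    thread_pool.join()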
def Search(Query_List, Task_ID, Limit=10): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(Limit) for Query in Query_List: try: Pull_URL = f"https://{Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={Limit}" JSON_Object = Common.JSON_Handler( Common.Request_Handler(Pull_URL)) Results = JSON_Object.To_JSON_Loads() Indented_Results = JSON_Object.Dump_JSON() Output_Connections = General.Connections( Query, Plugin_Name, Domain, "Phishing", Task_ID, Plugin_Name.lower()) Main_File = General.Main_File_Create( Directory, Plugin_Name, Indented_Results, Query, The_File_Extensions["Main"]) for Result in Results: Current_Link = Result["url"] Current_Domain = Current_Link.strip("https://") Current_Domain = Current_Domain.strip("http://") Current_Domain = Current_Domain.strip("www.") Current_Title = Result["title"] try: Current_Result = Common.Request_Handler( Current_Link, Filter=True, Risky_Plugin=True, Host=Current_Link) Current_Result_Filtered = Current_Result["Filtered"] Response_Regex = Common.Regex_Handler( Current_Result_Filtered, Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>") Output_file_Query = Query.replace(" ", "-") if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File( Directory, Output_file_Query, Plugin_Name, Current_Result_Filtered, Current_Domain, The_File_Extensions["Query"]) if Output_file: if Response_Regex: Current_Title = Response_Regex.group(1) Current_Title = Current_Title.strip() Output_Connections.Output( [Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower()) else: if "Phishstats" not in Current_Title: Output_Connections.Output( [Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower()) else: Output_Connections.Output( [Main_File, Output_file], Current_Link, General.Get_Title(Current_Link), Plugin_Name.lower()) Data_to_Cache.append(Current_Link) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request for result, link may no longer be available." ) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Shodan_API_Key = self.Load_Configuration() API_Session = Shodan(Shodan_API_Key) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: try: if self.Type == "Search": Local_Plugin_Name = self.Plugin_Name + "-Search" try: API_Response = API_Session.search(Query) except Exception as e: logging.error( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}." ) break JSON_Output_Response = Common.JSON_Handler( API_Response).Dump_JSON() Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower()) Current_Step = 0 for Shodan_Item in API_Response["matches"]: Shodan_Item_Module = Shodan_Item['_shodan'][ 'module'] Shodan_Item_Module = Shodan_Item_Module.replace( '-simple-new', '') if Shodan_Item_Module.startswith("http"): Shodan_Item_Host = "" Shodan_Item_Port = 0 if 'http' in Shodan_Item: Shodan_Item_Host = Shodan_Item['http'][ 'host'] Shodan_Item_Response = Shodan_Item['http'][ 'html'] elif 'ip_str' in Shodan_Item and 'domains' in Shodan_Item and len( Shodan_Item['domains']) > 0: Shodan_Item_Host = Shodan_Item['domains'][ 0] Shodan_Item_Response = Shodan_Item['data'] elif 'ip_str' in Shodan_Item and 'domains' not in Shodan_Item: Shodan_Item_Host = Shodan_Item['ip_str'] Shodan_Item_Response = Shodan_Item['data'] if Shodan_Item_Host: if 'port' in Shodan_Item: if int(Shodan_Item['port']) not in [ 80, 443 ]: Shodan_Item_Port = Shodan_Item[ 'port'] if Shodan_Item_Port != 0: Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}:{str(Shodan_Item_Port)}" else: Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}" Title = f"Shodan | {str(Shodan_Item_Host)}" if Shodan_Item_URL not in Cached_Data and Shodan_Item_URL not in Data_to_Cache and Current_Step < int( self.Limit): Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Shodan_Item_Response, Shodan_Item_Host, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Shodan_Item_URL, Title, self.Plugin_Name.lower()) Data_to_Cache.append( Shodan_Item_URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) Current_Step += 1 elif self.Type == "Host": Local_Plugin_Name = self.Plugin_Name + "-Host" try: API_Response = API_Session.host(Query) except Exception as e: logging.error( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}."
) break JSON_Output_Response = Common.JSON_Handler( API_Response).Dump_JSON() Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower()) Shodan_URL = f"https://www.{self.Domain}/host/{Query}" Title = f"Shodan | {Query}" if Shodan_URL not in Cached_Data and Shodan_URL not in Data_to_Cache: Shodan_Responses = Common.Request_Handler( Shodan_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Shodan_Response = Shodan_Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, self.Plugin_Name, Shodan_Response, Query, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Shodan_URL, Title, self.Plugin_Name.lower()) Data_to_Cache.append(Shodan_URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found." ) except: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to complete task." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
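# Editor's note: the Shodan search branch rebuilds a browsable URL from each match: the module
# becomes the scheme, the host comes from http/domains/ip_str, and the port is appended only
# when it is not a default web port. A sketch of just that rule; the function name is invented.
def shodan_item_url(module, host, port=0):
    module = module.replace("-simple-new", "")
    if port and int(port) not in (80, 443):  # 80/443 are implied by the scheme
        return f"{module}://{host}:{port}"
    return f"{module}://{host}"

# Example: shodan_item_url("https-simple-new", "93.184.216.34", 8443)
# -> "https://93.184.216.34:8443"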
def Character_Switch(self, Alphabets, Comprehensive_Search=False): try: Local_Plugin_Name = self.Plugin_Name + "-Character-Switch" Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Local_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) self.Cached_Data = Cached_Data_Object.Get_Cache() logging.info(f"{Common.Date()} {__name__.strip('plugins.')} - Character Switching Selected.") self.Query_List = General.Convert_to_List(self.Query_List) for Query in self.Query_List: URL_Components = Common.Regex_Handler(Query, Type="URL", Get_URL_Components=True) if URL_Components: self.URL_Prefix = URL_Components["Prefix"] self.URL_Body = URL_Components["Body"] self.URL_Extension = URL_Components["Extension"] else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query, please provide a valid URL.") logging.info(f'{Common.Date()} - Provided domain body - {self.URL_Body}') URL_List = list(self.URL_Body.lower()) Local_Plugin_Name = f"{Local_Plugin_Name}-{Alphabets}" Non_Comprehensive_Latin_Limit = 15 Other_Limit = 10 if Alphabets == "Latin": if not Comprehensive_Search: if len(self.URL_Body) > Non_Comprehensive_Latin_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Non_Comprehensive_Latin_Limit)} characters in length. Condensed punycode domain fuzzing only allows a maximum of {str(Non_Comprehensive_Latin_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, Latin=True, Latin_Alternatives=True) else: if len(self.URL_Body) > Other_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters in length. Comprehensive punycode domain fuzzing searching only allows a maximum of {str(Other_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, Latin=True, Latin_Alternatives=True, Comprehensive=True) elif Alphabets == "Asian": if len(self.URL_Body) > Other_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters in length. Punycode domain fuzzing for Asian alphabets only allows a maximum of {str(Other_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, Asian=True) elif Alphabets == "Middle Eastern": if len(self.URL_Body) > Other_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters in length. Punycode domain fuzzing for Middle Eastern alphabets only allows a maximum of {str(Other_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, Middle_Eastern=True) elif Alphabets == "Native American": if len(self.URL_Body) > Other_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters in length. 
Punycode domain fuzzing for Native American alphabets only allows a maximum of {str(Other_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, Native_American=True) elif Alphabets == "North African": if len(self.URL_Body) > Other_Limit: logging.error(f"{Common.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than {str(Other_Limit)} characters in length. Punycode domain fuzzing for North African alphabets only allows a maximum of {str(Other_Limit)} characters.") return None else: Altered_URLs = Rotor.Search(URL_List, North_African=True) logging.info(f'{Common.Date()} - Generated domain combinations - {", ".join(Altered_URLs)}') Pool = mpool.ThreadPool(int(multiprocessing.cpu_count())*int(multiprocessing.cpu_count())) Pool_Threads = [] for Altered_URL in Altered_URLs: if not Altered_URL == self.URL_Body: Thread = Pool.apply_async(self.Query_URL, args=(Altered_URL, self.URL_Extension,)) Pool_Threads.append(Thread) [Pool_Thread.wait() for Pool_Thread in Pool_Threads] logging.info(f'{Common.Date()} {Directory}') URL_Domain = self.URL_Body + self.URL_Extension Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, "\n".join(self.Valid_Results), self.URL_Body, self.The_File_Extensions["Main"]) Main_File_JSON_Data = General.CSV_to_JSON(Query, self.Valid_Results) Main_File_HTML_Data = General.CSV_to_HTML(self.Valid_Results, f"Domain Spoof Results for Query {Query}") Main_File_JSON = General.Main_File_Create(Directory, Local_Plugin_Name, Main_File_JSON_Data, self.URL_Body, self.The_File_Extensions["Main_Alternative"]) Main_File_HTML = General.Main_File_Create(Directory, Local_Plugin_Name, Main_File_HTML_Data, self.URL_Body, self.The_File_Extensions["Query"]) if Main_File and Main_File_HTML and Main_File_JSON: for Host in self.Valid_Hosts: Current_Domain = Host[0].strip('https://').strip('http://') try: Current_Responses = Common.Request_Handler(Host[0], Filter=True, Host=Host[0], Risky_Plugin=True) Current_Response = Current_Responses["Filtered"] Output_File = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Current_Response, Current_Domain, self.The_File_Extensions["Query"]) if Output_File: Output_File_List = [Main_File, Main_File_HTML, Main_File_JSON, Output_File] Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name) else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") except requests.exceptions.ConnectionError: Output_File_List = [Main_File, Main_File_HTML, Main_File_JSON] Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Domain, "Domain Spoof", self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output(Output_File_List, Host[0], f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}", Directory_Plugin_Name=self.Concat_Plugin_Name) Cached_Data_Object.Write_Cache(self.Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
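# Editor's note: Character_Switch ultimately trades on punycode: a homoglyph variant only
# matters because it has a registrable ASCII form. A minimal illustration with Python's
# built-in idna codec; the Cyrillic "а" below is an invented example, and Rotor.Search
# (which generates the variants) is not reproduced here.
spoof = "exаmple.com"  # the "а" is U+0430, not ASCII "a"
punycode = spoof.encode("idna").decode("ascii")
# -> an "xn--..." ASCII form, which is what registrars and browsers actually resolve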
def Search(Query_List, Task_ID, Limit=10): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) IX_Access_Token = Load_Configuration() Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(Limit) for Query in Query_List: Data = {"term": Query, "buckets": [], "lookuplevel": 0, "maxresults": Limit, "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0, "terminate": []} IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search?k={IX_Access_Token}", Method="POST", JSON_Data=Data) JSON_Object = Common.JSON_Handler(IX_Response) JSON_Response = JSON_Object.To_JSON_Loads() JSON_Output_Response = JSON_Object.Dump_JSON() Main_File_1 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-1", The_File_Extensions["Main"]) if "id" in JSON_Response: Search_ID = JSON_Response["id"] IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search/result?k={IX_Access_Token}&id={Search_ID}") JSON_Object = Common.JSON_Handler(IX_Response) JSON_Response = JSON_Object.To_JSON_Loads() JSON_Output_Response = JSON_Object.Dump_JSON() Main_File_2 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-2", The_File_Extensions["Main"]) Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Data Leakage", Task_ID, Plugin_Name.lower()) if "records" in JSON_Response: for IX_Item in JSON_Response["records"]: if "systemid" in IX_Item and "name" in IX_Item: IX_URL = f"https://{Domain}/?did=" + IX_Item['systemid'] if IX_Item["name"] != "": Title = "IntelligenceX Data Leak | " + IX_Item["name"] else: Title = "IntelligenceX Data Leak | Untitled Document" if IX_URL not in Cached_Data and IX_URL not in Data_to_Cache: IX_Item_Responses = Common.Request_Handler(IX_URL, Filter=True, Host=f"https://{Domain}") IX_Item_Response = IX_Item_Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, IX_Item_Response, IX_URL, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File_1, Main_File_2, Output_file], IX_URL, Title, Plugin_Name.lower()) Data_to_Cache.append(IX_URL) else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") else: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - No results found.") Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
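# Editor's note: the IntelligenceX flow above is a two-step protocol: POST the term to open a
# search job, then GET the buffered results by id. A compact sketch of those two calls; the
# payload mirrors the Data dict above, and "base"/"key" are placeholders for the plugin's values.
import requests

def intelx_search(base, key, term, limit):
    payload = {"term": term, "buckets": [], "lookuplevel": 0, "maxresults": limit,
               "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0,
               "terminate": []}
    first = requests.post(f"{base}/intelligent/search?k={key}", json=payload, timeout=30).json()
    if "id" not in first:
        return []
    second = requests.get(f"{base}/intelligent/search/result?k={key}&id={first['id']}", timeout=30).json()
    return second.get("records", [])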
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) try: pyhibp.set_api_key(key=self.Load_Configuration()) except: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to set API key, make sure it is set in the configuration file.") if self.Type == "email": Local_Plugin_Name = self.Plugin_Name + " " + self.Type Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: Query_Response = pyhibp.get_pastes(email_address=Query) logging.info(Query_Response) if Query_Response: Current_Domain = Query_Response[0]["Source"] ID = Query_Response[0]["Id"] Link = f"https://www.{Current_Domain}.com/{ID}" JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON() if Link not in Cached_Data and Link not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, JSON_Query_Response, "email", self.The_File_Extension) if Output_file: Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name) Data_to_Cache.append(Link) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.") Cached_Data_Object.Write_Cache(Data_to_Cache) elif self.Type == "breach": Local_Plugin_Name = self.Plugin_Name + " " + self.Type Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: Query_Response = pyhibp.get_single_breach(breach_name=Query) if Query_Response: Current_Domain = Query_Response["Domain"] Link = f"https://www.{Current_Domain}/" JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON() if Link not in Cached_Data and Link not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", self.The_File_Extension) if Output_file: Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name) Data_to_Cache.append(Link) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file.
File may already exist.") Cached_Data_Object.Write_Cache(Data_to_Cache) elif self.Type == "password": Local_Plugin_Name = self.Plugin_Name + " " + self.Type Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: Query_Response = pw.is_password_breached(password=Query) logging.info(Query_Response) if Query_Response: Link = f"https://{self.Domain}/Passwords?{Query}" if Link not in Cached_Data and Link not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Query_Response), "password", ".txt") if Output_file: Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name) Data_to_Cache.append(Link) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.") Cached_Data_Object.Write_Cache(Data_to_Cache) elif self.Type == "account": Local_Plugin_Name = self.Plugin_Name + " " + self.Type Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True) if Query_Response: Current_Step = 0 for Response in Query_Response: Current_Response = pyhibp.get_single_breach(breach_name=Response['Name']) JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON() Link = "https://" + Current_Response['Domain'] if Current_Response['Domain'] not in Cached_Data and Current_Response['Domain'] not in Data_to_Cache and Current_Step < int(self.Limit): Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", self.The_File_Extension) if Output_file: Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Response['Domain'], self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name) Data_to_Cache.append(Current_Response['Domain']) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.") Current_Step += 1 Cached_Data_Object.Write_Cache(Data_to_Cache) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided.") except Exception as e: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
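# Editor's note: the password branch defers to pyhibp's pwnedpasswords helper, which implements
# the HIBP k-anonymity range protocol: hash with SHA-1, send only the first five hex characters,
# and match the remainder locally. A sketch of that protocol against the public range endpoint.
import hashlib
import requests

def password_breach_count(password):
    digest = hashlib.sha1(password.encode("utf-8")).hexdigest().upper()
    prefix, suffix = digest[:5], digest[5:]
    body = requests.get(f"https://api.pwnedpasswords.com/range/{prefix}", timeout=30).text
    for line in body.splitlines():
        candidate, _, count = line.partition(":")
        if candidate == suffix:
            return int(count)  # number of times this password appears in breaches
    return 0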
def Search(Query_List, Task_ID, Limit=10): try: Data_to_Cache = [] Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Location = Connectors.Load_Location_Configuration() Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(Limit) for Query in Query_List: try: Request_Query = urllib.parse.quote(Query) Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}" Response = Common.Request_Handler(Main_URL) except: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?" ) break JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Object.Dump_JSON(), Query, The_File_Extensions["Main"]) if 'resultCount' in JSON_Response: if JSON_Response['resultCount'] > 0: Output_Connections = General.Connections( Query, Plugin_Name, Domain, "Application", Task_ID, Concat_Plugin_Name) for JSON_Object in JSON_Response['results']: JSON_Object_Responses = Common.Request_Handler( JSON_Object['artistViewUrl'], Filter=True, Host=f"https://{Domain}") JSON_Object_Response = JSON_Object_Responses[ "Filtered"] if JSON_Object[ 'artistViewUrl'] not in Cached_Data and JSON_Object[ 'artistViewUrl'] not in Data_to_Cache: Apple_Store_Regex = Common.Regex_Handler( JSON_Object['artistViewUrl'], Custom_Regex=r"https\:\/\/apps\.apple\.com\/" + rf"{Location}" + r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+") if Apple_Store_Regex: Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, JSON_Object_Response, Apple_Store_Regex.group(1), The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], JSON_Object['artistViewUrl'], General.Get_Title( JSON_Object['artistViewUrl']), Concat_Plugin_Name) Data_to_Cache.append( JSON_Object['artistViewUrl']) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
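# Editor's note: a standalone sketch of the iTunes Search API call made above, with the term
# URL-encoded the same way; requests replaces Common.Request_Handler, and the resultCount and
# results fields are the ones the plugin already consumes.
import urllib.parse
import requests

def itunes_software_search(term, country, limit):
    url = (f"https://itunes.apple.com/search?term={urllib.parse.quote(term)}"
           f"&country={country}&entity=software&limit={int(limit)}")
    data = requests.get(url, timeout=30).json()
    return data["results"] if data.get("resultCount", 0) > 0 else []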
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: try: if self.Type == "CIK": Main_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={Query}&owner=exclude&count=40&hidefilings=0' Responses = Common.Request_Handler( Main_URL, Filter=True, Host=f"https://www.{self.Domain}") Response = Responses["Regular"] try: if 'No matching CIK.' not in Response: Query = str(int(Query)) Response = Responses["Filtered"] if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File( Directory, Query, self.Plugin_Name, Response, f"edgar-american-business-search-{Query.lower()}", self.The_File_Extensions["Query"]) if Output_file: Output_Connections = General.Connections( Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name) Output_Connections.Output( [Output_file], Main_URL, f"American Business Number (EDGAR) {Query}", self.Concat_Plugin_Name) Data_to_Cache.append(Main_URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) except: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for CIK Search." ) elif self.Type == "ACN": Main_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?company={Query}&owner=exclude&action=getcompany' Responses = Common.Request_Handler( Main_URL, Filter=True, Host=f"https://www.{self.Domain}") Response = Responses["Regular"] Filtered_Response = Responses["Filtered"] try: ACN = Common.Regex_Handler(Query, Type="Company_Name") if ACN: Main_File = General.Main_File_Create( Directory, self.Plugin_Name, Filtered_Response, Query, self.The_File_Extensions["Main"]) Current_Step = 0 CIKs_Regex = Common.Regex_Handler( Response, Custom_Regex= r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>", Findall=True) if CIKs_Regex: Output_Connections = General.Connections( Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name) for CIK_URL, ACN in CIKs_Regex: Full_CIK_URL = f'https://www.{self.Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={CIK_URL}&owner=exclude&count=40&hidefilings=0' if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int( self.Limit): Current_Responses = Common.Request_Handler( Full_CIK_URL, Filter=True, Host= f"https://www.{self.Domain}") Current_Response = Current_Responses[ "Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, self.Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), self. The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Full_CIK_URL, f"American Business Number (EDGAR) {CIK_URL} for Query {Query}", self.Concat_Plugin_Name) Data_to_Cache.append( Full_CIK_URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." 
) Current_Step += 1 else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Response did not match regular expression." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Query did not match regular expression." ) except: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for ACN Search." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type." ) except: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
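# Editor's note: the ACN branch pulls CIK/company pairs out of EDGAR's HTML table with a single
# findall. The pattern below is the plugin's own regular expression; the sample row is invented
# purely to show its two capture groups.
import re

CIK_ROW = re.compile(r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>")

sample = '0000320193</a></td> <td scope="row">Apple Inc.</td>'
for cik, name in CIK_ROW.findall(sample):
    print(cik, name)  # -> 0000320193 Apple Inc.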