def Search(self):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:

                if self.Type == "NZBN":
                    Main_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/search?q={Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{self.Domain}")
                    Response = Responses["Filtered"]

                    try:

                        if 'An error has occurred and the requested action cannot be performed.' not in Response:
                            Query = str(int(Query))

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Response, f"new-zealand-business-number-{Query.lower()}", self.The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)
                                    Output_Connections.Output([Output_file], Main_URL, f"New Zealand Business Number {Query}", self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for NZBN Search.")

                elif self.Type == "NZCN":

                    try:
                        URL_Query = urllib.parse.quote(Query)
                        Main_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/search?q={URL_Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit={str(self.Limit)}&sf=&sd=&advancedPanel=true&mode=advanced#results'
                        Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{self.Domain}")
                        Response = Responses["Filtered"]
                        NZCN_Regex = Common.Regex_Handler(Query, Type="Company_Name")

                        if NZCN_Regex:
                            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Response, Query, self.The_File_Extension)
                            NZBNs_Regex = Common.Regex_Handler(Response, Custom_Regex=r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d+)\)\s\(NZBN\:\s(\d+)\)", Findall=True)

                            if NZBNs_Regex:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)

                                for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                    Full_NZBN_URL = f'https://{self.Domain}/companies/app/ui/pages/companies/{NZ_ID}?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                    if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                        Current_Response = Common.Request_Handler(Full_NZBN_URL)
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Current_Response), NZCN.replace(' ', '-'), self.The_File_Extension)

                                        if Output_file:
                                            Output_Connections.Output([Main_File, Output_file], Full_NZBN_URL, f"New Zealand Business Number {NZ_ID} for Query {Query}", self.Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_NZBN_URL)

                                        else:
                                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Response did not match regular expression.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query did not match regular expression.")

                    except:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for NZCN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type.")

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit'): if int(kwargs["Limit"]) > 0: Limit = kwargs["Limit"] else: Limit = 10 Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) try: File_Dir = os.path.dirname(os.path.realpath('__file__')) Configuration_File = os.path.join( File_Dir, 'plugins/common/configuration/RSS_Feeds.txt') Current_File = open( Configuration_File, "r") # Open the provided file and retrieve each client to test. URLs = Current_File.read().splitlines() Current_File.close() except: logging.warning( str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Please provide a valid file, failed to open the file which contains the data to search for." ) Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: for URL in URLs: # URLs to be controlled by the web app. RSS = feedparser.parse(URL) Current_Step = 0 for Feed in RSS.entries: if Query in Feed.description: Dump_Types = General.Data_Type_Discovery(Feed.description) File_Link = Feed.link.replace("https://", "") File_Link = File_Link.replace("http://", "") File_Link = File_Link.replace("www.", "") File_Link = File_Link.replace("/", "-") Domain = URL.replace("https://", "") Domain = Domain.replace("http://", "") Domain = Domain.replace("www.", "") if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int( Limit): Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Feed.description, File_Link, The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Feed.link, Domain, "Data Leakage", Task_ID, General.Get_Title(Feed.link), Plugin_Name.lower(), Dump_Types=Dump_Types) Data_to_Cache.append(Feed.link) Current_Step += 1 if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
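# The Limit handling at the top of the kwargs-based plugins above is repeated in
# several of them. A minimal sketch of that pattern factored into one helper; the
# function name and signature are assumptions for illustration only (the newer
# plugins call General.Get_Limit instead).
def Parse_Limit(kwargs, Default=10):
    # Fall back to the default when the limit is missing, non-numeric, or not positive.
    try:
        Limit = int(kwargs.get("Limit", Default))
    except (TypeError, ValueError):
        return Default
    return Limit if Limit > 0 else Default
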
def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit'): if int(kwargs["Limit"]) > 0: Limit = kwargs["Limit"] else: Limit = 10 else: Limit = 10 Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Location = General.Load_Location_Configuration() Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: try: Response = requests.get("http://itunes.apple.com/search?term=" + Query + "&country=" + Location + "&entity=software&limit=" + str(Limit)).text except: logging.warning( General.Date() + " - " + __name__.strip('plugins.') + " - Failed to make request, are you connected to the internet?" ) JSON_Response = json.loads(Response) General.Main_File_Create( Directory, "iTunes", json.dumps(Response, indent=4, sort_keys=True), Query, ".json") if 'resultCount' in JSON_Response: if not JSON_Response['resultCount'] == 0: if JSON_Response['resultCount'] > 0: Output_Connections = General.Connections( Query, Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Concat_Plugin_Name) for JSON_Object in JSON_Response['results']: JSON_Object_Response = requests.get( JSON_Object['artistViewUrl']).text if JSON_Object[ 'artistViewUrl'] not in Cached_Data and JSON_Object[ 'artistViewUrl'] not in Data_to_Cache: iTunes_Regex = re.search( "https\:\/\/itunes\.apple\.com\/" + Location + "\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?mt\=\d\&uo\=\d", JSON_Object['artistViewUrl']) if iTunes_Regex: Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, JSON_Object_Response, iTunes_Regex.group(1), The_File_Extension) if Output_file: Output_Connections.Output( Output_file, JSON_Object['artistViewUrl'], General.Get_Title( JSON_Object['artistViewUrl'])) Data_to_Cache.append(JSON_Object['artistViewUrl']) else: logging.warning( General.Date() + " - " + __name__.strip('plugins.') + " - Invalid value provided, value less than 0.") else: logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid value provided, value equal to 0.") else: logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid value.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, Limit=10):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Yandex_Details = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Yandex_Response = Common.Request_Handler(f"https://{Domain}/search/xml?user={Yandex_Details[0]}&key={Yandex_Details[1]}&query={Query}&l10n=en&sortby=rlv&filter=none&maxpassages=five&groupby=attr%3D%22%22.mode%3Dflat.groups-on-page%3D{str(Limit)}.docs-in-group%3D1")
            JSON_Response = xmltodict.parse(Yandex_Response)
            JSON_Object = Common.JSON_Handler(JSON_Response)
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Search Result", Task_ID, Plugin_Name.lower())
            New_JSON_Response = Recursive_Dict_Check(["yandexsearch", "response", "results", "grouping", "group"], JSON_Response)

            if New_JSON_Response:

                for Yandex_Item_Line in New_JSON_Response:

                    try:

                        if Recursive_Dict_Check(["doc", "url"], Yandex_Item_Line):
                            Yandex_Item_Line = Yandex_Item_Line['doc']
                            Yandex_URL = Yandex_Item_Line['url']
                            Title = Recursive_Dict_Check(["title", "#text"], JSON_Response)

                            if Title:
                                Title = f"Yandex | {Title}"
                            else:
                                Title = General.Get_Title(Yandex_URL)
                                Title = f"Yandex | {Title}"

                            if Yandex_URL not in Cached_Data and Yandex_URL not in Data_to_Cache:
                                Yandex_Item_Responses = Common.Request_Handler(Yandex_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{Domain}")
                                Yandex_Item_Response = Yandex_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Yandex_Item_Response, Yandex_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Yandex_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(Yandex_URL)
                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except Exception as e:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

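# Every Search function in this section starts with the same file-logging
# boilerplate: an INFO-level root logger with a DEBUG file handler writing
# "LEVEL - message" lines. A minimal standalone sketch of that setup; the helper
# name is an assumption and does not exist in the original plugins.
import logging
import os

def Configure_Plugin_Logger(Directory, Log_File):
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
    logger.addHandler(handler)
    return logger
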
def Search(Query_List, Task_ID, Type, **kwargs):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = requests.get(Main_API_URL).text
                    JSON_Response = json.loads(Response)

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://beta.canadasbusinessregistries.ca/search/results?search=%7B{Query}%7D&status=Active'
                            Response = requests.get(Main_URL).text

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, "canadasbusinessregistries.ca", "Company Details", Task_ID, Plugin_Name)
                                    Output_Connections.Output([Output_file], Main_URL, f"Canadian Business Number {Query}", Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search.")

                elif Type == "CCN":
                    Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(Query) + '%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = requests.get(Main_URL).text
                    JSON_Response = json.loads(Response)
                    Indented_JSON_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                    Limit = General.Get_Limit(kwargs)

                    try:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Plugin_Name, "canadasbusinessregistries.ca", "Company Details", Task_ID, Plugin_Name)

                        for JSON_Item in JSON_Response['docs']:

                            if JSON_Item.get('BN'):
                                CCN = JSON_Item['Company_Name']
                                CBN = JSON_Item['BN']
                                Full_ABN_URL = f'https://beta.canadasbusinessregistries.ca/search/results?search=%7B{CBN}%7D&status=Active'

                                if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    Current_Response = requests.get(Full_ABN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), CCN.replace(' ', '-'), The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Full_ABN_URL, f"Canadian Business Number {CBN} for Query {Query}", Concat_Plugin_Name)
                                        Data_to_Cache.append(Full_ABN_URL)
                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response.")

                    except:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type.")

            except:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Search(Query_List, Task_ID, Limit=10):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Pull_URL = f"https://{Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={Limit}"
                JSON_Object = Common.JSON_Handler(Common.Request_Handler(Pull_URL))
                Results = JSON_Object.To_JSON_Loads()
                Indented_Results = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Phishing", Task_ID, Plugin_Name.lower())
                Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_Results, Query, The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    Current_Domain = Current_Link.strip("https://")
                    Current_Domain = Current_Domain.strip("http://")
                    Current_Domain = Current_Domain.strip("www.")
                    Current_Title = Result["title"]

                    try:
                        Current_Result = Common.Request_Handler(Current_Link, Filter=True, Risky_Plugin=True, Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        Response_Regex = Common.Regex_Handler(Current_Result, Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Output_file_Query, Plugin_Name, Current_Result_Filtered, Current_Domain, The_File_Extensions["Query"])

                            if Output_file:

                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1)
                                    Current_Title = Current_Title.strip()
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower())

                                else:

                                    if not "Phishstats" in Current_Title:
                                        Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, Plugin_Name.lower())
                                    else:
                                        Output_Connections.Output([Main_File, Output_file], Current_Link, General.Get_Title(Current_Link), Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request for result, link may no longer be available.")

            except:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Search(Query_List, Task_ID, Type, Limit=10):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "pin":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = f"https://api.{Domain}/v1/pins/{Query}/?access_token=" + Load_Configuration() + "&fields=id%2Clink%2Cnote%2Curl%2Ccreated_at%2Cmedia%2Coriginal_link%2Cmetadata%2Ccounts%2Ccolor%2Cboard%2Cattribution"
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Result_Title = "Pinterest | " + Search_Response["data"]["metadata"]["link"]["title"]
                    Result_URL = Search_Response["data"]["url"]
                    Search_Result_Response = Common.Request_Handler(Result_URL)

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Media", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            elif Type == "board":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = "https://api.pinterest.com/v1/boards/" + Query + "/pins/?access_token=" + Load_Configuration() + "&fields=id%2Clink%2Cnote%2Curl%2Coriginal_link%2Cmetadata%2Cmedia%2Cimage%2Ccreator%2Ccreated_at%2Ccounts%2Ccolor%2Cboard%2Cattribution&limit=" + str(Limit)
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, "pinterest.com", "Social Media - Page", Task_ID, Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Response in Search_Response["data"]:
                        Result_Title = "Pinterest | " + Response["note"]
                        Result_URL = Response["url"]
                        Search_Result_Response = Common.Request_Handler(Result_URL)

                        if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                                Data_to_Cache.append(Result_URL)
                                Current_Step += 1
                            else:
                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Search(self):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        YouTube_Details = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0], cache_discovery=False)
            Search_Response = YouTube_Handler.search().list(q=Query, type='video', part='id,snippet', maxResults=self.Limit).execute()
            JSON_Output_Response = Common.JSON_Handler(Search_Response.get('items', [])).Dump_JSON()
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

            for Search_Result in Search_Response.get('items', []):
                Full_Video_URL = f"https://www.{self.Domain}/watch?v=" + Search_Result['id']['videoId']
                Search_Video_Responses = Common.Request_Handler(Full_Video_URL, Filter=True, Host=f"https://www.{self.Domain}")
                Search_Video_Response = Search_Video_Responses["Filtered"]
                Title = "YouTube | " + Search_Result['snippet']['title']

                if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], self.The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Full_Video_URL, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(Full_Video_URL)
                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

def Search(self):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:
                Pull_URL = f"https://{self.Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={self.Limit}"
                JSON_Object = Common.JSON_Handler(Common.Request_Handler(Pull_URL))
                Results = JSON_Object.To_JSON_Loads()
                Indented_Results = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_Results, Query, self.The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    Current_Domain = urlparse(Current_Link).netloc
                    Current_Title = Result["title"]

                    try:
                        Response = socket.gethostbyname(Current_Domain)
                    except:
                        logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve hostname {Current_Domain} to an IP address. Skipping.")
                        Response = None

                    if Response:
                        Current_Result = Common.Request_Handler(Current_Link, Filter=True, Risky_Plugin=True, Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        Response_Regex = Common.Regex_Handler(Current_Result, Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Output_file_Query, self.Plugin_Name, Current_Result_Filtered, Current_Domain, self.The_File_Extensions["Query"])

                            if Output_file:

                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1)
                                    Current_Title = Current_Title.strip()
                                    Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, self.Plugin_Name.lower())

                                else:

                                    if not "Phishstats" in Current_Title:
                                        Output_Connections.Output([Main_File, Output_file], Current_Link, Current_Title, self.Plugin_Name.lower())
                                    else:
                                        Output_Connections.Output([Main_File, Output_file], Current_Link, General.Get_Title(Current_Link), self.Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve DNS, this link probably isn't live.")

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

def Transaction_Search(Query_List, Task_ID, Type, **kwargs):
    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type != "monero":

                if Type == "btc" or Type == "bch":
                    Query_Regex = re.search(r"[\d\w]{64}", Query)
                elif Type == "eth":
                    Query_Regex = re.search(r"(0x[\d\w]{64})", Query)
                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

                if Query_Regex:
                    Main_URL = f"https://www.blockchain.com/{Type}/tx/{Query}"
                    Main_Response = requests.get(Main_URL).text

                    if Type == "btc":
                        Address_Regex = re.findall(r"\/btc\/address\/([\d\w]{26,34})", Main_Response)
                    elif Type == "bch":
                        Address_Regex = re.findall(r"([\d\w]{42})", Main_Response)
                    elif Type == "eth":
                        Address_Regex = re.findall(r"(0x[\w\d]{40})", Main_Response)
                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, "blockchain.com", "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.blockchain.com/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Transaction_Response = requests.get(Query_URL).text
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)
                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

            else:
                Query_URL = f"https://moneroblocks.info/search/{Query}"
                Transaction_Response = requests.get(Query_URL).text

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Response = requests.get(Query_URL).text
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, "moneroblocks.info", "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)
                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")
        else:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit'): if int(kwargs["Limit"]) > 0: Limit = kwargs["Limit"] else: Limit = 10 else: Limit = 10 Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) YouTube_Details = Load_Configuration() Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0]) Search_Response = YouTube_Handler.search().list( q=Query, type='video', location=YouTube_Details[3], locationRadius=YouTube_Details[4], part='id,snippet', maxResults=Limit, ).execute() General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, ".json") Output_Connections = General.Connections(Query, Plugin_Name, "youtube.com", "Data Leakage", Task_ID, Plugin_Name.lower()) for Search_Result in Search_Response.get('items', []): Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId'] Search_Video_Response = requests.get(Full_Video_URL).text if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extension) if Output_file: Output_Connections.Output(Output_file, Full_Video_URL, General.Get_Title(Full_Video_URL)) Data_to_Cache.append(Full_Video_URL) if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, Type, Limit=10):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            pyhibp.set_api_key(key=Load_Configuration())
        except:
            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to set API key, make sure it is set in the configuration file.")

        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        if Type == "email":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in Query_List:
                Query_Response = pyhibp.get_pastes(email_address=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Current_Domain = Query_Response[0]["Source"]
                    ID = Query_Response[0]["Id"]
                    Link = f"https://www.{Current_Domain}.com/{ID}"
                    JSON_Object = Common.JSON_Handler(Query_Response)
                    JSON_Query_Response = JSON_Object.Dump_JSON()

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Query_Response, "email", The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Output_file], Link, General.Get_Title(Link), Concat_Plugin_Name)
                            Data_to_Cache.append(Link)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        elif Type == "breach":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in Query_List:
                Query_Response = pyhibp.get_single_breach(breach_name=Query)

                if Query_Response:
                    Current_Domain = Query_Response["Domain"]
                    Link = f"https://www.{Current_Domain}.com/"
                    JSON_Object = Common.JSON_Handler(Query_Response)
                    JSON_Query_Response = JSON_Object.Dump_JSON()

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Credentials", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Output_file], Link, General.Get_Title(Link), Concat_Plugin_Name)
                            Data_to_Cache.append(Link)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        elif Type == "password":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in Query_List:
                Query_Response = pw.is_password_breached(password=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Link = f"https://{Domain}/Passwords?{Query}"

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Query_Response), "password", ".txt")

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Credentials", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Output_file], Link, General.Get_Title(Link), Concat_Plugin_Name)
                            Data_to_Cache.append(Link)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        elif Type == "account":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in Query_List:
                Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True)

                if Query_Response:
                    Current_Step = 0

                    for Response in Query_Response:
                        Current_Response = pyhibp.get_single_breach(breach_name=Response['Name'])
                        JSON_Object = Common.JSON_Handler(Query_Response)
                        JSON_Query_Response = JSON_Object.Dump_JSON()
                        Link = "https://" + Current_Response['Domain']

                        if Current_Response['Domain'] not in Cached_Data and Current_Response['Domain'] not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Response['Domain'], "Account", Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), Concat_Plugin_Name)
                                Data_to_Cache.append(Current_Response['Domain'])
                            else:
                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                            Current_Step += 1

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        else:
            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Address_Search(self):
    try:
        Local_Plugin_Name = self.Plugin_Name + "-Address-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            if self.Type == "btc" or self.Type == "bch":
                Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"([\d\w]{26,34})")
            elif self.Type == "eth":
                Query_Regex = Common.Regex_Handler(Query, Custom_Regex=r"(0x[\w\d]{40})")
            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid type provided.")

            if Query_Regex:
                Main_URL = f"https://www.{self.Domain}/{self.Type}/address/{Query}"
                Main_Response = Common.Request_Handler(Main_URL)

                if self.Type == "btc":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"\/btc\/tx\/([\d\w]{64})", Findall=True)
                elif self.Type == "bch":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"([\d\w]{64})", Findall=True)
                elif self.Type == "eth":
                    Transaction_Regex = Common.Regex_Handler(Main_Response, Custom_Regex=r"(0x[\d\w]{64})", Findall=True)
                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid type provided.")

                if Transaction_Regex:
                    Current_Step = 0
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

                    for Transaction in Transaction_Regex:
                        Query_URL = f"https://www.{self.Domain}/{self.Type}/tx/{Transaction}"

                        if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Transaction_Responses = Common.Request_Handler(Query_URL, Filter=True, Host=f"https://www.{self.Domain}")
                            Transaction_Response = Transaction_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, self.The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), self.Plugin_Name.lower())
                                Data_to_Cache.append(Query_URL)
                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

def Search(self):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Ebay_API_Key = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:

            try:
                API_Request = Connection(appid=Ebay_API_Key, config_file=None)
                API_Response = API_Request.execute('findItemsAdvanced', {'keywords': Query})
                JSON_Output_Response = Common.JSON_Handler(API_Response.dict()).Dump_JSON()
                JSON_Object = Common.JSON_Handler(API_Response.dict())
                JSON_Response = JSON_Object.Dump_JSON(Indentation=0, Sort=False)
                JSON_Response = JSON_Object.To_JSON_Loads()
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])

                if JSON_Response["ack"] == "Success":
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
                    Current_Step = 0

                    for JSON_Line in JSON_Response['searchResult']['item']:
                        Ebay_Item_URL = JSON_Line['viewItemURL']
                        Title = "Ebay | " + General.Get_Title(Ebay_Item_URL)

                        if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Ebay_Item_Regex = Common.Regex_Handler(Ebay_Item_URL, Custom_Regex=r"https\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+")
                            Ebay_Item_Responses = Common.Request_Handler(Ebay_Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                            Ebay_Item_Response = Ebay_Item_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Ebay_Item_Response, Ebay_Item_Regex.group(1).rstrip("-"), self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Ebay_Item_URL, Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Ebay_Item_URL)
                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found.")

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make API call.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit'): if int(kwargs["Limit"]) > 0: Limit = kwargs["Limit"] else: Limit = 10 Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Ebay_API_Key = Load_Configuration() Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: try: API_Request = Connection(appid=Ebay_API_Key, config_file=None) API_Response = API_Request.execute('findItemsAdvanced', {'keywords': Query}) JSON_Output_Response = json.dumps(API_Response.dict(), indent=4, sort_keys=True) JSON_Response = json.dumps(API_Response.dict()) JSON_Response = json.loads(JSON_Response) General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, ".json") if JSON_Response["ack"] == "Success": Current_Step = 0 for JSON_Line in JSON_Response['searchResult']['item']: Ebay_Item_URL = JSON_Line['viewItemURL'] if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int( Limit): Ebay_Item_Regex = re.search( r"http\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+", Ebay_Item_URL) headers = { 'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5' } Ebay_Item_Response = requests.get(Ebay_Item_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Ebay_Item_Response, Ebay_Item_Regex.group(1), The_File_Extension) if Output_file: General.Connections( Output_file, Query, Plugin_Name, Ebay_Item_URL, "ebay.com", "Data Leakage", Task_ID, General.Get_Title(Ebay_Item_URL), Plugin_Name.lower()) Data_to_Cache.append(Ebay_Item_URL) Current_Step += 1 else: logging.warning( str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " No results found.") except: logging.info( str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Failed to make API call.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, Limit=10):
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Main_URL = f"https://www.{Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name, Win_Store_Response, Query, The_File_Extension)
            Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Application", Task_ID, Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Win_Store_Response, Regex_Group_1, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

def Transaction_Search(Query_List, Task_ID, Type, **kwargs):
    Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]
        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Local_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "btc" or Type == "bch":
            Query_Regex = re.search(r"[\d\w]{64}", Query)
        elif Type == "eth":
            Query_Regex = re.search(r"(0x[\d\w]{64})", Query)
        else:
            logging.warning(General.Date() + " Invalid type provided.")

        if Query_Regex:
            Main_URL = "https://www.blockchain.com/" + Type + "/tx/" + Query
            Main_Response = requests.get(Main_URL).text

            if Type == "btc":
                Address_Regex = re.findall(r"\/btc\/address\/([\d\w]{26,34})", Main_Response)
            elif Type == "bch":
                Address_Regex = re.findall(r"([\d\w]{42})", Main_Response)
            elif Type == "eth":
                Address_Regex = re.findall(r"(0x[\w\d]{40})", Main_Response)
            else:
                logging.warning(General.Date() + " Invalid type provided.")

            if Address_Regex:
                Current_Step = 0

                for Transaction in Address_Regex:
                    Query_URL = "https://www.blockchain.com/" + Type + "/address/" + Transaction

                    if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Transaction_Response = requests.get(Query_URL).text
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Query_URL, "blockchain.com", "Blockchain Address", Task_ID, General.Get_Title(Query_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Query_URL)

                        Current_Step += 1

            else:
                logging.warning(General.Date() + " Failed to match regular expression.")

        else:
            logging.warning(General.Date() + " Failed to match regular expression.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit'): if int(kwargs["Limit"]) > 0: Limit = kwargs["Limit"] else: Limit = 10 Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) try: Flickr_Details = Load_Configuration() flickr_api.set_keys(api_key=Flickr_Details[0], api_secret=Flickr_Details[1]) except: logging.info(General.Date() + " Failed to establish API identity.") for Query in Query_List: Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query) if Email_Regex: try: User = flickr_api.Person.findByEmail(Query) Photos = User.getPhotos() General.Main_File_Create(Directory, Plugin_Name, Photos, Query, ".txt") for Photo in Photos: Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo["id"] Current_Step = 0 if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit): headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'} Photo_Response = requests.get(Photo_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, Photo, The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Photo_URL, "flickr.com", "Data Leakage", Task_ID, General.Get_Title(Photo_URL), Plugin_Name.lower()) Data_to_Cache.append(Photo_URL) Current_Step += 1 except: logging.info(General.Date() + " Failed to make API call.") else: try: print(Query) User = flickr_api.Person.findByUserName(Query) Photos = User.getPhotos() General.Main_File_Create(Directory, Plugin_Name, Photos, Query, ".txt") for Photo in Photos: Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo["id"] Current_Step = 0 if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit): headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'} Photo_Response = requests.get(Photo_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Photo_URL, "flickr.com", "Data Leakage", Task_ID, General.Get_Title(Photo_URL), Plugin_Name.lower()) Data_to_Cache.append(Photo_URL) Current_Step += 1 except: logging.info(General.Date() + " Failed to make API call.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, Type, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit') and int(kwargs["Limit"]) > 0: Limit = int(kwargs["Limit"]) else: Limit = 10 Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: if Type == "User": Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = instagram_explore.user(Query) CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True) Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json") Posts = CSE_Response[0]["edge_owner_to_timeline_media"]["edges"] Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower()) Current_Step = 0 for Post in Posts: Shortcode = Post["node"]["shortcode"] URL = "https://www.instagram.com/p/" + Shortcode + "/" if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit): if Output_file: Output_Connections.Output(Output_file, URL, General.Get_Title(URL)) Data_to_Cache.append(URL) Current_Step += 1 elif Type == "Tag": Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = instagram_explore.tag(Query) CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True) Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json") Posts = CSE_Response[0]["edge_hashtag_to_media"]["edges"] Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower()) Current_Step = 0 for Post in Posts: Shortcode = Post["node"]["shortcode"] URL = "https://www.instagram.com/p/" + Shortcode + "/" if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit): if Output_file: Output_Connections.Output(Output_file, URL, General.Get_Title(URL)) Data_to_Cache.append(URL) Current_Step += 1 elif Type == "Location": Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = instagram_explore.location(Query) CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True) Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json") Posts = CSE_Response[0]["edge_location_to_media"]["edges"] Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower()) Current_Step = 0 for Post in Posts: Shortcode = Post["node"]["shortcode"] URL = "https://www.instagram.com/p/" + Shortcode + "/" if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit): if Output_file: Output_Connections.Output(Output_file, URL, General.Get_Title(URL)) Data_to_Cache.append(URL) Current_Step += 1 elif Type == "Media": Local_Plugin_Name = Plugin_Name + "-" + Type CSE_Response = instagram_explore.media(Query) if CSE_Response: CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True) Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, 
CSE_JSON_Output_Response, Query, ".json") URL = "https://www.instagram.com/p/" + Query + "/" if URL not in Cached_Data and URL not in Data_to_Cache: if Output_file: Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower()) Output_Connections.Output(Output_file, URL, General.Get_Title(URL)) Data_to_Cache.append(URL) else: logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid response.") else: logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid type provided.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
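# Illustrative only: the Instagram search above expects one of four Type values; the queries,
# location ID, shortcode and Task_ID below are placeholders.
def Example_Usage():
    Search(["example_user"], "1", "User", Limit=5)      # timeline posts for a profile
    Search(["osint"], "1", "Tag", Limit=5)              # posts under a hashtag
    Search(["213385402"], "1", "Location", Limit=5)     # posts for a location ID
    Search(["B8QrnkTnYKP"], "1", "Media")               # a single post by shortcode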
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Email_Rep_API_Key = self.Load_Configuration() for Query in self.Query_List: if Common.Regex_Handler(Query, Type="Email"): API = EmailRep(Email_Rep_API_Key) JSON_Output_Response = API.query(Query) Link = f"https://{self.Domain}/{Query}" JSON_Object = Common.JSON_Handler(JSON_Output_Response) JSON_Output_Response = JSON_Object.Dump_JSON() JSON_Response = JSON_Object.To_JSON_Loads() if JSON_Response["reputation"] != "none": Table_JSON = {} for Key, Value in JSON_Response.items(): if Key != "details": Table_JSON[Key] = Value else: for Det_Key, Det_Val in JSON_Response[ "details"].items(): Table_JSON[Det_Key] = Det_Val Filter_JSON = [Table_JSON] Output_Connections = General.Connections( Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name) if Query not in Cached_Data and Query not in Data_to_Cache: Responses = Common.Request_Handler( Link, Filter=True, Host=f"https://{self.Domain}") Filtered_Response = Responses["Filtered"] Title = f"Email Reputation | {Query}" Main_File = General.Main_File_Create( Directory, self.Concat_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, self.Concat_Plugin_Name, Filtered_Response, Title, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Link, Title, self.Concat_Plugin_Name) Data_to_Cache.append(Link) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: if self.Type == "Email": if Common.Regex_Handler(Query, Type=self.Type): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Account", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to email regular expression." 
) elif self.Type == "Domain": if Common.Regex_Handler(Query, Type=self.Type): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to domain regular expression." ) elif self.Type == "IP Address": if Common.Regex_Handler(Query, Type="IP"): Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Domain Information", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output( [Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." 
) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to IP address regular expression." ) elif self.Type == "AV": Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Virus", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) elif self.Type == "Virus Report": Local_Plugin_Name = self.Plugin_Name + " " + self.Type URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}" Response = Common.Request_Handler(URL) JSON_Object = Common.JSON_Handler(Response) JSON_Response = JSON_Object.To_JSON_Loads() if int(JSON_Response.get("response_code")) != 0: JSON_Output_Response = JSON_Object.Dump_JSON() Permalink = JSON_Response.get("permalink") Permalink_Responses = Common.Request_Handler( URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}") Permalink_Response = Permalink_Responses["Filtered"] Title = "Threat Crowd | " + General.Get_Title( Permalink, Requests=True ).replace( " | Threatcrowd.org Open Source Threat Intelligence", "") Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Permalink_Response, Query, self.The_File_Extensions["Query"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Virus Report", self.Task_ID, Local_Plugin_Name.lower()) if Output_file: Output_Connections.Output([Main_File, Output_file], Permalink, Title, self.Plugin_Name.lower()) Data_to_Cache.append(URL) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) else: logging.info( f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results." ) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided." 
) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
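# Illustrative only: a compact reference of the ThreatCrowd v2 endpoints the method above selects
# from by self.Type. The constant name is not part of the plugin; <query> marks the substituted value.
THREATCROWD_ENDPOINTS = {
    "Email": "https://www.threatcrowd.org/searchApi/v2/email/report/?email=<query>",
    "Domain": "https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=<query>",
    "IP Address": "https://www.threatcrowd.org/searchApi/v2/ip/report/?ip=<query>",
    "AV": "https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus=<query>",
    "Virus Report": "https://www.threatcrowd.org/searchApi/v2/file/report/?resource=<query>",
}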
def Search(Query_List, Task_ID, **kwargs): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(kwargs) for Query in Query_List: Tor_Pull_URL = Tor_General_URL + Query Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL, Tor_Scrape_Regex_URL) if Tor_Scrape_URLs: Output_file = General.Main_File_Create(Directory, Tor_Plugin_Name.lower(), "\n".join(Tor_Scrape_URLs), Query, The_File_Extension) if Output_file: Current_Step = 0 Output_Connections = General.Connections(Query, Tor_Plugin_Name, "ahmia.fi", "Darkweb Link", Task_ID, Plugin_Name.lower()) for URL in Tor_Scrape_URLs: if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit): Title = f"Ahmia Tor | {URL}" Output_Connections.Output([Output_file], URL, Title, Plugin_Name.lower()) Data_to_Cache.append(URL) Current_Step += 1 else: logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") else: logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - No Tor links scraped.") I2P_Pull_URL = I2P_General_URL + Query I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL, I2P_Scrape_Regex_URL) if I2P_Scrape_URLs: Output_file = General.Main_File_Create(Directory, I2P_Plugin_Name.lower(), "\n".join(I2P_Scrape_URLs), Query, The_File_Extension) if Output_file: Current_Step = 0 Output_Connections = General.Connections(Query, I2P_Plugin_Name, "ahmia.fi", "Darkweb Link", Task_ID, Plugin_Name.lower()) for URL in I2P_Scrape_URLs: if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit): Title = f"Ahmia I2P | {URL}" Output_Connections.Output([Output_file], URL, Title, Plugin_Name.lower()) Data_to_Cache.append(URL) Current_Step += 1 else: logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") else: logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - No I2P links scraped.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w") except Exception as e: logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
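# Illustrative only: a minimal invocation of the Ahmia search above; the query and Task_ID are
# placeholders, and the limit falls back to General.Get_Limit's default when omitted.
def Example_Usage():
    Search(["bitcoin"], "1", Limit=10)   # scrapes both Tor and I2P result pages for the query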
def Search(Query_List, Task_ID, Type, **kwargs): try: Data_to_Cache = [] Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) Query_List = General.Convert_to_List(Query_List) for Query in Query_List: try: if Type == "CIK": Main_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={Query}&owner=exclude&count=40&hidefilings=0' Responses = General.Request_Handler( Main_URL, Filter=True, Host=f"https://www.{Domain}") Response = Responses["Regular"] try: if 'No matching CIK.' not in Response: Query = str(int(Query)) Response = Responses["Filtered"] if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Response, f"edgar-american-business-search-{Query.lower()}", The_File_Extensions["Query"]) if Output_file: Output_Connections = General.Connections( Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name) Output_Connections.Output( [Output_file], Main_URL, f"American Business Number (EDGAR) {Query}", Concat_Plugin_Name) Data_to_Cache.append(Main_URL) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) except: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CIK Search." ) elif Type == "ACN": Main_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?company={Query}&owner=exclude&action=getcompany' Responses = General.Request_Handler( Main_URL, Filter=True, Host=f"https://www.{Domain}") Response = Responses["Regular"] Filtered_Response = Responses["Filtered"] Limit = General.Get_Limit(kwargs) try: ACN = re.search(r".*[a-zA-Z].*", Query) if ACN: Main_File = General.Main_File_Create( Directory, Plugin_Name, Filtered_Response, Query, The_File_Extensions["Main"]) Current_Step = 0 CIKs_Regex = re.findall( r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>", Response) if CIKs_Regex: Output_Connections = General.Connections( Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name) for CIK_URL, ACN in CIKs_Regex: Full_CIK_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={CIK_URL}&owner=exclude&count=40&hidefilings=0' if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int( Limit): Current_Responses = General.Request_Handler( Full_CIK_URL, Filter=True, Host=f"https://www.{Domain}") Current_Response = Current_Responses[ "Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Full_CIK_URL, f"American Business Number (EDGAR) {CIK_URL} for Query {Query}", Concat_Plugin_Name) Data_to_Cache.append(Full_CIK_URL) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Response did not match regular expression." 
) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Query did not match regular expression." ) except: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ACN Search." ) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type." ) except: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request." ) General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name) except Exception as e: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
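# Illustrative only: the EDGAR search above accepts two request types; the values below are
# placeholders rather than real filings.
def Example_Usage():
    Search(["<10-digit-CIK>"], "1", "CIK")               # lookup by SEC Central Index Key
    Search(["Example Company"], "1", "ACN", Limit=5)     # lookup by company name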
def Transaction_Search(Query_List, Task_ID, Type, Limit=10): try: Local_Plugin_Name = Plugin_Name + "-Transaction-Search" Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Local_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(Limit) for Query in Query_List: if Type != "monero": if Type == "btc" or Type == "bch": Query_Regex = Common.Regex_Handler( Query, Custom_Regex=r"[\d\w]{64}") elif Type == "eth": Query_Regex = Common.Regex_Handler( Query, Custom_Regex=r"(0x[\d\w]{64})") else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided." ) if Query_Regex: Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}" Main_Response = Common.Request_Handler(Main_URL) if Type == "btc": Address_Regex = Common.Regex_Handler( Main_Response, Custom_Regex=r"\/btc\/address\/([\d\w]{26,34})", Findall=True) elif Type == "bch": Address_Regex = Common.Regex_Handler( Main_Response, Custom_Regex=r"([\d\w]{42})", Findall=True) elif Type == "eth": Address_Regex = Common.Regex_Handler( Main_Response, Custom_Regex=r"(0x[\w\d]{40})", Findall=True) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided." ) if Address_Regex: Current_Step = 0 Output_Connections = General.Connections( Query, Local_Plugin_Name, Domain, "Blockchain Address", Task_ID, Plugin_Name.lower()) for Transaction in Address_Regex: Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}" if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int( Limit): Transaction_Responses = Common.Request_Handler( Query_URL, Filter=True, Host=f"https://www.{Domain}") Transaction_Response = Transaction_Responses[ "Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension) if Output_file: Output_Connections.Output( [Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower()) Data_to_Cache.append(Query_URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression." ) else: Alt_Domain = "localmonero.co" Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}" Transaction_Response = Common.Request_Handler(Query_URL) if "Whoops, looks like something went wrong." 
not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache: Transaction_Responses = Common.Request_Handler( Query_URL, Filter=True, Host=f"https://{Alt_Domain}") Transaction_Response = Transaction_Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Transaction_Response, Query, The_File_Extension) if Output_file: Output_Connections = General.Connections( Query, Local_Plugin_Name, Alt_Domain, "Blockchain Transaction", Task_ID, Plugin_Name.lower()) Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL, Requests=True), Plugin_Name.lower()) Data_to_Cache.append(Query_URL) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
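# Illustrative only: supported Type values for the transaction search above; "monero" queries are
# routed to localmonero.co rather than blockchain.com. The hash and Task_ID are placeholders.
def Example_Usage():
    for Chain in ("btc", "bch", "eth", "monero"):
        Transaction_Search(["<transaction-hash-or-block-id>"], "1", Chain, Limit=5)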
def Search(Query_List, Task_ID, **kwargs): Data_to_Cache = [] Cached_Data = [] if kwargs.get('Limit') and int(kwargs["Limit"]) > 0: Limit = int(kwargs["Limit"]) else: Limit = 10 Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Concat_Plugin_Name) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) for Query in Query_List: try: Play_Store_Response = play_scraper.developer(Query, results=Limit) Play_Store_Response_JSON = json.dumps(Play_Store_Response, indent=4, sort_keys=True) General.Main_File_Create(Directory, Plugin_Name, Play_Store_Response_JSON, Query, ".json") for Result_Details in Play_Store_Response: Result_URL = Result_Details['url'] if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache: Play_Store_Regex = re.search( r"https\:\/\/play\.google\.com\/store\/apps\/details\?id\=([\w\d\_\-\.]+)", Result_URL) if Play_Store_Regex: headers = { 'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5' } Play_Store_Result_Response = requests.get( Result_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Play_Store_Result_Response, Play_Store_Regex.group(1), The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Result_URL, "play.google.com", "Data Leakage", Task_ID, General.Get_Title(Result_URL), Concat_Plugin_Name) Data_to_Cache.append(Result_URL) except: logging.info( General.Date() + " Failed to get results, this may be due to the query provided." ) if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID): Data_to_Cache = [] Cached_Data = [] Directory = General.Make_Directory(Concat_Plugin_Name) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Cached_Data = General.Get_Cache(Directory, Plugin_Name) if not Cached_Data: Cached_Data = [] Query_List = General.Convert_to_List(Query_List) try: DNS_Info = checkdmarc.check_domains(Query_List) if len(Query_List) > 1: for DNS_Item in DNS_Info: Query = DNS_Item['base_domain'] Output_Dict = json.dumps(DNS_Item, indent=4, sort_keys=True) Link = "https://www." + Query Title = "DNS Information for " + Query if Link not in Data_to_Cache and Link not in Cached_Data: Output_file = General.Main_File_Create( Directory, Plugin_Name, Output_Dict, Query, The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Link, Query, "Domain Spoof", Task_ID, Title, Concat_Plugin_Name) Data_to_Cache.append(Link) else: Query = DNS_Info['base_domain'] Output_Dict = json.dumps(DNS_Info, indent=4, sort_keys=True) Link = "https://www." + Query Title = "DNS Information for " + Query if Link not in Data_to_Cache and Link not in Cached_Data: Output_file = General.Main_File_Create(Directory, Plugin_Name, Output_Dict, Query, The_File_Extension) if Output_file: General.Connections(Output_file, Query, Plugin_Name, Link, Query, "Domain Spoof", Task_ID, Title, Concat_Plugin_Name) Data_to_Cache.append(Link) except: logging.warning(General.Date() + " Error retrieving DNS details.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
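# Illustrative only: checkdmarc.check_domains() is handed the whole query list in one call, which is
# why the function above treats a single-domain result differently from a multi-domain result list.
# The domains and Task_ID below are placeholders.
def Example_Usage():
    Search(["example.com", "example.org"], "1")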
def General_Pull(Handle, Limit, Directory, API, Task_ID): try: Data_to_Cache = [] JSON_Response = [] Cached_Data_Object = General.Cache(Directory, Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() Latest_Tweets = API.user_timeline(screen_name=Handle, count=Limit) for Tweet in Latest_Tweets: try: JSON_Response.append({ 'id': Tweet.id, 'text': Tweet.text, 'author_name': Tweet.user.screen_name, 'url': Tweet.entities['urls'][0]["expanded_url"] }) except: JSON_Response.append({ 'id': Tweet.id, 'text': Tweet.text, 'author_name': Tweet.user.screen_name }) JSON_Object = Common.JSON_Handler(JSON_Response) JSON_Output = JSON_Object.Dump_JSON() Output_Connections = General.Connections(Handle, Plugin_Name, Domain, "Social Media - Media", Task_ID, Plugin_Name.lower()) Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output, Handle, The_File_Extensions["Main"]) for JSON_Item in JSON_Response: if all(Item in JSON_Item for Item in ['id', 'url', 'text']): Link = JSON_Item['url'] if Link not in Cached_Data and Link not in Data_to_Cache: Title = "Twitter | " + JSON_Item['text'] Item_Responses = Common.Request_Handler( Link, Filter=True, Host=f"https://{Domain}") Item_Response = Item_Responses["Filtered"] Output_file = General.Create_Query_Results_Output_File( Directory, Handle, Plugin_Name, Item_Response, str(JSON_Item['id']), The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File, Output_file], Link, Title, Plugin_Name.lower()) Data_to_Cache.append(Link) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Output file not returned." ) else: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - Insufficient parameters provided." ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
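# Illustrative only: General_Pull above expects an already-authenticated tweepy API object. A minimal
# wiring sketch with placeholder credentials; Example_Pull is not part of the plugin.
import tweepy

def Example_Pull(Handle, Directory, Task_ID):
    auth = tweepy.OAuthHandler("<consumer-key>", "<consumer-secret>")
    auth.set_access_token("<access-token>", "<access-token-secret>")
    # Pull the ten most recent tweets for the handle and hand them to the plugin routine above.
    General_Pull(Handle, 10, Directory, tweepy.API(auth), Task_ID)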
def Search(Query_List, Task_ID, Type, **kwargs): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) VK_Access_Token = Load_Configuration() Cached_Data = General.Get_Cache(Directory, Plugin_Name) Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(kwargs) for Query in Query_List: if Type == "User": VK_Response = requests.get( f"https://api.vk.com/method/users.search?v=5.52&access_token={VK_Access_Token}&fields=verified, blacklisted, sex, bdate, city, country, home_town, photo_50, photo_100, photo_200_orig, photo_200, photo_400_orig, photo_max, photo_max_orig, online, lists, domain, has_mobile, contacts, site, education, universities, schools, status, last_seen, followers_count, common_count, counters, occupation, nickname, relatives, relation, personal, connections, exports, wall_comments, activities, interests, music, movies, tv, books, games, about, quotes, can_post, can_see_all_posts, can_see_audio, can_write_private_message, timezone, screen_name&q={Query}&count={str(Limit)}" ).text JSON_Response = json.loads(VK_Response) JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True) Main_File = General.Main_File_Create( Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Plugin_Name, "vk.com", "Social Media - Person", Task_ID, Plugin_Name.lower()) New_JSON_Response = Recursive_Dict_Check(["response", "items"], JSON_Response) if New_JSON_Response: for VK_Item_Line in New_JSON_Response: try: if all(Item in VK_Item_Line for Item in ["first_name", "last_name", "screen_name"]): VK_URL = "https://vk.com/" + VK_Item_Line[ 'screen_name'] Full_Name = VK_Item_Line[ "first_name"] + " " + VK_Item_Line[ "last_name"] Title = f"VK User | {Full_Name}" if VK_URL not in Cached_Data and VK_URL not in Data_to_Cache: headers = { 'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0', 'Accept': 'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5' } VK_Item_Response = requests.get( VK_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, VK_Item_Response, VK_URL, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], VK_URL, Title, Plugin_Name.lower()) Data_to_Cache.append(VK_URL) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) except Exception as e: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}" ) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - No results found." 
) if Type == "Group": VK_Response = requests.get( f"https://api.vk.com/method/groups.search?v=5.52&access_token={VK_Access_Token}&q={Query}&count={str(Limit)}" ).text JSON_Response = json.loads(VK_Response) JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True) Main_File = General.Main_File_Create( Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Plugin_Name, "vk.com", "Social Media - Group", Task_ID, Plugin_Name.lower()) New_JSON_Response = Recursive_Dict_Check(["response", "items"], JSON_Response) if New_JSON_Response: for VK_Item_Line in New_JSON_Response: try: if all(Item in VK_Item_Line for Item in ["name", "screen_name"]): VK_URL = "https://vk.com/" + VK_Item_Line[ 'screen_name'] Full_Name = VK_Item_Line["name"] Title = f"VK Group | {Full_Name}" if VK_URL not in Cached_Data and VK_URL not in Data_to_Cache: headers = { 'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0', 'Accept': 'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5' } VK_Item_Response = requests.get( VK_URL, headers=headers).text Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, VK_Item_Response, VK_URL, The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], VK_URL, Title, Plugin_Name.lower()) Data_to_Cache.append(VK_URL) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) except Exception as e: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}" ) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - No results found." ) if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w") except Exception as e: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
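# Illustrative only: the VK search above handles two Type values backed by the users.search and
# groups.search API methods; the queries and Task_ID are placeholders.
def Example_Usage():
    Search(["Ivan Ivanov"], "1", "User", Limit=5)
    Search(["OSINT"], "1", "Group", Limit=5)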
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) Shodan_API_Key = self.Load_Configuration() API_Session = PyHunter(Shodan_API_Key) Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: try: if self.Type == "Domain": if Common.Regex_Handler(Query, Type="Domain"): Local_Plugin_Name = self.Plugin_Name + "-Domain" API_Response = API_Session.domain_search(Query) JSON_Output_Response = Common.JSON_Handler( API_Response).Dump_JSON() if API_Response.get("domain") and API_Response.get( "emails"): Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Account", self.Task_ID, self.Plugin_Name.lower()) Current_Step = 0 for Hunter_Item in API_Response["emails"]: Current_Email_Address = Hunter_Item[ "value"] Current_Hunter_Item_Host = f"https://{self.Domain}/verify/{Current_Email_Address}" Current_Hunter_Item_Responses = Common.Request_Handler( Current_Hunter_Item_Host, Filter=True, Host=f"https://{self.Domain}") Filtered_Response = Current_Hunter_Item_Responses[ "Filtered"] Title = "Hunter | " + Current_Email_Address if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < int( self.Limit): Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Current_Hunter_Item_Host, Title, self.Plugin_Name.lower()) Data_to_Cache.append( Current_Email_Address) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." 
) Current_Step += 1 elif self.Type == "Email": if Common.Regex_Handler(Query, Type="Email"): Local_Plugin_Name = self.Plugin_Name + "-Email" API_Response = API_Session.email_verifier(Query) JSON_Output_Response = Common.JSON_Handler( API_Response).Dump_JSON() if API_Response.get("email") and API_Response.get( "sources"): Main_File = General.Main_File_Create( Directory, Local_Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"]) Output_Connections = General.Connections( Query, Local_Plugin_Name, self.Domain, "Account Source", self.Task_ID, self.Plugin_Name.lower()) Current_Step = 0 for Hunter_Item in API_Response["sources"]: Current_Hunter_Item_Host = Hunter_Item[ "uri"] Current_Hunter_Item_Domain = Hunter_Item[ "Domain"] if 'http://' in Current_Hunter_Item_Host: Current_Hunter_Item_Responses = Common.Request_Handler( Current_Hunter_Item_Host, Filter=True, Host= f"http://{Current_Hunter_Item_Domain}" ) Filtered_Response = Current_Hunter_Item_Responses[ "Filtered"] elif 'https://' in Current_Hunter_Item_Host: Current_Hunter_Item_Responses = Common.Request_Handler( Current_Hunter_Item_Host, Filter=True, Host= f"https://{Current_Hunter_Item_Domain}" ) Filtered_Response = Current_Hunter_Item_Responses[ "Filtered"] else: Filtered_Response = Common.Request_Handler( Current_Hunter_Item_Host) Title = "Hunter | " + Current_Hunter_Item_Host if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < int( self.Limit): Output_file = General.Create_Query_Results_Output_File( Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, self.The_File_Extensions["Query"]) if Output_file: Output_Connections.Output( [Main_File, Output_file], Current_Hunter_Item_Host, Title, self.Plugin_Name.lower()) Data_to_Cache.append( Current_Hunter_Item_Host) else: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist." ) Current_Step += 1 except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to complete task - {str(e)}" ) Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning( f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self): try: Data_to_Cache = [] Directory = General.Make_Directory(self.Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, self.Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) try: File_Dir = os.path.dirname(os.path.realpath(__file__)) Configuration_File = os.path.join(File_Dir, 'plugins/common/config/RSS_Feeds.txt') Current_File = open(Configuration_File, "r") # Open the configuration file and retrieve each RSS feed URL to search. URLs = Current_File.read().splitlines() Current_File.close() except: URLs = [] logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Please provide a valid RSS_Feeds file; failed to open the file containing the feed URLs to search.") Cached_Data_Object = General.Cache(Directory, self.Plugin_Name) Cached_Data = Cached_Data_Object.Get_Cache() for Query in self.Query_List: for URL in URLs: # URLs to be controlled by the web app. RSS = feedparser.parse(URL) Current_Step = 0 for Feed in RSS.entries: if Query in Feed.description: Dump_Types = General.Data_Type_Discovery(Feed.description) File_Link = Feed.link.replace("https://", "") File_Link = File_Link.replace("http://", "") File_Link = File_Link.replace("www.", "") File_Link = File_Link.replace("/", "-") Domain = URL.replace("https://", "") Domain = Domain.replace("http://", "") Domain = Domain.replace("www.", "") if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(self.Limit): Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Feed.description, File_Link, self.The_File_Extension) Title = "RSS Feed | " + General.Get_Title(Feed.link) if Output_file: Output_Connections = General.Connections(Query, self.Plugin_Name, Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower()) Output_Connections.Output([Output_file], Feed.link, Title, self.Plugin_Name.lower(), Dump_Types=Dump_Types) Data_to_Cache.append(Feed.link) else: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.") Current_Step += 1 else: logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Query not found.") Cached_Data_Object.Write_Cache(Data_to_Cache) except Exception as e: logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
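# Illustrative only: the RSS plugin above reads one feed URL per line from
# plugins/common/config/RSS_Feeds.txt; a hypothetical file might contain:
#
#   https://feeds.bbci.co.uk/news/rss.xml
#   https://www.reddit.com/r/netsec/.rss
#
# Each query string is then substring-matched against every entry description in every feed.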