def Search(Query_List, Task_ID, Limit=10):

    try:
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Twitter_Credentials = Load_Configuration()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                # Authenticate against the Twitter API, then pull results for the query.
                Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
                Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
                API = tweepy.API(Authentication)
                General_Pull(Query, Limit, Directory, API, Task_ID)

            except Exception:
                logging.info(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to get results. Are you connected to the internet?")

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
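# A minimal invocation sketch (not from the source): how a module-level Search
# function like the one above would presumably be driven. The query, task ID,
# and limit values below are illustrative placeholders only.
if __name__ == "__main__":
    Search("example_twitter_handle", "1", Limit=5)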
def __init__(self, Query_List, Task_ID, Limit=10):
    self.The_File_Extension = ".html"
    self.Plugin_Name = "RSS"
    self.Logging_Plugin_Name = self.Plugin_Name + " Feed Search"
    self.Result_Type = "News Report"
    self.Limit = General.Get_Limit(Limit)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Flickr" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "flickr.com" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "DuckDuckGo" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "duckduckgo.com" self.Result_Type = "Search Result" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Twitter" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "twitter.com" self.Result_Type = "Social Media - Page" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Reddit" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "reddit.com" self.Result_Type = "Forum" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Phishstats" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "phishstats.info" self.Result_Type = "Phishing" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Torrent" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".json" self.Domain = "thepiratebay.org" self.Result_Type = "Torrent" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Library Genesis" self.Concat_Plugin_Name = "libgen" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "gen.lib.rus.ec" self.Result_Type = "Publication" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "IntelligenceX" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "intelx.io" self.Result_Type = "Data Leakage" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Windows Store" self.Concat_Plugin_Name = "windowsstore" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "microsoft.com" self.Result_Type = "Application" self.Limit = General.Get_Limit(Limit)
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        # Load previously reported URLs so duplicates are skipped.
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
            Craigslist_Response = feedparser.parse(Main_URL)
            Craigslist_Items = Craigslist_Response["items"]
            Current_Step = 0

            for Item in Craigslist_Items:
                Item_URL = Item["link"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Craigslist_Response = requests.get(Item_URL).text
                    Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                    Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org/"
                    # Derive a flat filename from the listing's URL path.
                    Filename = Item_URL.replace(Local_URL, "")
                    Filename = Filename.replace(".html/", "")
                    Filename = Filename.replace(".html", "")
                    Filename = Filename.replace("/", "-")
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Craigslist_Response, Filename, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Local_Domain, "Search Result", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Item_URL, General.Get_Title(Item_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                    Current_Step += 1

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
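# Standalone sketch of the feedparser call used above, mirroring the RSS search
# URL the plugin builds; the "newyork" location and "laptop" query are
# placeholder values. feedparser exposes parsed entries under both the "items"
# and "entries" keys.
import feedparser

Feed = feedparser.parse("https://newyork.craigslist.org/search/sss?format=rss&query=laptop")

for Item in Feed["items"]:
    print(Item["link"])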
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Username-Search" self.Concat_Plugin_Name = "usernamesearch" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "usersearch.org" self.Result_Type = "Account" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Apple Store" self.Concat_Plugin_Name = "applestore" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "itunes.apple.com" self.Result_Type = "Application" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Blockchain" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "blockchain.com" self.Monero_Domain = "localmonero.co" self.Result_Type = "Blockchain Transaction" self.Type = Type self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "UK Business" self.Concat_Plugin_Name = "ukbusiness" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "companieshouse.gov.uk" self.Result_Type = "Company Details" self.Type = Type self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Shodan" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "shodan.io" self.Result_Type = "Domain Information" self.Type = Type self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Vulners" self.Unacceptable_Bulletins = ["advertisement", "kitsploit"] self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "vulners.com" self.Result_Type = "Exploit" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Limit=10): self.Plugin_Name = "Default Password" self.Concat_Plugin_Name = "defaultpassword" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "default-password.info" self.Result_Type = "Credentials" self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Australian Business" self.Concat_Plugin_Name = "australianbusiness" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".html", "Query": ".html"} self.Domain = "abr.business.gov.au" self.Result_Type = "Company Details" self.Limit = General.Get_Limit(Limit) self.Type = Type
def Search(Query_List, Task_ID, **kwargs): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) YouTube_Details = Load_Configuration() Cached_Data = General.Get_Cache(Directory, Plugin_Name) Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(kwargs) for Query in Query_List: YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0], cache_discovery=False) if YouTube_Details[3] and YouTube_Details[4]: Search_Response = YouTube_Handler.search().list(q=Query, type='video', location=YouTube_Details[3], locationRadius=YouTube_Details[4], part='id,snippet', maxResults=Limit,).execute() else: Search_Response = YouTube_Handler.search().list(q=Query, type='video', part='id,snippet', maxResults=Limit,).execute() Main_File = General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, The_File_Extensions["Main"]) Output_Connections = General.Connections(Query, Plugin_Name, "youtube.com", "Social Media - Media", Task_ID, Plugin_Name.lower()) for Search_Result in Search_Response.get('items', []): Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId'] Search_Video_Response = requests.get(Full_Video_URL).text Title = "YouTube | " + Search_Result['snippet']['title'] if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache: Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extensions["Query"]) if Output_file: Output_Connections.Output([Main_File, Output_file], Full_Video_URL, Title, Plugin_Name.lower()) Data_to_Cache.append(Full_Video_URL) else: logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.") if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w") except Exception as e: logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Have I Been Pwned" self.Concat_Plugin_Name = "haveibeenpwned" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".json" self.Domain = "haveibeenpwned.com" self.Result_Type_1 = "Account" self.Result_Type_2 = "Credentials" self.Type = Type self.Limit = General.Get_Limit(Limit)
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Canadian Business" self.Concat_Plugin_Name = "canadianbusiness" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extensions = {"Main": ".json", "Query": ".html"} self.Domain = "beta.canadasbusinessregistries.ca" self.Result_Type = "Company Details" self.Limit = General.Get_Limit(Limit) self.Type = Type
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "NZ Business" self.Concat_Plugin_Name = "nzbusiness" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name( self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Domain = "app.companiesoffice.govt.nz" self.Result_Type = "Company Details" self.Type = Type self.Limit = General.Get_Limit(Limit)
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Response = Common.Request_Handler('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc')
            JSON_Object = Common.JSON_Handler(Response)
            Response = JSON_Object.To_JSON_Loads()
            JSON_Response = JSON_Object.Dump_JSON()
            Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Response, Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def __init__(self, Query_List, Task_ID, Type, Limit=10): self.Plugin_Name = "Ahmia" self.Tor_Plugin_Name = "Ahmia-Tor" self.I2P_Plugin_Name = "Ahmia-I2P" self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(self.Plugin_Name) self.Task_ID = Task_ID self.Query_List = General.Convert_to_List(Query_List) self.The_File_Extension = ".html" self.Tor_Pull_URL = "" self.I2P_Pull_URL = "" self.Domain = "ahmia.fi" self.Tor_General_URL = f"https://{self.Domain}/search/?q=" self.I2P_General_URL = f"https://{self.Domain}/search/i2p/?q=" self.Tor_Scrape_Regex_URL = r"(http\:\/\/[\d\w]+\.onion(?:\/[\/\.\-\?\=\%\d\w]+)?)" self.I2P_Scrape_Regex_URL = r"(http\:\/\/[\d\w]+\.i2p(?:\/[\/\.\-\?\=\%\d\w]+)?)" self.Result_Type = "Darkweb Link" self.Limit = General.Get_Limit(Limit) self.Type = Type
def Search(Query_List, Task_ID, **kwargs): try: Data_to_Cache = [] Directory = General.Make_Directory(Plugin_Name.lower()) logger = logging.getLogger() logger.setLevel(logging.INFO) Log_File = General.Logging(Directory, Plugin_Name.lower()) handler = logging.FileHandler(os.path.join(Directory, Log_File), "w") handler.setLevel(logging.DEBUG) formatter = logging.Formatter("%(levelname)s - %(message)s") handler.setFormatter(formatter) logger.addHandler(handler) try: File_Dir = os.path.dirname(os.path.realpath('__file__')) Configuration_File = os.path.join( File_Dir, 'plugins/common/config/RSS_Feeds.txt') Current_File = open( Configuration_File, "r" ) # Open the provided file and retrieve each client to test. URLs = Current_File.read().splitlines() Current_File.close() except: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Please provide a valid file, failed to open the file which contains the data to search for." ) Cached_Data = General.Get_Cache(Directory, Plugin_Name) Query_List = General.Convert_to_List(Query_List) Limit = General.Get_Limit(kwargs) for Query in Query_List: for URL in URLs: # URLs to be controlled by the web app. RSS = feedparser.parse(URL) Current_Step = 0 for Feed in RSS.entries: if Query in Feed.description: Dump_Types = General.Data_Type_Discovery( Feed.description) File_Link = Feed.link.replace("https://", "") File_Link = File_Link.replace("http://", "") File_Link = File_Link.replace("www.", "") File_Link = File_Link.replace("/", "-") Domain = URL.replace("https://", "") Domain = Domain.replace("http://", "") Domain = Domain.replace("www.", "") if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int( Limit): Output_file = General.Create_Query_Results_Output_File( Directory, Query, Plugin_Name, Feed.description, File_Link, The_File_Extension) Title = "RSS Feed | " + General.Get_Title( Feed.link) if Output_file: Output_Connections = General.Connections( Query, Plugin_Name, Domain, "News Report", Task_ID, Plugin_Name.lower()) Output_Connections.Output( [Output_file], Feed.link, Title, Plugin_Name.lower(), Dump_Types=Dump_Types) Data_to_Cache.append(Feed.link) else: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist." ) Current_Step += 1 else: logging.info( f"{General.Date()} - {__name__.strip('plugins.')} - Query not found." ) if Cached_Data: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a") else: General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w") except Exception as e: logging.warning( f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except Exception:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to make request, are you connected to the internet?")
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Object.Dump_JSON(), Query, The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Application", Task_ID, Concat_Plugin_Name)

                    for JSON_Item in JSON_Response['results']:
                        JSON_Item_Responses = Common.Request_Handler(JSON_Item['artistViewUrl'], Filter=True, Host=f"https://{Domain}")
                        JSON_Item_Response = JSON_Item_Responses["Filtered"]

                        if JSON_Item['artistViewUrl'] not in Cached_Data and JSON_Item['artistViewUrl'] not in Data_to_Cache:
                            # Extract the developer ID from the artist page URL.
                            Apple_Store_Regex = Common.Regex_Handler(JSON_Item['artistViewUrl'], Custom_Regex=r"https\:\/\/apps\.apple\.com\/" + rf"{Location}" + r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Item_Response, Apple_Store_Regex.group(1), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], JSON_Item['artistViewUrl'], General.Get_Title(JSON_Item['artistViewUrl']), Concat_Plugin_Name)
                                    Data_to_Cache.append(JSON_Item['artistViewUrl'])

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid value provided, value not greater than 0.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid value.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
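# Standalone sketch of the iTunes Search API request built above, issued with
# the requests library; the "example" search term and "us" country code are
# placeholder values.
import requests

Response = requests.get("https://itunes.apple.com/search?term=example&country=us&entity=software&limit=10").json()

if Response.get("resultCount", 0) > 0:

    for Result in Response["results"]:
        print(Result.get("artistViewUrl"))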
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    # Look up a single Canadian Business Number via the registry's search API.
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = Common.Request_Handler(Main_API_URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Indented_JSON_Response = JSON_Object.Dump_JSON()
                    Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain.replace("beta.", ""), "Company Details", Task_ID, Plugin_Name)
                                    Output_Connections.Output([Main_Output_File, Output_file], Main_URL, f"Canadian Business Number {Query}", Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for CBN Search.")

                elif Type == "CCN":
                    # Search by company name, paging through results until the limit is reached.
                    Total_Results = 0
                    Iterator = "page=0"
                    Limit = General.Get_Limit(Limit)

                    while Limit > Total_Results and Iterator is not None:
                        Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(Query) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                        Response = Common.Request_Handler(Main_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Total_Results += len(JSON_Response["docs"])

                        if "paging" in JSON_Response and "next" in JSON_Response.get("paging"):
                            Iterator = JSON_Response["paging"]["next"]

                        else:
                            Iterator = None

                    Indented_JSON_Response = JSON_Object.Dump_JSON()

                    try:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain.replace("beta.", ""), "Company Details", Task_ID, Plugin_Name)

                        for JSON_Item in JSON_Response['docs']:

                            if JSON_Item.get('BN'):
                                CCN = JSON_Item['Company_Name']
                                CBN = str(int(JSON_Item['BN']))
                                Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    Current_Responses = Common.Request_Handler(Full_CCN_URL, Filter=True, Host=f"https://{Domain}")
                                    Current_Response = Current_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), CCN.replace(' ', '-'), The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Full_CCN_URL, f"Canadian Business Number {CBN} for Query {Query}", Concat_Plugin_Name)
                                        Data_to_Cache.append(Full_CCN_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Unable to retrieve business numbers from the JSON response.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for CCN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Invalid request type.")

            except Exception:
                logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        # Set up a plugin-specific log file.
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        IX_Access_Token = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            # Submit the search, then fetch its results using the returned search ID.
            Data = {"term": Query, "buckets": [], "lookuplevel": 0, "maxresults": Limit, "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0, "terminate": []}
            IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search?k={IX_Access_Token}", Method="POST", JSON_Data=Data)
            JSON_Object = Common.JSON_Handler(IX_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File_1 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-1", The_File_Extensions["Main"])

            if "id" in JSON_Response:
                Search_ID = JSON_Response["id"]
                IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search/result?k={IX_Access_Token}&id={Search_ID}")
                JSON_Object = Common.JSON_Handler(IX_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File_2 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-2", The_File_Extensions["Main"])
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Data Leakage", Task_ID, Plugin_Name.lower())

                if "records" in JSON_Response:

                    for IX_Item in JSON_Response["records"]:

                        if "systemid" in IX_Item and "name" in IX_Item:
                            IX_URL = f"https://{Domain}/?did=" + IX_Item['systemid']

                            if IX_Item["name"] != "":
                                Title = "IntelligenceX Data Leak | " + IX_Item["name"]

                            else:
                                Title = "IntelligenceX Data Leak | Untitled Document"

                            if IX_URL not in Cached_Data and IX_URL not in Data_to_Cache:
                                IX_Item_Responses = Common.Request_Handler(IX_URL, Filter=True, Host=f"https://{Domain}")
                                IX_Item_Response = IX_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, IX_Item_Response, IX_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File_1, Main_File_2, Output_file], IX_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(IX_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
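# Hedged standalone sketch of the two-step IntelligenceX flow above, using the
# requests library directly; "YOUR_API_KEY" and "example.com" are placeholders,
# and passing the key via the ?k= query parameter simply mirrors what this
# plugin does rather than documenting the full intelx.io API.
import requests

Data = {"term": "example.com", "buckets": [], "lookuplevel": 0, "maxresults": 10,
        "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0, "terminate": []}
Search_Response = requests.post("https://2.intelx.io/intelligent/search?k=YOUR_API_KEY", json=Data).json()
Search_ID = Search_Response["id"]
Results = requests.get(f"https://2.intelx.io/intelligent/search/result?k=YOUR_API_KEY&id={Search_ID}").json()
print(len(Results.get("records", [])))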