def Search(Query_List, Task_ID):
    """Query the IPStack API for geolocation data on each IP-address query.

    Writes a JSON results file plus an HTML-converted report, links both into
    the task's output connections, and caches handled queries.

    :param Query_List: Raw query input; normalised via General.Convert_to_List.
    :param Task_ID: Identifier of the owning task, passed through to outputs.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            # Only IP-shaped queries are sent to the API.
            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(
                        JSON_Response, JSON_Output_Response,
                        f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, HTML_Output_File_Data,
                        Title.replace(" ", "-"),
                        The_File_Extensions["Main_Converted"])

                    # Fix: the converted HTML report was previously generated but
                    # never checked nor attached to the output connections
                    # (compare the other plugins in this file, which attach both
                    # the main and converted files).
                    if Output_file and HTML_Output_File:
                        Output_Connections.Output(
                            [Output_file, HTML_Output_File], Result_URL, Title,
                            Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)
                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(self):
    """Look up each query as a BSB (bank code) page on www.<Domain> and save
    the filtered HTML result, linking it into the task's output connections.

    Queries whose result page contains the site's form-error banner are
    skipped with a warning. Handled URLs are cached to avoid re-processing.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Plugin_Name.lower())
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            BSB_Search_URL = f"https://www.{self.Domain}/{Query}.html"
            Responses = Common.Request_Handler(
                BSB_Search_URL, Filter=True, Host=f"https://www.{self.Domain}")
            Response = Responses["Filtered"]
            # The site reports bad queries via an in-page error banner, not an
            # HTTP error status, so detect it by pattern match.
            Error_Regex = Common.Regex_Handler(
                Response, Custom_Regex=r"Correct\sthe\sfollowing\serrors")
            Output_Connections = General.Connections(
                Query, self.Plugin_Name, self.Domain, self.Result_Type,
                self.Task_ID, self.Plugin_Name.lower())

            if not Error_Regex:
                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, self.Plugin_Name, Response, Query,
                        self.The_File_Extension)

                    if Output_file:
                        Output_Connections.Output(
                            [Output_file], BSB_Search_URL,
                            General.Get_Title(BSB_Search_URL),
                            self.Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)
                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                        )
            else:
                logging.warning(
                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Query returned error, probably does not exist."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):
    """Verify each email-address query against the <Domain> guest-verification
    API, saving the raw JSON response plus an HTML-table conversion of it.

    Non-email queries and non-JSON responses are skipped silently; handled
    queries are cached.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            # Only email-shaped queries are submitted to the API.
            if Common.Regex_Handler(Query, Type="Email"):
                Link = f"https://{self.Domain}/home/verify-as-guest/{Query}"
                JSON_Response = Common.Request_Handler(Link)
                JSON_Object = Common.JSON_Handler(JSON_Response)

                if JSON_Object.Is_JSON():
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    # Flatten the nested "response" object into a single-level
                    # dict so the HTML table has one row of columns.
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():
                        if Key != "response":
                            Table_JSON[Key] = Value
                        else:
                            for Det_Key, Det_Val in JSON_Response["response"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Title = f"Email Verification | {Query}"
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, JSON_Output_Response, Title, self.The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(Filter_JSON, JSON_Output_Response, f"Email Verification Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, HTML_Output_File_Data, Title, self.The_File_Extensions["Main_Converted"])

                        # Both the raw JSON and its HTML conversion must exist
                        # before either is linked.
                        if Output_file and HTML_Output_File:
                            Output_Connections.Output([Output_file, HTML_Output_File], Link, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Link)
                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def General_Pull(self, Handle, Directory, API):
    """Pull the latest tweets for a Twitter handle and save each linked tweet.

    :param Handle: Twitter screen name to pull the timeline for.
    :param Directory: Dated output directory created by the caller.
    :param API: Authenticated Tweepy-style API object providing user_timeline.
    """
    try:
        Data_to_Cache = []
        JSON_Response = []
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Latest_Tweets = API.user_timeline(screen_name=Handle, count=self.Limit)

        for Tweet in Latest_Tweets:
            # Fix: this used a bare "except:", which swallowed every error
            # (including KeyboardInterrupt). Only a missing/empty URL entity
            # should trigger the no-URL fallback record.
            try:
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name,
                    'url': Tweet.entities['urls'][0]["expanded_url"]
                })
            except (AttributeError, IndexError, KeyError):
                # Tweet carries no expanded URL; store it without 'url' so the
                # filter below skips it for per-tweet output.
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name
                })

        JSON_Output = Common.JSON_Handler(JSON_Response).Dump_JSON()
        Output_Connections = General.Connections(Handle, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
        Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output, Handle, self.The_File_Extensions["Main"])

        for JSON_Item in JSON_Response:
            # Only tweets that yielded a URL get an individual result page.
            if all(Item in JSON_Item for Item in ['id', 'url', 'text']):
                Link = JSON_Item['url']

                if Link not in Cached_Data and Link not in Data_to_Cache:
                    Title = "Twitter | " + JSON_Item['text']
                    Item_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{self.Domain}")
                    Item_Response = Item_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(Directory, Handle, self.Plugin_Name, Item_Response, str(JSON_Item['id']), self.The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                        Data_to_Cache.append(Link)
                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Output file not returned.")
            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Insufficient parameters provided.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Convert_to_JSON(Data):
    """Convert the Flickr library's repr-style photo list into a JSON string.

    Rewrites the "[Photo(id=b'..', title=b'..'), ...]" repr text into a Python
    literal list of dicts, parses it, and returns it serialised as JSON.
    Returns None implicitly when the input does not contain a bracketed list.
    """
    import ast  # Local import: only this converter needs it.

    Data = str(Data)
    Flickr_Regex = Common.Regex_Handler(Data, Custom_Regex=r"\[(.+)\]")

    if Flickr_Regex:
        # Text surgery: turn Photo(id=b'…', title=b'…') reprs into
        # {'id': '…', 'title': '…'} dict literals.
        New_Data = Flickr_Regex.group(1).replace("...", "").replace("id=b", "'id': ").replace("title=b", "'title': ").replace("(", "{").replace(")", "}").replace("\'}", "}").replace("}", "\'}")
        New_Data = New_Data.replace("Photo", "")
        New_Data = f"[{New_Data}]"
        # Fix: eval() executed arbitrary expressions embedded in scraped data;
        # ast.literal_eval accepts only Python literals, which is all the
        # rewritten text should contain.
        New_Data = ast.literal_eval(New_Data)
        JSON_Object = Common.JSON_Handler(New_Data)
        New_Data = JSON_Object.Dump_JSON()
        return New_Data
def Search(self):
    """Search the Microsoft (Windows) Store for each query, saving up to
    self.Limit matching product pages per query.

    The store region is taken from the configured location (self.Load_Configuration).
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = self.Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:
            Main_URL = f"https://www.{self.Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Win_Store_Response, Query, self.The_File_Extension)
            # Fix: the product-page regex and item URL previously hard-coded
            # "en-au" (and "microsoft.com"), so any other configured location
            # produced zero matches; both now follow the same Location and
            # Domain used to build Main_URL.
            Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=rf"\/en\-{Location}\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.{self.Domain}/en-{Location}/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Win_Store_Response, Regex_Group_1, self.The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)
                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        Current_Step += 1
            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Search(self):
    """Search Library Genesis for each query (title or ISBN) and save up to
    self.Limit matching book pages per query.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(self.Concat_Plugin_Name)
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in self.Query_List:  # Query can be Title or ISBN
            Main_URL = f"http://{self.Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = Common.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Lib_Gen_Response, Query, self.The_File_Extension)
            # Each search hit is an MD5-keyed book page link.
            Lib_Gen_Regex = Common.Regex_Handler(Lib_Gen_Response, Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Findall=True)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{self.Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                    Lib_Item_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"http://{self.Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Lib_Item_Response, Regex, self.The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)
                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        # Count the item against the limit whether or not the
                        # file was created.
                        Current_Step += 1
            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
def Data_Type_Discovery(Data_to_Search):
    """Determine which data types appear in a dump.

    Checks for MD5/SHA1/SHA256 hashes, credentials, email addresses and URLs
    via Common.Regex_Handler, and returns a de-duplicated list of type labels
    (e.g. "MD5 hash", "Credentials"). Returns None when an error occurs.
    """
    try:
        Dump_Types = []
        Hash_Types = ["MD5", "SHA1", "SHA256"]
        Hash_Type_Dict = {}

        for Hash_Type in Hash_Types:
            Hash_Type_Dict[Hash_Type] = Common.Regex_Handler(Data_to_Search, Type=Hash_Type)

        for Hash_Key, Hash_Value in Hash_Type_Dict.items():  # Hash type identification.
            if Hash_Value:
                Hash_Type_Line = f"{Hash_Key} hash"

                # Fix: idiomatic "not in" membership tests and removal of the
                # dead "else: pass" branch; logic is unchanged.
                if Hash_Type_Line not in Dump_Types:
                    Dump_Types.append(Hash_Type_Line)

        if Common.Regex_Handler(Data_to_Search, Type="Credentials"):  # Credentials identification.
            if "Credentials" not in Dump_Types:
                Dump_Types.append("Credentials")
        else:
            # Bare emails are only reported when no full credentials matched
            # (credentials subsume the email address).
            if Common.Regex_Handler(Data_to_Search, Type="Email"):  # Email identification.
                if "Email" not in Dump_Types:
                    Dump_Types.append("Email")

        if Common.Regex_Handler(Data_to_Search, Type="URL"):  # URL identification.
            if "URL" not in Dump_Types:
                Dump_Types.append("URL")

        return Dump_Types

    # Fix: the original bare "except:" also caught SystemExit/KeyboardInterrupt.
    except Exception:
        logging.warning(
            f"{Common.Date()} - General Library - Failed to determine data type."
        )
def Search(Query_List, Task_ID):
    """Look up World Bank "Doing Business" economy data for each query,
    saving the raw API JSON (main file) and the public economy page (query file).

    :param Query_List: Raw query input; normalised via General.Convert_to_List.
    :param Task_ID: Identifier of the owning task, passed through to outputs.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            # The API expects a Referer pointing at the public economy page.
            Headers_Custom = {"Referer": f"https://www.doingbusiness.org/en/data/exploreeconomies/{Query}"}
            Main_URL = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{Query}"
            Doing_Business_Response = Common.Request_Handler(Main_URL, Optional_Headers=Headers_Custom)
            JSON_Object = Common.JSON_Handler(Doing_Business_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()

            # A 'message' key in the response marks an API error for this economy.
            if 'message' not in JSON_Response:
                Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                Item_URL = f"https://www.{Domain}/en/data/exploreeconomies/{Query}"
                Title = f"Doing Business | {Query}"
                Current_Doing_Business_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"https://www.{Domain}")
                Current_Doing_Business_Response = Current_Doing_Business_Responses["Filtered"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Current_Doing_Business_Response, Query, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Economic Details", Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)
                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")
            else:
                # NOTE(review): no regex is involved in this branch — the message
                # looks copy-pasted from another plugin; it actually fires when
                # the API returned an error for the query.
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, Limit=10):
    """Search a Pirate Bay mirror API for torrents matching each query and
    save up to Limit magnet-link results per query.

    :param Query_List: Raw query input; normalised via General.Convert_to_List.
    :param Task_ID: Identifier of the owning task, passed through to outputs.
    :param Limit: Maximum number of results to record per query.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Search_URL = f'https://tpbc.herokuapp.com/search/{Query.replace(" ", "+")}/?sort=seeds_desc'
            Raw_Response = Common.Request_Handler(Search_URL)
            Handler_Object = Common.JSON_Handler(Raw_Response)
            Search_Results = Handler_Object.To_JSON_Loads()
            Dumped_JSON = Handler_Object.Dump_JSON()
            Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Dumped_JSON, Query, The_File_Extension)

            if Main_Output_File:
                Result_Count = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Search_Results:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]
                    Is_New = Result_URL not in Cached_Data and Result_URL not in Data_to_Cache

                    if Is_New and Result_Count < int(Limit):
                        Result_Output_File = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Dumped_JSON, Result_Title, The_File_Extension)

                        if Result_Output_File:
                            Output_Connections.Output([Result_Output_File], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)
                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Result_Count += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def CSV_to_JSON(Query, CSV_Data):
    """Convert two-column CSV lines ("Domain,IP Address") into a JSON string
    keyed by the originating query.

    :param Query: Key under which the converted rows are stored.
    :param CSV_Data: List of CSV lines; the first line is the header row.
    :return: Indented JSON string, or None on non-list input or failure.
    """
    try:
        # Fix: isinstance instead of "type(x) == list".
        if isinstance(CSV_Data, list):
            JSON_Data = {Query: []}

            # Fix: skip the header positionally instead of by value comparison,
            # which also dropped any later line that happened to equal the header.
            for CSV_Line in CSV_Data[1:]:
                Split_CSV_Line = CSV_Line.split(",")

                # Fix: guard short/malformed lines — previously an IndexError
                # here aborted the entire conversion via the except clause.
                if len(Split_CSV_Line) >= 2:
                    JSON_Data[Query].append({
                        "Domain": Split_CSV_Line[0],
                        "IP Address": Split_CSV_Line[1]
                    })

            Indented_Registration_Response = Common.JSON_Handler(
                JSON_Data).Dump_JSON()
            return Indented_Registration_Response
        else:
            return None

    # Fix: was a bare "except:".
    except Exception:
        logging.warning(
            f"{Common.Date()} - General Library - Failed to convert provided CSV data to JSON."
        )
def Get_Title(URL, Requests=False):
    """Return the HTML <title> text of a web page.

    :param URL: Page to fetch; must start with http:// or https://.
    :param Requests: When True, fetch via Common.Request_Handler instead of
        urllib (e.g. when custom headers/filtering are needed).
    :return: Title string, or None (with a warning logged) on any failure.
    """
    try:
        # Idiom: startswith accepts a tuple of prefixes.
        if URL.startswith(('http://', 'https://')):
            if 'file:/' not in URL:
                if Requests:
                    Soup = BeautifulSoup(Common.Request_Handler(URL), features="lxml")
                else:
                    # Fix: close the urllib connection deterministically; the
                    # previous code leaked the socket until GC.
                    with urllib.request.urlopen(URL) as URL_Handle:
                        Soup = BeautifulSoup(URL_Handle, features="lxml")

                return Soup.title.text
            else:
                logging.warning(
                    f"{Common.Date()} - General Library - This function does not work on files."
                )
        else:
            logging.warning(
                f"{Common.Date()} - General Library - Invalid URL provided.")

    # Fix: was a bare "except:"; still logs-and-returns-None on any error
    # (including pages with no <title> element).
    except Exception:
        logging.warning(
            f"{Common.Date()} - General Library - Failed to get title.")
def Make_Directory(Plugin_Name):
    """Create (if needed) and return the dated output directory for a plugin:
    <base>/static/protected/output/<Plugin_Name>/<YYYY>/<MM>/<DD>.
    """
    Today = Common.Date(Full_Timestamp=True)
    Year = str(Today.year)
    # Idiom: zfill replaces the manual single-digit padding checks.
    Month = str(Today.month).zfill(2)
    Day = str(Today.day).zfill(2)
    # NOTE(review): realpath('__file__') resolves the *literal string* relative
    # to the current working directory, not this module's location — the output
    # tree therefore depends on the CWD. Kept as-is since deployments may rely
    # on it; confirm before changing to realpath(__file__).
    File_Path = os.path.dirname(os.path.realpath('__file__'))
    Directory = f"{File_Path}/static/protected/output/{Plugin_Name}/{Year}/{Month}/{Day}"

    if not os.path.isdir(Directory):
        # exist_ok avoids a crash if another worker creates the directory
        # between the isdir check and this call.
        os.makedirs(Directory, exist_ok=True)
        logging.info(
            f"{Common.Date()} - General Library - Using new directory: {Directory}."
        )
    else:
        logging.info(
            f"{Common.Date()} - General Library - Using existing directory: {Directory}."
        )

    return Directory
def __init__(self, Directory, Plugin_Name):
    """Initialise the cache helper for a plugin.

    Derives the cache file path by stripping the trailing /YYYY/MM/DD from the
    dated output directory, so all runs of a plugin share one cache file.

    :param Directory: Dated output directory (as returned by Make_Directory).
    :param Plugin_Name: Plugin identifier used to name the cache file.
    """
    Cache_File = f"{Plugin_Name}-cache.txt"
    General_Directory_Search = Common.Regex_Handler(
        Directory, Custom_Regex=r"(.*)\/\d{4}\/\d{2}\/\d{2}")

    if General_Directory_Search:
        self.Complete_File = os.path.join(
            General_Directory_Search.group(1), Cache_File)
    else:
        # Fix: the attribute was previously left unset when the directory did
        # not match the dated layout, causing an AttributeError on any later
        # cache access; None lets callers detect the condition explicitly.
        self.Complete_File = None
def Load_Configuration(self):
    """Load the general location setting from the input configuration;
    returns None when it is absent or empty."""
    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Loading configuration data.")
    # "or None" collapses the original if/else: falsy results become None.
    return Common.Configuration(Input=True).Load_Configuration(
        Location=True, Object="general", Details_to_Load=["location"]) or None
def Load_Configuration(self):
    """Load this plugin's API key from the input configuration; returns None
    when it is absent or empty."""
    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Loading configuration data.")
    # "or None" collapses the original if/else: falsy results become None.
    return Common.Configuration(Input=True).Load_Configuration(
        Object=self.Plugin_Name.lower(), Details_to_Load=["api_key"]) or None
def Load_Configuration(self):
    """Load this plugin's API key from the input configuration and return it
    base64-encoded (as the service expects); None when absent or empty."""
    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Loading configuration data.")
    API_Key = Common.Configuration(Input=True).Load_Configuration(
        Object=self.Concat_Plugin_Name, Details_to_Load=["api_key"])
    return base64.b64encode(API_Key.encode('ascii')) if API_Key else None
def Create_Event(self, Description):
    """Insert an event row (description + current timestamp) into the events
    table; failures are logged rather than raised."""
    try:
        Event_Row = (Description, Common.Date())
        self.Cursor.execute(
            "INSERT INTO events (description, created_at) VALUES (%s,%s)",
            Event_Row)
        self.Connection.commit()
    except Exception as e:
        logging.error(f"{Common.Date()} - General Library - {str(e)}.")
def Load_Configuration():
    """Load this plugin's API key from the input configuration; returns None
    when it is absent or empty."""
    logging.info(f"{Common.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")
    # "or None" collapses the original if/else: falsy results become None.
    return Common.Configuration(Input=True).Load_Configuration(
        Object=Plugin_Name.lower(), Details_to_Load=["api_key"]) or None
def Load_Configuration():
    """Load this plugin's API key from the input configuration and return it
    base64-encoded (as the service expects); None when absent or empty."""
    logging.info(f"{Common.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")
    API_Key = Common.Configuration(Input=True).Load_Configuration(
        Object=Concat_Plugin_Name, Details_to_Load=["api_key"])
    return base64.b64encode(API_Key.encode('ascii')) if API_Key else None
def __init__(self, File_Path, Internally_Requested=False, **kwargs):
    """Initialise the screenshot helper.

    :param File_Path: Base path for screenshot output files.
    :param Internally_Requested: True when another component (not a user)
        requested the capture.
    :param kwargs: For external requests, Screenshot_ID and Screenshot_User;
        for internal requests, Screenshot_Link.
    """
    self.Internally_Requested = Internally_Requested
    # Chrome binary and chromedriver locations from the core configuration.
    self.Chrome_Config = Common.Configuration(
        Core=True).Load_Configuration(
            Object="google_chrome",
            Details_to_Load=["application_path", "chromedriver_path"])
    self.File_Path = File_Path
    self.Connection = Common.Configuration(Output=True).Load_Configuration(
        Postgres_Database=True, Object="postgresql")
    self.Cursor = self.Connection.cursor()

    if not self.Internally_Requested and kwargs.get(
            'Screenshot_ID') and kwargs.get('Screenshot_User'):
        # External (user-triggered) capture: track the requesting user and the
        # database row to update.
        self.Screenshot_ID = kwargs['Screenshot_ID']
        self.Screenshot_User = kwargs['Screenshot_User']
    elif self.Internally_Requested and kwargs.get('Screenshot_Link'):
        # Internal capture: no user/row tracking, only the link to screenshot.
        self.Screenshot_ID = False
        self.Screenshot_User = False
        self.Screenshot_Link = kwargs['Screenshot_Link']
    # NOTE(review): when neither branch matches (wrong/missing kwargs), none of
    # the Screenshot_* attributes are set — presumably downstream code guards
    # against this; confirm before relying on these attributes.
def Load_Configuration():
    """Read the Pinterest OAuth token straight from the configuration file;
    returns None when the token is missing/empty or loading fails."""
    logging.info(
        f"{Common.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:
        with open(Common.Set_Configuration_File()) as JSON_File:
            Configuration_Data = Common.JSON_Handler(JSON_File).To_JSON_Load()

        Pinterest_Details = Configuration_Data["inputs"][Plugin_Name.lower()]
        # "or None" collapses the original if/else on the token value.
        return Pinterest_Details['oauth_token'] or None
    except:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to load location details."
        )
def Logging(Directory, Plugin_Name):
    """Return the path of the plugin's log file, placed in the directory above
    the dated /YYYY/MM/DD leaf; returns None when the layout does not match
    or an error occurs."""
    try:
        Log_File_Name = f"{Plugin_Name}-log-file.log"
        Base_Directory_Match = Common.Regex_Handler(
            Directory, Custom_Regex=r"(.*)\/\d{4}\/\d{2}\/\d{2}")

        if Base_Directory_Match:
            return os.path.join(Base_Directory_Match.group(1), Log_File_Name)
    except:
        logging.warning(
            f"{Common.Date()} - General Library - Failed to initialise logging."
        )
def Load_Configuration(self):
    """Load the Reddit API credentials and subreddit list from the input
    configuration; returns None when they are absent or empty."""
    logging.info(
        f"{Common.Date()} - {self.Logging_Plugin_Name} - Loading configuration data."
    )
    Required_Details = [
        "client_id", "client_secret", "user_agent", "username", "password",
        "subreddits"
    ]
    # "or None" collapses the original if/else: falsy results become None.
    return Common.Configuration(Input=True).Load_Configuration(
        Object=self.Plugin_Name.lower(),
        Details_to_Load=Required_Details) or None
def Load_Configuration():
    """Load the Reddit API credentials and subreddit list from the input
    configuration; returns None when they are absent or empty."""
    logging.info(
        f"{Common.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )
    Required_Details = [
        "client_id", "client_secret", "user_agent", "username", "password",
        "subreddits"
    ]
    # "or None" collapses the original if/else: falsy results become None.
    return Common.Configuration(Input=True).Load_Configuration(
        Object=Plugin_Name.lower(), Details_to_Load=Required_Details) or None
def Call_Plugin(self):
    """Verify the configured plugin and invoke its search entry point,
    bracketing the run with the Starter/Stopper bookkeeping calls."""
    Object = Common.Configuration(Output=True)
    self.Starter(Object)

    try:
        Plugin = plugin_verifier.Plugin_Verifier(
            self.plugin_name, self.task_id, self.query,
            self.limit).Verify_Plugin()
        Required_Keys = ["Object", "Search Option", "Function Kwargs"]

        if Plugin and all(Key in Plugin for Key in Required_Keys):
            # Resolve the search method on the plugin object, then call it
            # with the verifier-supplied keyword arguments.
            Search_Function = getattr(Plugin["Object"], Plugin["Search Option"])
            Search_Function(**Plugin["Function Kwargs"])
        else:
            print(
                f"{Common.Date()} - Plugin Caller - Failed to start plugin."
            )
    except Exception as e:
        print(f"{Common.Date()} - Plugin Caller - {str(e)}")
    finally:
        # Always run the stop bookkeeping, even when the plugin crashed.
        self.Stopper(Object)
def Search(Query_List, Task_ID, Limit=10):
    """Search the Apple App Store (iTunes Search API) for each query, saving
    the raw API JSON (main file) plus each matching developer page.

    :param Query_List: Raw query input; normalised via General.Convert_to_List.
    :param Task_ID: Identifier of the owning task, passed through to outputs.
    :param Limit: Maximum number of API results requested per query.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        # Per-run log file under the plugin's dated output directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)
            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                # No connectivity — abandon the remaining queries too.
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Object.Dump_JSON(),
                                                 Query,
                                                 The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:
                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    # NOTE(review): this loop variable shadows the JSON_Handler
                    # object of the same name above; after the loop JSON_Object
                    # is a result dict. Works, but worth renaming if touched.
                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Responses = Common.Request_Handler(
                            JSON_Object['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Object_Response = JSON_Object_Responses[
                            "Filtered"]

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
                            # Extract the developer's idNNNNNNNNN token to name
                            # the output file.
                            Apple_Store_Regex = Common.Regex_Handler(
                                JSON_Object['artistViewUrl'],
                                Custom_Regex=r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    JSON_Object_Response,
                                    Apple_Store_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Object['artistViewUrl'])
                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )
                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )
            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Call_Plugin(self):
    """Dispatch the task's configured plugin by name.

    Looks up ``self.plugin_name`` in a registry mapping display names to a
    plugin module (and optional Type / Limit / Custom_Search / Alphabets /
    Comprehensive keyword arguments), then either calls the module-level
    search function or, for object-based plugins (``Is_Object``),
    instantiates ``Fuzzer`` and calls the named method on it.

    Errors are printed, never raised; the task is always marked stopped.
    """

    Object = Common.Configuration(Output=True)
    Starter(Object, self.task_id)

    # Registry of plugin display names -> dispatch metadata.
    Plugins_Dictionary = {
        "YouTube Search": {"Module": "plugins.YouTube_Search", "Limit": True},
        "Yandex Search": {"Module": "plugins.Yandex_Search", "Limit": True},
        "Windows Store Search": {"Module": "plugins.Windows_Store_Search", "Limit": True},
        "Vulners Search": {"Module": "plugins.Vulners_Search", "Limit": True},
        "Virus Total Search - Domain": {"Module": "plugins.Virus_Total_Search", "Type": "Domain"},
        "Virus Total Search - URL": {"Module": "plugins.Virus_Total_Search", "Type": "URL"},
        "Virus Total Search - IP Address": {"Module": "plugins.Virus_Total_Search", "Type": "IP"},
        "Virus Total Search - File Hash": {"Module": "plugins.Virus_Total_Search", "Type": "Hash"},
        "Vkontakte - User Search": {"Module": "plugins.Vkontakte_Search", "Type": "User", "Limit": True},
        "Vkontakte - Group Search": {"Module": "plugins.Vkontakte_Search", "Type": "Group", "Limit": True},
        "Vehicle Registration Search": {"Module": "plugins.Vehicle_Registration_Search"},
        "Username Search": {"Module": "plugins.Username_Search"},
        "Twitter Search": {"Module": "plugins.Twitter_Search", "Limit": True},
        "Tumblr Search": {"Module": "plugins.Tumblr_Search"},
        "Torrent Search": {"Module": "plugins.Torrent_Search", "Limit": True},
        "Threat Crowd - Virus Report Search": {"Module": "plugins.Threat_Crowd_Search", "Type": "Virus Report"},
        "Threat Crowd - IP Address Search": {"Module": "plugins.Threat_Crowd_Search", "Type": "IP Address"},
        "Threat Crowd - Email Search": {"Module": "plugins.Threat_Crowd_Search", "Type": "Email"},
        "Threat Crowd - Domain Search": {"Module": "plugins.Threat_Crowd_Search", "Type": "Domain"},
        "Threat Crowd - Antivirus Search": {"Module": "plugins.Threat_Crowd_Search", "Type": "AV"},
        "Shodan Search - Query": {"Module": "plugins.Shodan_Search", "Type": "Search", "Limit": True},
        "Shodan Search - IP Address": {"Module": "plugins.Shodan_Search", "Type": "Host"},
        "RSS Feed Search": {"Module": "plugins.RSS_Feed_Search", "Limit": True},
        "Reddit Search": {"Module": "plugins.Reddit_Search", "Limit": True},
        "Phone Search - SIM Number": {"Module": "plugins.Phone_Search", "Type": "SIM"},
        "Phone Search - ISPC Number": {"Module": "plugins.Phone_Search", "Type": "ISPC"},
        "Phone Search - IMSI Number": {"Module": "plugins.Phone_Search", "Type": "IMSI"},
        "Phone Search - IMEI Number": {"Module": "plugins.Phone_Search", "Type": "IMEI"},
        "Phone Search - Cellular Number": {"Module": "plugins.Phone_Search", "Type": "Number"},
        "Phishstats Search": {"Module": "plugins.Phishstats_Search", "Limit": True},
        "Pinterest - Pin Search": {"Module": "plugins.Pinterest_Search", "Type": "pin", "Limit": True},
        "Pinterest - Board Search": {"Module": "plugins.Pinterest_Search", "Type": "board", "Limit": True},
        "OK Search - User": {"Module": "plugins.OK_Search", "Type": "User"},
        "OK Search - Group": {"Module": "plugins.OK_Search", "Type": "Group"},
        "Naver Search": {"Module": "plugins.Naver_Search", "Limit": True},
        "Library Genesis Search": {"Module": "plugins.Library_Genesis_Search", "Limit": True},
        "Kik Search": {"Module": "plugins.Kik_Search"},
        "IP Stack Search": {"Module": "plugins.IPStack_Search"},
        "IntelligenceX Search": {"Module": "plugins.IntelligenceX_Search", "Limit": True},
        "Instagram - User Search": {"Module": "plugins.Instagram_Search", "Type": "User", "Limit": True},
        "Instagram - Tag Search": {"Module": "plugins.Instagram_Search", "Type": "Tag", "Limit": True},
        "Instagram - Post Search": {"Module": "plugins.Instagram_Search", "Type": "Post"},
        "Hunter Search - Email": {"Module": "plugins.Hunter_Search", "Type": "Email", "Limit": True},
        "Hunter Search - Domain": {"Module": "plugins.Hunter_Search", "Type": "Domain", "Limit": True},
        "Have I Been Pwned - Password Search": {"Module": "plugins.Have_I_Been_Pwned", "Type": "password"},
        "Have I Been Pwned - Email Search": {"Module": "plugins.Have_I_Been_Pwned", "Type": "email"},
        "Have I Been Pwned - Breach Search": {"Module": "plugins.Have_I_Been_Pwned", "Type": "breach"},
        "Have I Been Pwned - Account Search": {"Module": "plugins.Have_I_Been_Pwned", "Type": "account"},
        "Greynoise IP Search": {"Module": "plugins.Greynoise_IP_Search"},
        "Google Search": {"Module": "plugins.Google_Search", "Limit": True},
        "Google Play Store Search": {"Module": "plugins.Google_Play_Store_Search", "Limit": True},
        "Flickr Search": {"Module": "plugins.Flickr_Search", "Limit": True},
        "Email Verification Search": {"Module": "plugins.Email_Verification_Search"},
        "Email Reputation Search": {"Module": "plugins.Email_Reputation_Search"},
        "Ebay Search": {"Module": "plugins.Ebay_Search", "Limit": True},
        "Domain Fuzzer - Regular Domain Suffixes": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Regular_Extensions", "Is_Object": True},
        "Domain Fuzzer - Global Domain Suffixes": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Global_Extensions", "Is_Object": True},
        "Domain Fuzzer - Punycode (Latin Comprehensive)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "Latin", "Comprehensive": True},
        "Domain Fuzzer - Punycode (Latin Condensed)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "Latin"},
        "Domain Fuzzer - Punycode (Asian)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "Asian"},
        "Domain Fuzzer - Punycode (Middle Eastern)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "Middle Eastern"},
        "Domain Fuzzer - Punycode (North African)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "North African"},
        "Domain Fuzzer - Punycode (Native American)": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "Character_Switch", "Is_Object": True, "Alphabets": "Native American"},
        "Domain Fuzzer - All Extensions": {"Module": "plugins.Domain_Fuzzer", "Custom_Search": "All_Extensions", "Is_Object": True},
        "Doing Business Search": {"Module": "plugins.Doing_Business_Search"},
        "DNS Reconnaissance Search": {"Module": "plugins.DNS_Recon_Search"},
        "Default Password Search": {"Module": "plugins.Default_Password_Search", "Limit": True},
        "DuckDuckGo Search": {"Module": "plugins.DuckDuckGo_Search", "Limit": True},
        "Craigslist Search": {"Module": "plugins.Craigslist_Search", "Limit": True},
        "Certificate Transparency - SSLMate": {"Module": "plugins.Certificate_Transparency"},
        "Certificate Transparency - CRT.sh": {"Module": "plugins.Certificate_Transparency_CRT"},
        "Builtwith Search": {"Module": "plugins.BuiltWith_Search"},
        "Business Search - United Kingdom Business Number": {"Module": "plugins.UK_Business_Search", "Type": "UKBN"},
        "Business Search - United Kingdom Company Name": {"Module": "plugins.UK_Business_Search", "Type": "UKCN", "Limit": True},
        "Business Search - New Zealand Business Number": {"Module": "plugins.NZ_Business_Search", "Type": "NZBN"},
        "Business Search - New Zealand Company Name": {"Module": "plugins.NZ_Business_Search", "Type": "NZCN", "Limit": True},
        "Business Search - Canadian Business Number": {"Module": "plugins.Canadian_Business_Search", "Type": "CBN"},
        "Business Search - Canadian Company Name": {"Module": "plugins.Canadian_Business_Search", "Type": "CCN", "Limit": True},
        "Business Search - Australian Business Number": {"Module": "plugins.Australian_Business_Search", "Type": "ABN"},
        "Business Search - Australian Company Name": {"Module": "plugins.Australian_Business_Search", "Type": "ACN", "Limit": True},
        "Business Search - American Central Index Key": {"Module": "plugins.American_Business_Search", "Type": "CIK"},
        "Business Search - American Company Name": {"Module": "plugins.American_Business_Search", "Type": "ACN", "Limit": True},
        "BSB Search": {"Module": "plugins.BSB_Search"},
        "Blockchain - Monero Transaction Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Transaction_Search", "Type": "monero"},
        "Blockchain - Ethereum Transaction Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Transaction_Search", "Type": "eth"},
        "Blockchain - Bitcoin Cash Transaction Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Transaction_Search", "Type": "bch"},
        "Blockchain - Bitcoin Transaction Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Transaction_Search", "Type": "btc"},
        "Blockchain - Ethereum Address Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Address_Search", "Type": "eth"},
        "Blockchain - Bitcoin Cash Address Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Address_Search", "Type": "bch"},
        "Blockchain - Bitcoin Address Search": {"Module": "plugins.Blockchain_Search", "Custom_Search": "Address_Search", "Type": "btc"},
        "Apple Store Search": {"Module": "plugins.Apple_Store_Search", "Limit": True},
        "Ahmia Darkweb Search": {"Module": "plugins.Ahmia_Darkweb_Search", "Limit": True},
    }

    try:

        if self.plugin_name in Plugins_Dictionary:
            Dict_Item = Plugins_Dictionary[self.plugin_name]

            # Build keyword arguments for the plugin call; "Limit" is resolved
            # from the task's own limit rather than the registry.
            Kwargs = {}

            for Key in ["Limit", "Type", "Alphabets", "Comprehensive"]:

                if Key in Dict_Item and Key != "Limit":
                    Kwargs[Key] = Dict_Item[Key]

                elif Key in Dict_Item and Key == "Limit":
                    Kwargs[Key] = self.limit

            if "Is_Object" not in Dict_Item:
                # Module-level plugin: call its Search (or custom) function directly.
                Plugin = importlib.import_module(Dict_Item["Module"])
                Search_Option = Dict_Item.get("Custom_Search", "Search")
                getattr(Plugin, Search_Option)(self.query, self.task_id, **Kwargs)

            else:
                # Object-based plugin (Domain Fuzzer): instantiate, then call the
                # configured method. BUG FIX: the original passed an undefined
                # Search_Option positional argument to a helper that accepted
                # only keyword arguments, which always raised.
                Class = importlib.import_module(Dict_Item["Module"])
                Domain_Fuzz_Obj = Class.Fuzzer(self.query, self.task_id)
                getattr(Domain_Fuzz_Obj, Dict_Item["Custom_Search"])(**Kwargs)

        else:
            # BUG FIX: the original message interpolated `e`, which is undefined
            # in this branch and raised a NameError.
            print(f"{Common.Date()} - Invalid plugin provided.")

    except Exception as e:
        print(f"{Common.Date()} - Plugin Caller Error - {e}")

    finally:
        Stopper(Object, self.task_id)
'--task', help= 'This option is used to specify a task ID to run. ./plugin_caller.py -t 1' ) Arguments = Parser.parse_args() Scrummage_Working_Directory = pathlib.Path(__file__).parent.absolute() os.chdir(Scrummage_Working_Directory) Task_ID = 0 if str(Scrummage_Working_Directory) == str(os.getcwd()): if Arguments.task: try: Task_ID = int(Arguments.task) Connector_Object = Common.Configuration(Output=True) Connection = Connector_Object.Load_Configuration( Postgres_Database=True, Object="postgresql") cursor = Connection.cursor() PSQL_Select_Query = 'SELECT * FROM tasks WHERE task_id = %s;' cursor.execute(PSQL_Select_Query, (Task_ID, )) result = cursor.fetchone() if result and Output_API_Checker(result[2]): Plugin_to_Call = Plugin_Caller(Plugin_Name=result[2], Limit=result[5], Task_ID=Task_ID, Query=result[1]) Plugin_to_Call.Call_Plugin() except:
def Search(Query_List, Task_ID, Type, Limit=10):
    """Search Canada's federal corporation registry by number or name.

    Args:
        Query_List: A query string or list of query strings.
        Task_ID: Scrummage task identifier, passed through to output connections.
        Type (str): "CBN" for a Canadian Business Number lookup, "CCN" for a
            company-name search.
        Limit (int): Maximum number of company results for CCN searches.
            Defaults to 10.

    Results are written to per-query output files and cached; failures are
    logged, never raised to the caller.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)

        # Route this plugin's log output to a dedicated file under its directory.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        # BUG FIX: normalise the limit once, up front — the original only called
        # Get_Limit after the value had already been used in a comparison.
        Limit = General.Get_Limit(Limit)

        # BUG FIX: the original used Domain.strip("beta."), which strips any of
        # the characters b/e/t/a/. from BOTH ends of the hostname, not the
        # "beta." prefix. Remove the prefix explicitly instead.
        Root_Domain = Domain[len("beta."):] if Domain.startswith("beta.") else Domain

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = Common.Request_Handler(Main_API_URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Indented_JSON_Response = JSON_Object.Dump_JSON()
                    Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            # Validates that the business number is numeric; raises otherwise.
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = Common.Request_Handler(Main_URL, Filter=True, Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Root_Domain, "Company Details", Task_ID, Plugin_Name)
                                    Output_Connections.Output([Main_Output_File, Output_file], Main_URL, f"Canadian Business Number {Query}", Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Failed to create output file. File may already exist.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Invalid query provided for CBN Search.")

                elif Type == "CCN":
                    Total_Results = 0
                    Iterator = "page=0"

                    # Page through the API until enough results are gathered or no
                    # further page is advertised in the response's paging block.
                    while Limit > Total_Results and Iterator is not None:
                        Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(Query) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                        Response = Common.Request_Handler(Main_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Total_Results += len(JSON_Response["docs"])

                        if "paging" in JSON_Response and "next" in JSON_Response.get("paging"):
                            Iterator = JSON_Response["paging"]["next"]

                        else:
                            Iterator = None

                    # NOTE(review): only the final page's response is dumped and
                    # iterated below — presumably intentional; confirm upstream.
                    Indented_JSON_Response = JSON_Object.Dump_JSON()

                    try:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Plugin_Name, Root_Domain, "Company Details", Task_ID, Plugin_Name)

                        for JSON_Item in JSON_Response['docs']:

                            if JSON_Item.get('BN'):
                                CCN = JSON_Item['Company_Name']
                                CBN = str(int(JSON_Item['BN']))
                                Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    Current_Responses = Common.Request_Handler(Full_CCN_URL, Filter=True, Host=f"https://{Domain}")
                                    Current_Response = Current_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), CCN.replace(' ', '-'), The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Full_CCN_URL, f"Canadian Business Number {CBN} for Query {Query}", Concat_Plugin_Name)
                                        Data_to_Cache.append(Full_CCN_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                            else:
                                logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Unable to retrieve business numbers from the JSON response.")

                    except Exception:
                        logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Invalid query provided for CCN Search.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Invalid request type.")

            except Exception:
                logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.split('.')[-1]} - {str(e)}")