def Search(Query_List, Task_ID, **kwargs):
    """Search YouTube for each query, then cache and report new video links.

    Query_List -- string or list of search terms.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    kwargs -- optional "Limit" (positive int-like) capping results per query.
    """
    Data_to_Cache = []

    # Bug fix: the original only assigned Limit when the kwarg was present and
    # positive, leaving it undefined otherwise and raising a NameError at
    # maxResults=Limit below. Default to 10 in every fallback case.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])
    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    YouTube_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name) or []
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0])
        Search_Response = YouTube_Handler.search().list(
            q=Query,
            type='video',
            location=YouTube_Details[3],
            locationRadius=YouTube_Details[4],
            part='id,snippet',
            maxResults=Limit,
        ).execute()
        General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, ".json")

        for Search_Result in Search_Response.get('items', []):
            Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId']
            Search_Video_Response = requests.get(Full_Video_URL).text

            if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, Full_Video_URL, "youtube.com", "Data Leakage", Task_ID, General.Get_Title(Full_Video_URL), Plugin_Name.lower())

                Data_to_Cache.append(Full_Video_URL)

    # Append to an existing cache file, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID):
    """Query api.certspotter.com for certificate issuances for each domain.

    Query_List -- string or list of domains to search.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    """
    Data_to_Cache = []
    Configuration_Details = Load_Configuration()
    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name) or []
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # The two original branches differed only in the include_subdomains
        # flag; build the URL once instead of duplicating the request code.
        Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query

        if Configuration_Details[1].lower() == "true":
            Request += '&include_subdomains=true'

        Request += '&expand=dns_names&expand=issuer&expand=cert'
        Response = requests.get(Request, auth=(Configuration_Details[0], '')).text
        JSON_Response = json.loads(Response)

        # "exists" in the payload signals a not-found-style response.
        if 'exists' not in JSON_Response and JSON_Response:

            if Request not in Cached_Data and Request not in Data_to_Cache:

                try:
                    SSLMate_Regex = re.search(r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?", Query)

                    if SSLMate_Regex:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Request, "sslmate.com", "Domain Spoof", Task_ID, General.Get_Title(Request), Plugin_Name.lower())

                # Was a bare "except:", which also swallowed KeyboardInterrupt
                # and SystemExit; narrow to Exception.
                except Exception:
                    logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + "[-] Failed to create file.")

                Data_to_Cache.append(Request)

    # Append to an existing cache file, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):
    """Pull Twitter results for each query via tweepy.

    Query_List -- string or list of search terms.
    Task_ID -- identifier forwarded to General_Pull for result linking.
    kwargs -- optional "Limit" (positive int-like) capping results per query.
    """
    # Bug fix: the original left Limit undefined when the kwarg was absent,
    # raising a NameError at the General_Pull call. Default to 10.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])
    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Twitter_Credentials = Load_Configuration()
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
            Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
            API = tweepy.API(Authentication)
            General_Pull(Query, Limit, Directory, API, Task_ID)

        # Was a bare "except:"; narrow to Exception so interrupts propagate.
        except Exception:
            logging.info(General.Date() + " Failed to get results. Are you connected to the internet?")
def Search(Query_List, Task_ID, Limit=10):
    """Pull Twitter results for each query via tweepy.

    Query_List -- string or list of search terms.
    Task_ID -- identifier forwarded to General_Pull for result linking.
    Limit -- maximum results per query; validated by General.Get_Limit.
    """
    try:
        Directory = General.Make_Directory(Plugin_Name.lower())

        # Route this plugin's log output to its own file.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Twitter_Credentials = Load_Configuration()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
                Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
                API = tweepy.API(Authentication)
                General_Pull(Query, Limit, Directory, API, Task_ID)

            # Bug fix: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit; narrow to Exception.
            except Exception:
                logging.info(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?"
                )

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):
    """Search Library Genesis for each query (title or ISBN) and report items.

    Query_List -- string or list of titles/ISBNs.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    kwargs -- optional "Limit" (positive int-like) capping results per query.
    """
    Data_to_Cache = []

    # Bug fix: the original left Limit undefined when the kwarg was absent,
    # raising a NameError in the result loop. Default to 10.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])
    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name) or []
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # Query can be a Title or an ISBN.
        Main_URL = "http://gen.lib.rus.ec/search.php?req=" + Query + "&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
        Lib_Gen_Response = requests.get(Main_URL).text
        General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response, Query, The_File_Extension)
        # Item pages are identified by a 32-hex-digit MD5 in their URL.
        Lib_Gen_Regex = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Lib_Gen_Response)

        if Lib_Gen_Regex:
            Current_Step = 0

            for Regex in Lib_Gen_Regex:
                Item_URL = "http://gen.lib.rus.ec/" + Regex
                Lib_Item_Response = requests.get(Item_URL).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < Limit:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Lib_Item_Response, Regex, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Item_URL, "gen.lib.rus.ec", "Data Leakage", Task_ID, General.Get_Title(Item_URL), Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

    # Append to an existing cache file, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID):
    """Look up each query as a BSB number on bsbnumbers.com and report hits."""
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())

        # Send this plugin's log lines to a dedicated file.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            BSB_Search_URL = f"https://www.bsbnumbers.com/{Query}.html"
            Page_Source = requests.get(BSB_Search_URL).text
            Query_Error = re.search(r"Correct\sthe\sfollowing\serrors", Page_Source)
            Output_Connections = General.Connections(Query, Plugin_Name, "bsbnumbers.com", "BSB Details", Task_ID, Plugin_Name.lower())

            # The site renders an error banner for unknown BSB numbers.
            if Query_Error:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Query returned error, probably does not exist."
                )
                continue

            # Skip anything already cached or collected this run.
            if BSB_Search_URL in Cached_Data or BSB_Search_URL in Data_to_Cache:
                continue

            Output_file = General.Create_Query_Results_Output_File(
                Directory, Query, Plugin_Name, Page_Source, Query,
                The_File_Extension)

            if Output_file:
                Output_Connections.Output(
                    [Output_file], BSB_Search_URL,
                    General.Get_Title(BSB_Search_URL), Plugin_Name.lower())
                Data_to_Cache.append(BSB_Search_URL)
            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                )

        # Append to an existing cache file, otherwise create a fresh one.
        Write_Mode = "a" if Cached_Data else "w"
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, Write_Mode)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID):
    """Look up IP address details on ipstack.com for each IP-formatted query.

    Query_List -- string or list of candidate IP addresses.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    """
    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())

        # Route this plugin's log output to its own file.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            # Only send well-formed IP addresses to the API.
            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(
                        JSON_Response, JSON_Output_Response,
                        f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, HTML_Output_File_Data,
                        Title.replace(" ", "-"),
                        The_File_Extensions["Main_Converted"])

                    # Bug fix: the HTML conversion was generated but never
                    # attached to the output; include it alongside the JSON.
                    if Output_file and HTML_Output_File:
                        Output_Connections.Output(
                            [Output_file, HTML_Output_File], Result_URL, Title,
                            Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)
                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):
    """Search a Pirate Bay mirror API for each query and report magnet links.

    Query_List -- string or list of search terms.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    kwargs -- optional "Limit" (positive int-like) capping results per query.
    """
    Data_to_Cache = []

    # Bug fix: the original left Limit undefined when the kwarg was absent,
    # raising a NameError in the result loop. Default to 10.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])
    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name) or []
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # A browser User-Agent avoids trivial bot blocking by the mirror.
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0'}
        Response = requests.get('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc', headers=headers).text
        Response = json.loads(Response)
        JSON_Response = json.dumps(Response, indent=4, sort_keys=True)
        Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        if Output_file:
            Current_Step = 0

            for Search_Result in Response:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["magnet"]

                # Removed a redundant inner "if Output_file" check (already
                # guaranteed by the enclosing branch) and dead commented code.
                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < Limit:
                    General.Connections(Output_file, Query, Plugin_Name, Result_URL, "thepiratebay.org", "Data Leakage", Task_ID, Result_Title, Plugin_Name.lower())
                    Data_to_Cache.append(Result_URL)
                    Current_Step += 1

    # Append to an existing cache file, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the Flickr plugin."""
    Name = "Flickr"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "flickr.com"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID, Type):
    """Store per-run configuration for the VirusTotal plugin."""
    Name = "VirusTotal"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "virustotal.com"
    self.Type = Type  # Caller-selected lookup type.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the SSLMate plugin."""
    Name = "SSLMate"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".json"
    self.Domain = "sslmate.com"
    self.Result_Type = "Certificate Details"
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the RSS feed plugin."""
    self.The_File_Extension = ".html"
    Name = "RSS"  # Display name; the logging name is derived from it.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = Name + " Feed Search"
    self.Result_Type = "News Report"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the Kik plugin."""
    Name = "Kik"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "kik.me"
    self.Result_Type = "Social Media - Person"
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the IPStack plugin."""
    Name = "IPStack"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Main_Converted": ".html"}
    self.Domain = "ipstack.com"
    self.Result_Type = "IP Address Information"
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the DNS Reconnaissance plugin."""
    Name = "DNS Reconnaissance"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Concat_Plugin_Name = "dnsrecon"  # Filesystem-safe short name.
    self.Result_Type = "Domain Information"
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the Tumblr plugin."""
    Name = "Tumblr"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "tumblr.com"
    self.Result_Type = "Social Media - Page"
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the BSB plugin."""
    Name = "BSB"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "bsbnumbers.com"
    self.Result_Type = "BSB Details"
def Search(Query_List, Task_ID, **kwargs):
    """Search vulners.com for each query and report non-excluded bulletins.

    Query_List -- string or list of search terms.
    Task_ID -- identifier forwarded to General.Connections for result linking.
    kwargs -- optional "Limit" (positive int-like) capping results per query.
    """
    Data_to_Cache = []

    # Bug fix: the original left Limit undefined when the kwarg was absent,
    # raising a NameError at vulners_api.search below. Default to 10.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])
    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name) or []
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        vulners_api = vulners.Vulners(api_key=Load_Configuration())
        Search_Response = vulners_api.search(Query, limit=Limit)
        JSON_Response = json.dumps(Search_Response, indent=4, sort_keys=True)
        General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        for Search_Result in Search_Response:

            # Skip bulletin families configured as unwanted.
            if Search_Result["bulletinFamily"] not in Unacceptable_Bulletins:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["vhref"]
                Search_Result_Response = requests.get(Result_URL).text

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Result_Response, Result_Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Result_URL, "vulners.com", "Exploit", Task_ID, Result_Title, Plugin_Name.lower())

                    Data_to_Cache.append(Result_URL)

    # Append to an existing cache file, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")
    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def __init__(self, Query_List, Task_ID, Type):
    """Store per-run configuration for the Threat Crowd plugin."""
    Name = "Threat Crowd"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "threatcrowd.org"
    self.Type = Type  # Caller-selected lookup type.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the Email Verification plugin."""
    Name = "Email Verification"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Main_Converted": ".html"}
    self.Concat_Plugin_Name = "emailverify"  # Filesystem-safe short name.
    self.Domain = "verify-email.org"
    self.Result_Type = "Email Information"
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the DuckDuckGo plugin."""
    Name = "DuckDuckGo"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "duckduckgo.com"
    self.Result_Type = "Search Result"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the CRT plugin."""
    Name = "CRT"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "crt.sh"
    self.Result_Type = "Certificate Details"
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the BuiltWith plugin."""
    Name = "BuiltWith"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "builtwith.com"
    self.Result_Type = "Web Application Architecture"
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the Windows Store plugin."""
    Name = "Windows Store"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Concat_Plugin_Name = "windowsstore"  # Filesystem-safe short name.
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "microsoft.com"
    self.Result_Type = "Application"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the Reddit plugin."""
    Name = "Reddit"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "reddit.com"
    self.Result_Type = "Forum"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the Library Genesis plugin."""
    Name = "Library Genesis"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Concat_Plugin_Name = "libgen"  # Filesystem-safe short name.
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extension = ".html"
    self.Domain = "gen.lib.rus.ec"
    self.Result_Type = "Publication"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the YouTube plugin."""
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    Name = "YouTube"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.Domain = "youtube.com"
    self.Result_Type = "Social Media - Media"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the Doing Business plugin."""
    Name = "Doing Business"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Concat_Plugin_Name = "doingbusiness"  # Filesystem-safe short name.
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "doingbusiness.org"
    self.Result_Type = "Economic Details"
def __init__(self, Query_List, Task_ID, Limit=10):
    """Store per-run configuration for the Phishstats plugin."""
    Name = "Phishstats"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Domain = "phishstats.info"
    self.Result_Type = "Phishing"
    self.Limit = General.Get_Limit(Limit)  # Validate/clamp the result cap.
def __init__(self, Query_List, Task_ID):
    """Store per-run configuration for the Email Reputation plugin."""
    Name = "Email Reputation"  # Display name; also drives logging.
    self.Plugin_Name = Name
    self.Logging_Plugin_Name = General.Get_Plugin_Logging_Name(Name)
    self.Task_ID = Task_ID
    self.Query_List = General.Convert_to_List(Query_List)  # Accept str or list.
    self.The_File_Extensions = {"Main": ".json", "Query": ".html"}
    self.Concat_Plugin_Name = "emailrep"  # Filesystem-safe short name.
    self.Domain = "emailrep.io"
    self.Result_Type = "Email Information"