def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Configuration_Details = Load_Configuration()
    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Configuration_Details[1].lower() == "true":
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request, auth=(Configuration_Details[0], '')).text

        else:
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request, auth=(Configuration_Details[0], '')).text

        JSON_Response = json.loads(Response)

        if 'exists' not in JSON_Response:

            if JSON_Response:

                if Request not in Cached_Data and Request not in Data_to_Cache:

                    try:
                        SSLMate_Regex = re.search("([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?", Query)

                        if SSLMate_Regex:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Request, "sslmate.com", "Domain Spoof", Task_ID, General.Get_Title(Request), Plugin_Name.lower())

                    except:
                        logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + "[-] Failed to create file.")

                    Data_to_Cache.append(Request)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
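# A minimal standalone sketch of the Cert Spotter request the plugin above wraps, assuming an API
# token and a plain domain string. The endpoint, parameters, and basic-auth style are taken from
# the request URL in the plugin; the function and variable names are illustrative only.
import requests

def certspotter_issuances(domain, api_token, include_subdomains=True):
    # Query the Cert Spotter issuances endpoint and return the parsed JSON list.
    url = "https://api.certspotter.com/v1/issuances"
    params = {"domain": domain, "expand": ["dns_names", "issuer", "cert"]}
    if include_subdomains:
        params["include_subdomains"] = "true"
    response = requests.get(url, params=params, auth=(api_token, ""))
    response.raise_for_status()
    return response.json()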
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    YouTube_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0])
        Search_Response = YouTube_Handler.search().list(
            q=Query,
            type='video',
            location=YouTube_Details[3],
            locationRadius=YouTube_Details[4],
            part='id,snippet',
            maxResults=Limit,
        ).execute()
        General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, ".json")

        for Search_Result in Search_Response.get('items', []):
            Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId']
            Search_Video_Response = requests.get(Full_Video_URL).text

            if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, Full_Video_URL, "youtube.com", "Data Leakage", Task_ID, General.Get_Title(Full_Video_URL), Plugin_Name.lower())

                Data_to_Cache.append(Full_Video_URL)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def General_Pull(Handle, Limit, Directory, API, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    JSON_Response = []
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Latest_Tweets = API.user_timeline(screen_name=Handle, count=Limit)

    for Tweet in Latest_Tweets:
        Link = ""

        try:
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name,
                'url': Tweet.entities['urls'][0]["expanded_url"]
            })
            Link = Tweet.entities['urls'][0]["expanded_url"]

        except:
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name
            })

    JSON_Output = json.dumps(JSON_Response, indent=4, sort_keys=True)

    for JSON_Item in JSON_Response:

        if 'text' in JSON_Item and 'url' in JSON_Item:
            Link = JSON_Item['url']

            if Link not in Cached_Data and Link not in Data_to_Cache:
                logging.info(General.Date() + " " + Link)
                Item_Response = requests.get(Link).text
                Output_file = General.Create_Query_Results_Output_File(Directory, Handle, Plugin_Name, Item_Response, str(JSON_Item['id']), ".html")

                if Output_file:
                    General.Connections(Output_file, Handle, Plugin_Name, Link, "twitter.com", "Data Leakage", Task_ID, General.Get_Title(Link), Plugin_Name.lower())

                else:
                    logging.warning(General.Date() + " Output file not returned.")

        else:
            logging.warning(General.Date() + " Insufficient parameters provided.")

        Data_to_Cache.append(Link)

    General.Main_File_Create(Directory, Plugin_Name, JSON_Output, Handle, ".json")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
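# A hedged sketch of the tweet-flattening step above, assuming a tweepy-style API object whose
# user_timeline() returns Status objects carrying an .entities dict; it collects the first
# expanded URL of each tweet when one exists. The function name is illustrative.
def collect_tweet_links(api, handle, limit):
    # Return the first expanded URL from each of the account's most recent tweets.
    links = []
    for tweet in api.user_timeline(screen_name=handle, count=limit):
        urls = tweet.entities.get("urls", [])
        if urls:
            links.append(urls[0]["expanded_url"])
    return links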
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0'}
        Response = requests.get('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc', headers=headers).text
        Response = json.loads(Response)
        JSON_Response = json.dumps(Response, indent=4, sort_keys=True)
        Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        if Output_file:
            Current_Step = 0

            for Search_Result in Response:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["magnet"]

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                    # Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Result_Response, Result_Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Result_URL, "thepiratebay.org", "Data Leakage", Task_ID, Result_Title, Plugin_Name.lower())

                    Data_to_Cache.append(Result_URL)
                    Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # Query can be Title or ISBN
        Main_URL = "http://gen.lib.rus.ec/search.php?req=" + Query + "&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
        Lib_Gen_Response = requests.get(Main_URL).text
        General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response, Query, The_File_Extension)
        Lib_Gen_Regex = re.findall("book\/index\.php\?md5=[A-Fa-f0-9]{32}", Lib_Gen_Response)

        if Lib_Gen_Regex:
            Current_Step = 0

            for Regex in Lib_Gen_Regex:
                Item_URL = "http://gen.lib.rus.ec/" + Regex
                Lib_Item_Response = requests.get(Item_URL).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Lib_Item_Response, Regex, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Item_URL, "gen.lib.rus.ec", "Data Leakage", Task_ID, General.Get_Title(Item_URL), Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            BSB_Search_URL = f"https://www.bsbnumbers.com/{Query}.html"
            Response = requests.get(BSB_Search_URL).text
            Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors", Response)
            Output_Connections = General.Connections(Query, Plugin_Name, "bsbnumbers.com", "BSB Details", Task_ID, Plugin_Name.lower())

            if not Error_Regex:

                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, Query, The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], BSB_Search_URL, General.Get_Title(BSB_Search_URL), Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Query returned error, probably does not exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        vulners_api = vulners.Vulners(api_key=Load_Configuration())
        Search_Response = vulners_api.search(Query, limit=int(Limit))
        JSON_Response = json.dumps(Search_Response, indent=4, sort_keys=True)
        General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        for Search_Result in Search_Response:

            if Search_Result["bulletinFamily"] not in Unacceptable_Bulletins:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["vhref"]
                Search_Result_Response = requests.get(Result_URL).text

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Result_Response, Result_Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Result_URL, "vulners.com", "Exploit", Task_ID, Result_Title, Plugin_Name.lower())

                    Data_to_Cache.append(Result_URL)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
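# A minimal standalone sketch of the Vulners lookup used above, assuming a valid API key string.
# The client constructor and search() call mirror the plugin; the filtering helper and the
# excluded_families parameter are illustrative stand-ins for the plugin's Unacceptable_Bulletins.
import vulners

def search_non_excluded(query, api_key, limit=10, excluded_families=("info",)):
    # Return search hits whose bulletinFamily is not in the excluded set.
    api = vulners.Vulners(api_key=api_key)
    results = api.search(query, limit=limit)
    return [hit for hit in results if hit.get("bulletinFamily") not in excluded_families]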
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Output_Connections = General.Connections(Query, Plugin_Name, "general-insurance.coles.com.au", "Vehicle Details", Task_ID, Concat_Plugin_Name)

            for State in States:
                Post_URL = 'https://general-insurance.coles.com.au/bin/wesfarmers/search/vehiclerego'
                data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
                # The original header declared 'ext/plain'; 'text/plain' is the intended MIME type.
                headers = {'Content-Type': 'text/plain;charset=UTF-8', 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36', 'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate, br', 'Referer': 'https://general-insurance.coles.com.au/motor/get-quote', 'Origin': 'https://general-insurance.coles.com.au', 'Host': 'general-insurance.coles.com.au'}
                Registration_Response = requests.post(Post_URL, data=data, headers=headers).text
                Registration_Response = json.loads(Registration_Response)

                try:
                    Title = "Vehicle Registration | " + Registration_Response['vehicles'][0]['make'] + " " + Registration_Response['vehicles'][0]['model']
                    Item_URL = Post_URL + "?" + Query

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(Registration_Response, indent=4, sort_keys=True), Title.replace(" ", "-"), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Item_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                except:
                    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - No result found for given query {Query} for state {State}.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
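# A hedged alternative to the string-concatenated payload above: build the same structure as a
# Python dict and serialise it with json.dumps, so the State and Query values are inserted without
# manual quoting. Only a few representative fields from the original payload are shown here; a
# real request would still need the full field set used in the plugin.
import json

def build_rego_payload(state, registration):
    # Assemble a partial, illustrative vehicle-registration search payload.
    payload = {
        "isRegoSearch": "YES",
        "insuranceType": "COMP",
        "riskAddress": {"state": state},
        "postcode": {"state": state},
        "carRegistration": registration,
        "whatValueInsure": "Marketvalue",
    }
    return json.dumps(payload)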
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
            Craigslist_Response = feedparser.parse(Main_URL)
            Craigslist_Items = Craigslist_Response["items"]
            Current_Step = 0

            for Item in Craigslist_Items:
                Item_URL = Item["link"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Craigslist_Response = requests.get(Item_URL).text
                    Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                    Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org/"
                    Filename = Item_URL.replace(Local_URL, "")
                    Filename = Filename.replace(".html/", "")
                    Filename = Filename.replace(".html", "")
                    Filename = Filename.replace("/", "-")
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Craigslist_Response, Filename, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Local_Domain, "Search Result", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Item_URL, General.Get_Title(Item_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    Current_Step += 1

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        YouTube_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0], cache_discovery=False)

            if YouTube_Details[3] and YouTube_Details[4]:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', location=YouTube_Details[3], locationRadius=YouTube_Details[4], part='id,snippet', maxResults=Limit,).execute()

            else:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', part='id,snippet', maxResults=Limit,).execute()

            Main_File = General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, "youtube.com", "Social Media - Media", Task_ID, Plugin_Name.lower())

            for Search_Result in Search_Response.get('items', []):
                Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId']
                Search_Video_Response = requests.get(Full_Video_URL).text
                Title = "YouTube | " + Search_Result['snippet']['title']

                if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Full_Video_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Full_Video_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
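# A minimal standalone sketch of the YouTube Data API search used above, assuming the
# google-api-python-client package and that the configured service name and version are
# "youtube" and "v3" (the plugin reads them from configuration); the API key is a placeholder.
from googleapiclient import discovery

def youtube_video_urls(api_key, query, max_results=10):
    # Return watch URLs for the top video results of a search query.
    handler = discovery.build("youtube", "v3", developerKey=api_key, cache_discovery=False)
    response = handler.search().list(q=query, type="video", part="id,snippet", maxResults=max_results).execute()
    return ["https://www.youtube.com/watch?v=" + item["id"]["videoId"] for item in response.get("items", [])]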
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if General.Regex_Checker(Query, "IP"):
                API_Key = Load_Configuration()
                Search_Response = General.Request_Handler(f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Response = json.loads(Search_Response)
                JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "IP Address Information", Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Output_Response, Title, The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        BSB_Search_URL = "https://www.bsbnumbers.com/" + Query + ".html"
        Response = requests.get(BSB_Search_URL).text
        Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors", Response)

        if not Error_Regex:

            if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, Query, The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, BSB_Search_URL, "bsbnumbers.com", "Data Leakage", Task_ID, General.Get_Title(BSB_Search_URL), Plugin_Name.lower())

                Data_to_Cache.append(BSB_Search_URL)

        else:
            logging.warning(General.Date() + " Query returned error, probably does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            URL_Query = urllib.parse.quote(Query)
            URL = f"https://api.duckduckgo.com/?q={URL_Query}&format=json"
            DDG_Response = requests.get(URL).text
            JSON_Response = json.loads(DDG_Response)
            JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, "duckduckgo.com", "Search Result", Task_ID, Plugin_Name.lower())

            if JSON_Response.get('RelatedTopics'):
                Current_Step = 0

                for DDG_Item_Link in JSON_Response['RelatedTopics']:

                    try:

                        if 'FirstURL' in DDG_Item_Link:
                            DDG_URL = DDG_Item_Link['FirstURL']
                            Title = General.Get_Title(DDG_URL)
                            Title = f"DuckDuckGo | {Title}"

                            if DDG_URL not in Cached_Data and DDG_URL not in Data_to_Cache and Current_Step < int(Limit):
                                headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'}
                                DDG_Item_Response = requests.get(DDG_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, DDG_Item_Response, DDG_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], DDG_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(DDG_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                            else:
                                break

                        elif 'Topics' in DDG_Item_Link:

                            if type(DDG_Item_Link['Topics']) == list:
                                JSON_Response['RelatedTopics'].extend(DDG_Item_Link['Topics'])

                    except Exception as e:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No results found.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
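# A minimal standalone sketch of the DuckDuckGo Instant Answer call used above, assuming the same
# public endpoint; it flattens nested "Topics" groups into a single list of FirstURL values, which
# is the traversal the plugin performs in place by extending RelatedTopics while iterating.
import requests

def duckduckgo_related_urls(query):
    # Fetch the Instant Answer response and collect every FirstURL, including nested topic groups.
    response = requests.get("https://api.duckduckgo.com/", params={"q": query, "format": "json"})
    response.raise_for_status()
    topics = list(response.json().get("RelatedTopics", []))
    urls = []
    while topics:
        item = topics.pop(0)
        if "FirstURL" in item:
            urls.append(item["FirstURL"])
        elif isinstance(item.get("Topics"), list):
            topics.extend(item["Topics"])
    return urls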
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    try:
        Flickr_Details = Load_Configuration()
        flickr_api.set_keys(api_key=Flickr_Details[0], api_secret=Flickr_Details[1])

    except:
        logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to establish API identity.")

    for Query in Query_List:
        Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

        if Email_Regex:

            try:
                User = flickr_api.Person.findByEmail(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query, ".txt")
                Output_Connections = General.Connections(Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID, Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo["id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                        headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'}
                        Photo_Response = requests.get(Photo_URL, headers=headers).text
                        # Use the photo ID for the output filename, matching the username branch below.
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(Output_file, Photo_URL, General.Get_Title(Photo_URL))
                            Data_to_Cache.append(Photo_URL)

                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to make API call.")

        else:

            try:
                User = flickr_api.Person.findByUserName(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query, ".txt")
                Output_Connections = General.Connections(Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID, Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo["id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                        headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'}
                        Photo_Response = requests.get(Photo_URL, headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(Output_file, Photo_URL, General.Get_Title(Photo_URL))
                            Data_to_Cache.append(Photo_URL)

                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                headers = {"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0", "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"}
                body = {"f.req": f'''[[["lGYRle","[[[],[[10,[10,50]],true,null,[96,27,4,8,57,30,110,11,16,49,1,3,9,12,104,55,56,51,10,34,31,77,145],[null,null,null,[[[[7,31],[[1,52,43,112,92,58,69,31,19,96,103]]]]]]],[\\"{Query}\\"],7,[null,1]]]",null,"2"]]]'''}
                Play_Store_Response = requests.post("https://play.google.com/_/PlayStoreUi/data/batchexecute", headers=headers, data=body).text
                Play_Store_Response = Play_Store_Response.replace(')]}\'\n\n', "").replace("\\\\u003d", "=")
                Play_Store_Response_JSON = json.dumps(json.loads(Play_Store_Response), indent=4, sort_keys=True)
                Main_File = General.Main_File_Create(Directory, Plugin_Name, Play_Store_Response_JSON, Query, The_File_Extensions["Main"])
                Output_Connections = General.Connections(Query, Plugin_Name, "play.google.com", "Application", Task_ID, Concat_Plugin_Name)
                Win_Store_Regex = re.findall(r"(\/store\/apps\/details\?id\\\\([\w\d\.]+))\\\"", Play_Store_Response)
                Current_Step = 0

                for Result, Item in Win_Store_Regex:
                    Result = Result.replace("\\\\u003d", "=")
                    Result_URL = f"https://play.google.com{Result}"
                    Item = Item.replace("u003d", "")
                    Title = f"Play Store | {Item}"

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0'}
                        Play_Store_Response = requests.get(Result_URL, headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Play_Store_Response, Item, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Result_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            except:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results, this may be due to the query provided.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Tor_Pull_URL = Tor_General_URL + Query
        Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL, Tor_Scrape_Regex_URL)

        if Tor_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory, Tor_Plugin_Name.lower(), "\n".join(Tor_Scrape_URLs), Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in Tor_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):
                        # The Ahmia search engine is hosted at ahmia.fi.
                        General.Connections(Output_file, Query, Tor_Plugin_Name, URL, "ahmia.fi", "Domain Spoof", Task_ID, General.Get_Title(URL), Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No Tor links scraped.")

        I2P_Pull_URL = I2P_General_URL + Query
        I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL, I2P_Scrape_Regex_URL)

        if I2P_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory, I2P_Plugin_Name.lower(), "\n".join(I2P_Scrape_URLs), Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in I2P_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):
                        General.Connections(Output_file, Query, I2P_Plugin_Name, URL, "ahmia.fi", "Domain Spoof", Task_ID, General.Get_Title(URL), Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No I2P links scraped.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []
    Results = []

    # Default the limit so it is always defined, even when no Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Reddit_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Reddit_Connection = praw.Reddit(client_id=Reddit_Details[0], client_secret=Reddit_Details[1], user_agent=Reddit_Details[2], username=Reddit_Details[3], password=Reddit_Details[4])
            All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

            for Subreddit in All_Subreddits.search(Query, limit=Limit):  # Limit, subreddit and search to be controlled by the web app.
                Current_Result = []
                Current_Result.append(Subreddit.url)
                Current_Result.append(Subreddit.selftext)
                Results.append(Current_Result)

        except:
            logging.warning(General.Date() + " Failed to get results. Are you connected to the internet?")

        for Result in Results:

            if Result[0] not in Cached_Data and Result[0] not in Data_to_Cache:

                try:
                    Reddit_Regex = re.search("https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/", Result[0])

                    if Reddit_Regex:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Result[1], Reddit_Regex.group(3), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Result[0], "reddit.com", "Data Leakage", Task_ID, General.Get_Title(Result[0]), Plugin_Name.lower())

                except:
                    logging.warning(General.Date() + " Failed to create file.")

                Data_to_Cache.append(Result[0])

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            # Query can be Title or ISBN
            Main_URL = f"http://{Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = General.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response, Query, The_File_Extension)
            Lib_Gen_Regex = re.findall("book\/index\.php\?md5=[A-Fa-f0-9]{32}", Lib_Gen_Response)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                    Lib_Item_Responses = General.Request_Handler(Item_URL, Filter=True, Host=f"http://{Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Lib_Item_Response, Regex, The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Publication", Task_ID, Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if General.Regex_Checker(Query, "Domain"):
                Request = f"https://{Domain}/?q={Query}"
                Responses = General.Request_Handler(Request, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]

                if "<TD class=\"outer\"><I>None found</I></TD>" not in Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            # Derive the output filename stem from the queried domain; this regex was
                            # referenced but never defined in the original function.
                            SSLMate_Regex = re.search(r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?", Query)
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name.lower(), Filtered_Response, SSLMate_Regex.group(1) if SSLMate_Regex else Query, The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Certificate", Task_ID, Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", Plugin_Name.lower())
                                Data_to_Cache.append(Request)

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        except:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Query does not exist.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Craigslist_Location = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Main_URL = "https://" + Craigslist_Location.lower() + ".craigslist.org/search/sss?format=rss&query=" + Query
        Craigslist_Response = feedparser.parse(Main_URL)
        Craigslist_Items = Craigslist_Response["items"]
        Current_Step = 0

        for Item in Craigslist_Items:
            Item_URL = Item["link"]

            if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                Craigslist_Response = requests.get(Item_URL).text
                Local_URL = "https://" + Craigslist_Location.lower() + ".craigslist.org/"
                Local_Domain = Craigslist_Location.lower() + ".craigslist.org/"
                Filename = Item_URL.replace(Local_URL, "")
                Filename = Filename.replace(".html/", "")
                Filename = Filename.replace(".html", "")
                Filename = Filename.replace("/", "-")
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Craigslist_Response, Filename, The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, Item_URL, Local_Domain, "Data Leakage", Task_ID, General.Get_Title(Item_URL), Plugin_Name.lower())

                Data_to_Cache.append(Item_URL)
                Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
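# A minimal standalone sketch of the Craigslist RSS search used by the two Craigslist plugins
# above, assuming feedparser is installed and that the location prefix (e.g. "sydney") is a valid
# craigslist subdomain; the feed URL format is taken from the plugins, the function name is not.
import feedparser

def craigslist_search_links(location, query):
    # Parse the RSS search feed for a location and return the item links.
    feed_url = f"https://{location.lower()}.craigslist.org/search/sss?format=rss&query={query}"
    feed = feedparser.parse(feed_url)
    return [item["link"] for item in feed.get("items", [])]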
def Transaction_Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type != "monero":

                if Type == "btc" or Type == "bch":
                    Query_Regex = re.search(r"[\d\w]{64}", Query)

                elif Type == "eth":
                    Query_Regex = re.search(r"(0x[\d\w]{64})", Query)

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

                if Query_Regex:
                    Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}"
                    Main_Response = General.Request_Handler(Main_URL)

                    if Type == "btc":
                        Address_Regex = re.findall(r"\/btc\/address\/([\d\w]{26,34})", Main_Response)

                    elif Type == "bch":
                        Address_Regex = re.findall(r"([\d\w]{42})", Main_Response)

                    elif Type == "eth":
                        Address_Regex = re.findall(r"(0x[\w\d]{40})", Main_Response)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Transaction_Responses = General.Request_Handler(Query_URL, Filter=True, Host=f"https://www.{Domain}")
                                Transaction_Response = Transaction_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Transaction, The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output([Output_file], Query_URL, General.Get_Title(Query_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

            else:
                Alt_Domain = "localmonero.co"
                Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}"
                Transaction_Response = General.Request_Handler(Query_URL)

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Responses = General.Request_Handler(Query_URL, Filter=True, Host=f"https://{Alt_Domain}")
                    Transaction_Response = Transaction_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Transaction_Response, Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Alt_Domain, "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Query_URL, General.Get_Title_Requests_Module(Query_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "CIK":
            Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + Query + '&owner=exclude&count=40&hidefilings=0'
            Response = requests.get(Main_URL).text

            try:

                if 'No matching CIK.' not in Response:
                    Query = str(int(Query))

                    if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Main_URL, "sec.gov", "Data Leakage", Task_ID, General.Get_Title(Main_URL), Plugin_Name)

                        Data_to_Cache.append(Main_URL)

            except:
                logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Invalid query provided for CIK Search.")

        elif Type == "ACN":
            Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?company=' + Query + '&owner=exclude&action=getcompany'
            Response = requests.get(Main_URL).text

            # Default the limit so it is always defined, even when no valid Limit kwarg is supplied.
            if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
                Limit = int(kwargs["Limit"])

            else:
                Limit = 10

            try:
                ACN = re.search(r".*[a-zA-Z].*", Query)

                if ACN:
                    General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extension)
                    Current_Step = 0
                    CIKs_Regex = re.findall(r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>", Response)

                    if CIKs_Regex:

                        for CIK_URL, ACN in CIKs_Regex:
                            Full_CIK_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + CIK_URL + '&owner=exclude&count=40&hidefilings=0'

                            if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Current_Response = requests.get(Full_CIK_URL).text
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), The_File_Extension)

                                if Output_file:
                                    General.Connections(Output_file, Query, Plugin_Name, Full_CIK_URL, "sec.gov", "Data Leakage", Task_ID, General.Get_Title(Full_CIK_URL), Plugin_Name)

                                Data_to_Cache.append(Full_CIK_URL)
                                Current_Step += 1

            except:
                logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Invalid query provided for ACN Search.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
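# A minimal standalone sketch of the SEC EDGAR company-search request used above, assuming the
# same browse-edgar endpoint and "No matching CIK." marker string that appear in the plugin; the
# helper name and return value are illustrative.
import requests

def edgar_cik_exists(cik):
    # True when the EDGAR company search does not report "No matching CIK.".
    url = "https://www.sec.gov/cgi-bin/browse-edgar"
    params = {"action": "getcompany", "CIK": cik, "owner": "exclude", "count": 40, "hidefilings": 0}
    response = requests.get(url, params=params)
    response.raise_for_status()
    return "No matching CIK." not in response.text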
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Results = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Reddit_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Limit = General.Get_Limit(kwargs)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:
                Reddit_Connection = praw.Reddit(client_id=Reddit_Details[0], client_secret=Reddit_Details[1], user_agent=Reddit_Details[2], username=Reddit_Details[3], password=Reddit_Details[4])
                All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

                for Subreddit in All_Subreddits.search(Query, limit=Limit):  # Limit, subreddit and search to be controlled by the web app.
                    Results.append(Subreddit.url)

            except:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?")

            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Forum", Task_ID, Plugin_Name.lower())

            # Results holds plain URL strings, so each Result is used directly rather than indexed.
            for Result in Results:

                if Result not in Cached_Data and Result not in Data_to_Cache:

                    try:
                        Reddit_Regex = re.search("https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/", Result)

                        if Reddit_Regex:
                            Reddit_Responses = General.Request_Handler(Result, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                            Reddit_Response = Reddit_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Reddit_Response, Reddit_Regex.group(3), The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Output_file], Result, General.Get_Title(Result), Plugin_Name.lower())
                                Data_to_Cache.append(Result)

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "NZBN":
                Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + Query + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                Response = requests.get(Main_URL).text

                try:

                    if 'An error has occurred and the requested action cannot be performed.' not in Response:
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Main_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Main_URL), Plugin_Name)

                            Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZBN Search.")

            elif Type == "NZCN":
                Limit = 10

                if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
                    Limit = kwargs["Limit"]

                try:
                    Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + urllib.parse.quote(Query) + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=' + str(Limit) + '&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Response = requests.get(Main_URL).text
                    NZCN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if NZCN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extension)
                        NZBNs_Regex = re.findall(r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d{6})\)\s\(NZBN\:\s(\d{13})\)", Response)

                        if NZBNs_Regex:

                            for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                Full_NZBN_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/' + NZ_ID + '?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                    Current_Response = requests.get(Full_NZBN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), NZCN.replace(' ', '-'), The_File_Extension)

                                    if Output_file:
                                        General.Connections(Output_file, Query, Plugin_Name, Full_NZBN_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Full_NZBN_URL), Plugin_Name)

                                    Data_to_Cache.append(Full_NZBN_URL)

                        else:
                            logging.warning(General.Date() + " Response did not match regular expression.")

                    else:
                        logging.warning(General.Date() + " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZCN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Subdomains = Load_Configuration()
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Subdomains:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'

            else:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&expand=dns_names&expand=issuer&expand=cert'

            Response = requests.get(Request).text
            JSON_Response = json.loads(Response)

            if 'exists' not in JSON_Response:

                if JSON_Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            SSLMate_Regex = re.search(r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?", Query)

                            if SSLMate_Regex:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, "sslmate.com", "Certificate", Task_ID, Plugin_Name.lower())
                                    Data_to_Cache.append(Request)

                                    if Subdomains:
                                        Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output([Output_file], Request, f"Domain Certificate Search for {Query}", Plugin_Name.lower())

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

                        except:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No response.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Query does not exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
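On success the Cert Spotter issuances endpoint returns a JSON list of issuance objects, and with expand=dns_names each object should carry a dns_names list. A hedged sketch that pulls the unique hostnames out of the parsed response saved above (the field name follows Cert Spotter's documentation and is an assumption here):

def Extract_DNS_Names(JSON_Response):
    # JSON_Response is the parsed list returned by
    # https://api.certspotter.com/v1/issuances?...&expand=dns_names
    Hostnames = set()

    for Issuance in JSON_Response:

        for Name in Issuance.get("dns_names", []):
            Hostnames.add(Name.lower())

    return sorted(Hostnames)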
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "ABN":
                Main_URL = 'https://abr.business.gov.au/ABN/View?id=' + Query
                Response = requests.get(Main_URL).text

                try:

                    if 'Error searching ABN Lookup' not in Response:
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Main_URL, "abr.business.gov.au", "Data Leakage", Task_ID, General.Get_Title(Main_URL), Plugin_Name)

                            Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() + " Invalid query provided for ABN Search.")

            elif Type == "ACN":
                Main_URL = 'https://abr.business.gov.au/Search/Run'
                Data = {
                    'SearchParameters.SearchText': Query,
                    'SearchParameters.AllNames': 'true',
                    'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton': 'Search'
                }
                Response = requests.post(Main_URL, data=Data).text
                Limit = 10

                if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
                    Limit = kwargs["Limit"]

                try:
                    ACN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if ACN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extension)
                        Current_Step = 0
                        ABNs_Regex = re.findall(r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,", Response)

                        if ABNs_Regex:

                            for ABN_URL, ACN in ABNs_Regex:
                                Full_ABN_URL = 'https://abr.business.gov.au/ABN/View?abn=' + ABN_URL

                                if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    ACN = ACN.rstrip()
                                    Current_Response = requests.get(Full_ABN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), The_File_Extension)

                                    if Output_file:
                                        General.Connections(Output_file, Query, Plugin_Name, Full_ABN_URL, "abr.business.gov.au", "Data Leakage", Task_ID, General.Get_Title(Full_ABN_URL), Plugin_Name)

                                    Data_to_Cache.append(Full_ABN_URL)
                                    Current_Step += 1

                        else:
                            logging.warning(General.Date() + " Response did not match regular expression.")

                    else:
                        logging.warning(General.Date() + " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() + " Invalid query provided for ACN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
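The ABN branch only checks that the query is numeric via str(int(Query)). A hedged pre-check could also verify the published ABN structure, eleven digits with a weighted checksum divisible by 89, before a request is issued; the helper name is illustrative and the weights are quoted from the public ABN algorithm rather than from this project:

def Validate_ABN(Query):
    # Structural check for an Australian Business Number: 11 digits and a
    # weighted checksum divisible by 89 (first digit reduced by one).
    Digits = [int(Character) for Character in Query if Character.isdigit()]

    if len(Digits) != 11:
        return False

    Digits[0] -= 1
    Weights = [10, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19]
    return sum(Digit * Weight for Digit, Weight in zip(Digits, Weights)) % 89 == 0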
def Character_Switch(Query_List, Task_ID):
    Local_Plugin_Name = Plugin_Name + "-Character-Switch"
    Data_to_Cache = []
    Cached_Data = []
    Valid_Results = ["Domain,IP Address"]
    Valid_Hosts = []
    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Local_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Character Switching Selected.")
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        URL_Regex = re.search(r"(https?:\/\/(www\.)?)?([-a-zA-Z0-9@:%_\+~#=]{2,256})(\.[a-z]{2,3})(\.[a-z]{2,3})?(\.[a-z]{2,3})?", Query)

        if URL_Regex:
            URL_Prefix = URL_Regex.group(1)
            URL_Body = URL_Regex.group(3)

            if URL_Regex.group(5) and URL_Regex.group(6):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(5) + URL_Regex.group(6)

            elif URL_Regex.group(5):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(5)

            else:
                URL_Extension = URL_Regex.group(4)

        else:
            logging.warning(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " Please provide valid URLs.")
            continue

        logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + URL_Body)
        URL_List = list(URL_Body)
        Altered_URLs = Rotor.Search(URL_List, True, False, False, False, True, True, True)
        logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + ", ".join(Altered_URLs))

        for Altered_URL in Altered_URLs:

            if not Altered_URL == URL_Body:

                try:
                    Query = Altered_URL + URL_Extension
                    Web_Host = URL_Prefix.replace("s", "") + Query
                    Response = socket.gethostbyname(Query)

                    if Response:
                        Cache = Query + ":" + Response

                        if Cache not in Cached_Data and Cache not in Data_to_Cache:
                            Valid_Results.append(Query + "," + Response)
                            Data_to_Cache.append(Cache)
                            Valid_Hosts.append(Web_Host)

                except Exception as e:
                    logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + str(e))

        logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + Directory)
        URL_Domain = URL_Body + URL_Extension
        Output_File = General.Main_File_Create(Directory, Local_Plugin_Name, "\n".join(Valid_Results), URL_Body, The_File_Extension)

        if Output_File:

            for Host in Valid_Hosts:
                General.Connections(Output_File, Query, Local_Plugin_Name, Host, URL_Domain, "Domain Spoof", Task_ID, General.Get_Title(Host), Local_Plugin_Name.lower())

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")
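Rotor.Search is the project's own permutation engine, so its output is taken as given above. As a rough illustration of the idea, a simplified stand-in that swaps single characters for common look-alikes to produce candidate spoof domains (the mapping and helper name are illustrative, not the project's):

def Simple_Character_Switch(URL_Body):
    # Simplified stand-in for Rotor.Search: substitute one visually similar
    # character at a time to build candidate look-alike domain bodies.
    Look_Alikes = {"o": "0", "l": "1", "i": "1", "e": "3", "a": "4", "s": "5"}
    Candidates = set()

    for Index, Character in enumerate(URL_Body):
        Replacement = Look_Alikes.get(Character.lower())

        if Replacement:
            Candidates.add(URL_Body[:Index] + Replacement + URL_Body[Index + 1:])

    return sorted(Candidates)

Each candidate would then be resolved with socket.gethostbyname, as the plugin does, to see whether the spoofed name is actually registered.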
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        try:
            DNS_Info = checkdmarc.check_domains(Query_List)

            if len(Query_List) > 1:

                for DNS_Item in DNS_Info:
                    Query = DNS_Item['base_domain']
                    Output_Dict = json.dumps(DNS_Item, indent=4, sort_keys=True)
                    Link = "https://www." + Query
                    Title = "DNS Information for " + DNS_Item['base_domain']

                    if Link not in Data_to_Cache and Link not in Cached_Data:
                        Response = requests.get(Link, headers=headers).text
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Output_Dict, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(Query, Plugin_Name, Query, "Domain Information", Task_ID, Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                Query = DNS_Info['base_domain']
                Output_Dict = json.dumps(DNS_Info, indent=4, sort_keys=True)
                Link = "https://www." + Query
                Title = "DNS Information for " + Query

                if Link not in Data_to_Cache and Link not in Cached_Data:
                    Response = requests.get(Link, headers=headers).text
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, Output_Dict, Query, The_File_Extensions["Main"])
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, Title, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Query, "Domain Information", Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                        Data_to_Cache.append(Link)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        except:
            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Error retrieving DNS details.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
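The branch on len(Query_List) reflects how checkdmarc.check_domains is being used here: a single mapping comes back for one domain and a list of mappings for several. A minimal usage sketch under that assumption (the helper name is illustrative):

import json
import checkdmarc

def Dump_DNS_Info(Domains):
    # check_domains appears to return one mapping for a single domain and a
    # list for multiple domains, hence the branch in the plugin above.
    DNS_Info = checkdmarc.check_domains(Domains)

    if len(Domains) > 1:
        return {Item["base_domain"]: json.dumps(Item, indent=4, sort_keys=True) for Item in DNS_Info}

    return {DNS_Info["base_domain"]: json.dumps(DNS_Info, indent=4, sort_keys=True)}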
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            File_Dir = os.path.dirname(os.path.realpath('__file__'))
            Configuration_File = os.path.join(File_Dir, 'plugins/common/config/RSS_Feeds.txt')
            Current_File = open(Configuration_File, "r")  # Open the provided file and retrieve each client to test.
            URLs = Current_File.read().splitlines()
            Current_File.close()

        except:
            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Please provide a valid file, failed to open the file which contains the data to search for.")

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            for URL in URLs:  # URLs to be controlled by the web app.
                RSS = feedparser.parse(URL)
                Current_Step = 0

                for Feed in RSS.entries:

                    if Query in Feed.description:
                        Dump_Types = General.Data_Type_Discovery(Feed.description)
                        File_Link = Feed.link.replace("https://", "")
                        File_Link = File_Link.replace("http://", "")
                        File_Link = File_Link.replace("www.", "")
                        File_Link = File_Link.replace("/", "-")
                        Domain = URL.replace("https://", "")
                        Domain = Domain.replace("http://", "")
                        Domain = Domain.replace("www.", "")

                        if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Feed.description, File_Link, The_File_Extension)
                            Title = "RSS Feed | " + General.Get_Title(Feed.link)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "News Report", Task_ID, Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Feed.link, Title, Plugin_Name.lower(), Dump_Types=Dump_Types)
                                Data_to_Cache.append(Feed.link)

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                    else:
                        logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Query not found.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
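The matching step above boils down to parsing each configured feed and keeping entries whose description mentions the query, capped by the limit. A small self-contained sketch of that filter, with the helper name and the defensive getattr being additions of mine:

import feedparser

def Matching_Feed_Entries(Feed_URL, Query, Limit=10):
    # Parse one RSS feed and return up to Limit entry links whose description
    # mentions the query, mirroring the plugin's filtering loop.
    RSS = feedparser.parse(Feed_URL)
    Matches = []

    for Feed in RSS.entries:
        Description = getattr(Feed, "description", "")

        if Query in Description:
            Matches.append(Feed.link)

        if len(Matches) >= int(Limit):
            break

    return Matches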
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        try:
            Flickr_Details = Load_Configuration()
            flickr_api.set_keys(api_key=Flickr_Details[0], api_secret=Flickr_Details[1])

        except:
            logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to establish API identity.")

        for Query in Query_List:
            Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

            if Email_Regex:

                try:
                    User = flickr_api.Person.findByEmail(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Convert_to_JSON(Photos), Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, Plugin_Name, "flickr.com", "Social Media - Media", Task_ID, Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.flickr.com/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Photo_Response = requests.get(Photo_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Photo_URL, General.Get_Title(Photo_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No photos found.")

                except:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call.")

            else:

                try:
                    User = flickr_api.Person.findByUserName(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Convert_to_JSON(Photos), Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID, Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.flickr.com/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Photo_Response = requests.get(Photo_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Photo_URL, General.Get_Title(Photo_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No photos found.")

                except:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
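The Flickr plugin calls a Convert_to_JSON helper that is not shown in this section. A hypothetical stand-in under the assumption that it simply serialises the photo records the plugin already indexes by 'id' (the real helper may keep more fields):

import json

def Convert_to_JSON(Photos):
    # Hypothetical stand-in: flickr_api photo objects are not directly
    # JSON-serialisable, so keep only the identifier the plugin uses.
    return json.dumps([{"id": str(Photo["id"])} for Photo in Photos], indent=4, sort_keys=True)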