Example #1
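# IP address lookup plugin. Appears to query the ipstack geolocation API
# (api.<Domain>/<ip>?access_key=<key>) and to save the prettified JSON response
# as a query result. Assumes module-level Plugin_Name, Domain,
# The_File_Extension and Load_Configuration, the project's General helper
# module, and standard imports (os, json, logging). Note that
# __name__.strip('plugins.') strips a character *set* rather than the prefix;
# __name__.replace('plugins.', '') is likely what was intended here and in the
# examples below.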
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if General.Regex_Checker(Query, "IP"):
                API_Key = Load_Configuration()
                Search_Response = General.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Response = json.loads(Search_Response)
                JSON_Output_Response = json.dumps(JSON_Response,
                                                  indent=4,
                                                  sort_keys=True)
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                Result_URL = f"https://{Domain}/?{Query}"

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL,
                                                  Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #2
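# YouTube search plugin. Builds a Google API client via discovery.build and
# runs a video search, optionally constrained by location and radius, saving
# each result's watch page. Load_Configuration appears to return
# [developer_key, api_name, api_version, location, location_radius].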
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        YouTube_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0], cache_discovery=False)

            if YouTube_Details[3] and YouTube_Details[4]:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', location=YouTube_Details[3], locationRadius=YouTube_Details[4], part='id,snippet', maxResults=Limit,).execute()

            else:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', part='id,snippet', maxResults=Limit,).execute()
            
            Main_File = General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Social Media - Media", Task_ID, Plugin_Name.lower())

            for Search_Result in Search_Response.get('items', []):
                Full_Video_URL = f"https://www.{Domain}/watch?v=" + Search_Result['id']['videoId']
                Search_Video_Responses = General.Request_Handler(Full_Video_URL, Filter=True, Host=f"https://www.{Domain}")
                Search_Video_Response = Search_Video_Responses["Filtered"]
                Title = "YouTube | " + Search_Result['snippet']['title']

                if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Full_Video_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Full_Video_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #3
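# SEC EDGAR company search plugin. "CIK" looks up a single Central Index Key;
# "ACN" searches by company name and scrapes matching CIK numbers out of the
# HTML with a regular expression, following each one up to the configured
# limit.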
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CIK":
                    Main_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={Query}&owner=exclude&count=40&hidefilings=0'
                    Responses = General.Request_Handler(
                        Main_URL, Filter=True, Host=f"https://www.{Domain}")
                    Response = Responses["Regular"]

                    try:

                        if 'No matching CIK.' not in Response:
                            Query = str(int(Query))
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    f"edgar-american-business-search-{Query.lower()}",
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name, Domain,
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Output_file], Main_URL,
                                        f"American Business Number (EDGAR) {Query}",
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except Exception:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CIK Search."
                        )

                elif Type == "ACN":
                    Main_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?company={Query}&owner=exclude&action=getcompany'
                    Responses = General.Request_Handler(
                        Main_URL, Filter=True, Host=f"https://www.{Domain}")
                    Response = Responses["Regular"]
                    Filtered_Response = Responses["Filtered"]
                    Limit = General.Get_Limit(kwargs)

                    try:
                        ACN = re.search(r".*[a-zA-Z].*", Query)

                        if ACN:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Filtered_Response,
                                Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            CIKs_Regex = re.findall(
                                r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>",
                                Response)

                            if CIKs_Regex:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain,
                                    "Company Details", Task_ID, Plugin_Name)

                                for CIK, Company_Name in CIKs_Regex:
                                    Full_CIK_URL = f'https://www.{Domain}/cgi-bin/browse-edgar?action=getcompany&CIK={CIK}&owner=exclude&count=40&hidefilings=0'

                                    if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int(
                                            Limit):
                                        Current_Responses = General.Request_Handler(
                                            Full_CIK_URL,
                                            Filter=True,
                                            Host=f"https://www.{Domain}")
                                        Current_Response = Current_Responses[
                                            "Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            Company_Name.replace(' ', '-'),
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_CIK_URL,
                                                f"American Business Number (EDGAR) {CIK} for Query {Query}",
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_CIK_URL)

                                        else:
                                            logging.warning(
                                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Response did not match regular expression."
                                )

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Query did not match regular expression."
                            )

                    except Exception:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ACN Search."
                        )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type."
                    )

            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #4
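    # Domain-spoof enumeration method. Permutates the queried URL across
    # generic extensions and global domain suffixes on a thread pool, then
    # fetches and records every host that responded. Generic_Extensions,
    # Global_Domain_Suffixes, Query_URL, Valid_Hosts and Valid_Results are
    # attributes assumed to be populated elsewhere in the class.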
    def All_Extensions(self):

        try:
            Local_Plugin_Name = self.Plugin_Name + "-All-Extensions"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            logging.info(
                f"{General.Date()} - {__name__.strip('plugins.')} - All Extensions Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Please provide valid URLs."
                    )

                Pool = mpool.ThreadPool(multiprocessing.cpu_count() ** 2)
                Pool_Threads = []

                for Extension in self.Generic_Extensions:

                    for suffix in self.Global_Domain_Suffixes:
                        suffix = suffix.replace(".com", "")
                        suffix = suffix.replace(".co", "")

                        if not self.URL_Extension == suffix:
                            Thread = Pool.apply_async(self.Query_URL,
                                                      args=(
                                                          self.URL_Body,
                                                          Extension + suffix,
                                                      ))
                            Pool_Threads.append(Thread)

                for Pool_Thread in Pool_Threads:
                    Pool_Thread.wait()
                URL_Domain = self.URL_Body + self.URL_Extension
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])

                if Main_File:

                    for Host in self.Valid_Hosts:
                        Current_Domain = Host[0].replace(
                            'https://', '').replace('http://', '')

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_File_List = [Main_File, Output_File]
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_File_List,
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        except requests.exceptions.ConnectionError:
                            Output_File_List = [Main_File]
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                Output_File_List,
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

                if self.Data_to_Cache:

                    if self.Cached_Data:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "a")

                    else:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #5
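# ThreatCrowd lookup plugin. Queries the v2 search API for email, domain,
# IP address, antivirus or virus-report types; the five branches are
# near-identical apart from the endpoint and the connection type they record.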
def Search(Query_List, Task_ID, Type):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Type == "Email":

                if General.Regex_Checker(Query, Type):
                    Local_Plugin_Name = Plugin_Name + "-" + Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}"
                    Response = General.Request_Handler(URL)
                    JSON_Response = json.loads(Response)

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = General.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Provided query returned no results.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match query to email regular expression.")

            elif Type == "Domain":

                if General.Regex_Checker(Query, Type):
                    Local_Plugin_Name = Plugin_Name + "-" + Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={Query}"
                    Response = General.Request_Handler(URL)
                    JSON_Response = json.loads(Response)

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = General.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Domain Information", Task_ID, Local_Plugin_Name.lower())
                        
                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Provided query returned no results.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match query to domain regular expression.")

            elif Type == "IP Address":

                if General.Regex_Checker(Query, "IP"):
                    Local_Plugin_Name = Plugin_Name + "-" + Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}"
                    Response = General.Request_Handler(URL)
                    JSON_Response = json.loads(Response)

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = General.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                        Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Domain Information", Task_ID, Local_Plugin_Name.lower())
                        
                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    else:
                        logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Provided query returned no results.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match query to IP address regular expression.")

            elif Type == "AV":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}"
                Response = General.Request_Handler(URL)
                JSON_Response = json.loads(Response)

                if int(JSON_Response.get("response_code")) != 0:
                    JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                    Permalink = JSON_Response.get("permalink")
                    Permalink_Responses = General.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                    Permalink_Response = Permalink_Responses["Filtered"]
                    Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Virus", Task_ID, Local_Plugin_Name.lower())
                    
                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Provided query returned no results.")

            elif Type == "Virus Report":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}"
                Response = General.Request_Handler(URL)
                JSON_Response = json.loads(Response)

                if int(JSON_Response.get("response_code")) != 0:
                    JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                    Permalink = JSON_Response.get("permalink")
                    Permalink_Responses = General.Request_Handler(Permalink, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                    Permalink_Response = Permalink_Responses["Filtered"]
                    Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
                    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Virus Report", Task_ID, Local_Plugin_Name.lower())
                    
                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Provided query returned no results.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #6
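# iTunes Search API plugin. Queries the software entity for the configured
# country, then fetches and saves each matching developer page (artistViewUrl)
# up to the configured limit.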
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = General.Request_Handler(Main_URL)

            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Response = json.loads(Response)
            Main_File = General.Main_File_Create(
                Directory, "iTunes",
                json.dumps(JSON_Response, indent=4, sort_keys=True), Query,
                The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Responses = General.Request_Handler(
                            JSON_Object['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Object_Response = JSON_Object_Responses[
                            "Filtered"]

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
                            iTunes_Regex = re.search(
                                rf"https\:\/\/apps\.apple\.com\/{Location}\/developer\/[\w\d\-]+\/(id[\d]{{9,10}})\?.+",
                                JSON_Object['artistViewUrl'])

                            if iTunes_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query,
                                    Plugin_Name, JSON_Object_Response,
                                    iTunes_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Object['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Query returned no results; result count was not greater than 0."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Response did not contain a result count."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #7
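# Reddit search plugin built on PRAW. Searches the configured subreddits for
# each query and saves matching submission pages. Note that Results is
# initialised once outside the query loop, so result URLs accumulate across
# queries.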
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Results = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Reddit_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Limit = General.Get_Limit(kwargs)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:
                Reddit_Connection = praw.Reddit(
                    client_id=Reddit_Details[0],
                    client_secret=Reddit_Details[1],
                    user_agent=Reddit_Details[2],
                    username=Reddit_Details[3],
                    password=Reddit_Details[4])
                All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

                for Subreddit in All_Subreddits.search(
                        Query, limit=Limit
                ):  # Limit, subreddit and search to be controlled by the web app.
                    Results.append(Subreddit.url)

            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?"
                )

            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Forum", Task_ID,
                                                     Plugin_Name.lower())

            for Result in Results:

                if Result not in Cached_Data and Result not in Data_to_Cache:

                    try:
                        Reddit_Regex = re.search(
                            r"https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/",
                            Result)

                        if Reddit_Regex:
                            Reddit_Responses = General.Request_Handler(
                                Result,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Reddit_Response = Reddit_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Reddit_Response,
                                Reddit_Regex.group(3), The_File_Extension)

                            if Output_file:
                                Output_Connections.Output(
                                    [Output_file], Result,
                                    General.Get_Title(Result),
                                    Plugin_Name.lower())
                                Data_to_Cache.append(Result)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    except Exception:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file."
                        )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #8
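# Canadian corporate-registry plugin. "CBN" looks up a single Canadian
# Business Number; "CCN" searches by company name and follows each returned
# business number up to the configured limit.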
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = General.Request_Handler(Main_API_URL)
                    JSON_Response = json.loads(Response)
                    Indented_JSON_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                    Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = General.Request_Handler(Main_URL, Filter=True, Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain.replace("beta.", ""), "Company Details", Task_ID, Plugin_Name)
                                    Output_Connections.Output([Main_Output_File, Output_file], Main_URL, f"Canadian Business Number {Query}", Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except Exception:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search.")

                elif Type == "CCN":
                    Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(Query) + '%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = General.Request_Handler(Main_URL)
                    JSON_Response = json.loads(Response)
                    Indented_JSON_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
                    Limit = General.Get_Limit(kwargs)

                    try:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                        Current_Step = 0
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain.replace("beta.", ""), "Company Details", Task_ID, Plugin_Name)

                        for JSON_Item in JSON_Response['docs']:

                            if JSON_Item.get('BN'):
                                CCN = JSON_Item['Company_Name']
                                CBN = JSON_Item['BN']

                                Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    Current_Responses = General.Request_Handler(Full_CCN_URL, Filter=True, Host=f"https://{Domain}")
                                    Current_Response = Current_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), CCN.replace(' ', '-'), The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Full_CCN_URL, f"Canadian Business Number {CBN} for Query {Query}", Concat_Plugin_Name)
                                        Data_to_Cache.append(Full_CCN_URL)

                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response.")

                    except Exception:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type.")

            except Exception:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #9
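# Vulners search plugin. Queries the Vulners API for each term, skips bulletin
# families listed in Unacceptable_Bulletins, and saves the page for each
# remaining result.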
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            vulners_api = vulners.Vulners(api_key=Load_Configuration())
            Search_Response = vulners_api.search(Query, limit=int(Limit))
            JSON_Response = json.dumps(Search_Response,
                                       indent=4,
                                       sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Exploit",
                                                     Task_ID,
                                                     Plugin_Name.lower())

            for Search_Result in Search_Response:

                if Search_Result[
                        "bulletinFamily"] not in Unacceptable_Bulletins:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["vhref"]
                    Search_Result_Responses = General.Request_Handler(
                        Result_URL, Filter=True, Host=f"https://{Domain}")
                    Search_Result_Response = Search_Result_Responses[
                        "Filtered"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name,
                            Search_Result_Response, Result_Title,
                            The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Result_URL, Result_Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.info(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Skipping as bulletin type is not supported."
                    )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #10
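# Microsoft Store plugin. Scrapes the store's search page, extracts product
# links with a regular expression, and saves each product page up to the
# configured limit.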
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://www.{Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = General.Request_Handler(
                Main_URL,
                Application_JSON_CT=True,
                Accept_XML=True,
                Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Win_Store_Response, Query,
                                                 The_File_Extension)
            Win_Store_Regex = re.findall(rf"\/en\-{Location}\/p\/([\w\-]+)\/([\w\d]+)",
                                         Win_Store_Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Application",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = General.Request_Handler(
                        Item_URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Win_Store_Response,
                            Regex_Group_1, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #11
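# eBay search plugin. Calls the Finding API's findItemsAdvanced operation via
# the ebaysdk Connection class and saves each returned item page up to the
# configured limit.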
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Ebay_API_Key = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                API_Request = Connection(appid=Ebay_API_Key, config_file=None)
                API_Response = API_Request.execute('findItemsAdvanced',
                                                   {'keywords': Query})
                JSON_Output_Response = json.dumps(API_Response.dict(),
                                                  indent=4,
                                                  sort_keys=True)
                JSON_Response = json.loads(JSON_Output_Response)
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, JSON_Output_Response, Query,
                    The_File_Extensions["Main"])

                if JSON_Response["ack"] == "Success":
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Search Result", Task_ID,
                        Plugin_Name.lower())
                    Current_Step = 0

                    for JSON_Line in JSON_Response['searchResult']['item']:
                        Ebay_Item_URL = JSON_Line['viewItemURL']
                        Title = "Ebay | " + General.Get_Title(Ebay_Item_URL)

                        if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Ebay_Item_Regex = re.search(
                                r"https\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+",
                                Ebay_Item_URL)
                            # Fall back to the query name if the item URL does
                            # not match the expected pattern, instead of
                            # raising AttributeError on a failed match.
                            Output_Name = Ebay_Item_Regex.group(1).rstrip(
                                "-") if Ebay_Item_Regex else Query
                            Ebay_Item_Responses = General.Request_Handler(
                                Ebay_Item_URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Ebay_Item_Response = Ebay_Item_Responses[
                                "Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                Ebay_Item_Response, Output_Name,
                                The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Ebay_Item_URL,
                                    Title, Plugin_Name.lower())
                                Data_to_Cache.append(Ebay_Item_URL)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                    )

            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #12
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Vehicle Details",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            for State in States:
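                # POST a pre-filled quote session to the Coles Insurance rego-search
                # endpoint, varying only the state and the queried registration
                # plate; States is presumably a module-level list of state codes.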
                Post_URL = f'https://{Domain}/bin/wesfarmers/search/vehiclerego'
                data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car 
Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
                headers = {
                    'Content-Type': 'text/plain;charset=UTF-8',
                    'Accept': '*/*',
                    'Accept-Encoding': 'gzip, deflate, br',
                    'Referer': f'https://{Domain}/motor/get-quote',
                    'Origin': f'https://{Domain}',
                    'Host': Domain
                }
                Registration_Response = General.Request_Handler(
                    Post_URL,
                    Method="POST",
                    Data=data,
                    Optional_Headers=headers)
                Registration_Response = json.loads(Registration_Response)

                try:
                    Title = "Vehicle Registration | " + Registration_Response[
                        'vehicles'][0]['make'] + " " + Registration_Response[
                            'vehicles'][0]['model']
                    Item_URL = Post_URL + "?" + Query

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name,
                            json.dumps(Registration_Response,
                                       indent=4,
                                       sort_keys=True),
                            Title.replace(" ", "-"), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Item_URL,
                                                      Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                except:
                    logging.info(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No result found for the given query {Query} in state {State}."
                    )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #13
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
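            # The custom Referer mirrors the public Doing Business site, which the
            # API gateway presumably requires.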
            Headers_Custom = {
                "Referer":
                f"https://www.doingbusiness.org/en/data/exploreeconomies/{Query}"
            }
            Main_URL = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{Query}"
            Doing_Business_Response = General.Request_Handler(
                Main_URL, Optional_Headers=Headers_Custom)
            JSON_Response = json.loads(Doing_Business_Response)
            JSON_Output_Response = json.dumps(JSON_Response,
                                              indent=4,
                                              sort_keys=True)

            if 'message' not in JSON_Response:
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, JSON_Output_Response, Query,
                    The_File_Extensions["Main"])
                Item_URL = f"https://www.{Domain}/en/data/exploreeconomies/{Query}"
                Title = f"Doing Business | {Query}"
                Current_Doing_Business_Responses = General.Request_Handler(
                    Item_URL, Filter=True, Host=f"https://www.{Domain}")
                Current_Doing_Business_Response = Current_Doing_Business_Responses[
                    "Filtered"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name,
                        Current_Doing_Business_Response, Query,
                        The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(
                            Query, Plugin_Name, Domain, "Economic Details",
                            Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file],
                                                  Item_URL, Title,
                                                  Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No results found; the API returned an error message."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #14
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        VK_Access_Token = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type == "User":
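                # VK's users.search method (API v5.52), requesting an extended set
                # of profile fields and capping results at the configured limit.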
                VK_Response = General.Request_Handler(
                    f"https://api.{Domain}/method/users.search?v=5.52&access_token={VK_Access_Token}&fields=verified, blacklisted, sex, bdate, city, country, home_town, photo_50, photo_100, photo_200_orig, photo_200, photo_400_orig, photo_max, photo_max_orig, online, lists, domain, has_mobile, contacts, site, education, universities, schools, status, last_seen, followers_count, common_count, counters, occupation, nickname, relatives, relation, personal, connections, exports, wall_comments, activities, interests, music, movies, tv, books, games, about, quotes, can_post, can_see_all_posts, can_see_audio, can_write_private_message, timezone, screen_name&q={Query}&count={str(Limit)}"
                )
                JSON_Response = json.loads(VK_Response)
                JSON_Output_Response = json.dumps(JSON_Response,
                                                  indent=4,
                                                  sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, JSON_Output_Response, Query,
                    The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Plugin_Name.lower())
                New_JSON_Response = Recursive_Dict_Check(["response", "items"],
                                                         JSON_Response)

                if New_JSON_Response:

                    for VK_Item_Line in New_JSON_Response:

                        try:

                            if all(Item in VK_Item_Line for Item in
                                   ["first_name", "last_name", "screen_name"]):
                                VK_URL = f"https://{Domain}/" + VK_Item_Line[
                                    'screen_name']
                                Full_Name = VK_Item_Line[
                                    "first_name"] + " " + VK_Item_Line[
                                        "last_name"]
                                Title = f"VK User | {Full_Name}"

                                if VK_URL not in Cached_Data and VK_URL not in Data_to_Cache:
                                    VK_Item_Responses = General.Request_Handler(
                                        VK_URL,
                                        Filter=True,
                                        Host=f"https://{Domain}")
                                    VK_Item_Response = VK_Item_Responses[
                                        "Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Plugin_Name,
                                        VK_Item_Response, VK_URL,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file], VK_URL,
                                            Title, Plugin_Name.lower())
                                        Data_to_Cache.append(VK_URL)

                                    else:
                                        logging.warning(
                                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                        except Exception as e:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                    )

            if Type == "Group":
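                # groups.search mirrors the user branch above, but needs no extra
                # profile fields.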
                VK_Response = General.Request_Handler(
                    f"https://api.{Domain}/method/groups.search?v=5.52&access_token={VK_Access_Token}&q={Query}&count={str(Limit)}"
                )
                JSON_Response = json.loads(VK_Response)
                JSON_Output_Response = json.dumps(JSON_Response,
                                                  indent=4,
                                                  sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, JSON_Output_Response, Query,
                    The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Social Media - Group",
                    Task_ID, Plugin_Name.lower())
                New_JSON_Response = Recursive_Dict_Check(["response", "items"],
                                                         JSON_Response)

                if New_JSON_Response:

                    for VK_Item_Line in New_JSON_Response:

                        try:

                            if all(Item in VK_Item_Line
                                   for Item in ["name", "screen_name"]):
                                VK_URL = f"https://{Domain}/" + VK_Item_Line[
                                    'screen_name']
                                Full_Name = VK_Item_Line["name"]
                                Title = f"VK Group | {Full_Name}"

                                if VK_URL not in Cached_Data and VK_URL not in Data_to_Cache:
                                    VK_Item_Responses = General.Request_Handler(
                                        VK_URL,
                                        Filter=True,
                                        Host=f"https://{Domain}")
                                    VK_Item_Response = VK_Item_Responses[
                                        "Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Plugin_Name,
                                        VK_Item_Response, VK_URL,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file], VK_URL,
                                            Title, Plugin_Name.lower())
                                        Data_to_Cache.append(VK_URL)

                                    else:
                                        logging.warning(
                                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                        except Exception as e:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                    )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #15
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
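            # Craigslist exposes search results as RSS; feedparser turns the feed
            # into a dictionary whose "items" hold one entry per listing.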
            Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
            Craigslist_Response = feedparser.parse(Main_URL)
            Craigslist_Items = Craigslist_Response["items"]
            Current_Step = 0

            for Item in Craigslist_Items:
                Item_URL = Item["link"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                        Limit):
                    Craigslist_Responses = General.Request_Handler(
                        Item_URL,
                        Filter=True,
                        Host=
                        f"https://{Craigslist_Location.lower()}.craigslist.org"
                    )
                    Craigslist_Response = Craigslist_Responses["Filtered"]
                    Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                    Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org"
                    Filename = Item_URL.replace(Local_URL, "")
                    Filename = Filename.replace(".html/", "")
                    Filename = Filename.replace(".html", "")
                    Filename = Filename.replace("/", "-")
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Craigslist_Response,
                        Filename, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(
                            Query, Plugin_Name, Local_Domain, "Search Result",
                            Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Item_URL,
                                                  General.Get_Title(Item_URL),
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

                    Current_Step += 1

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #16
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
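            # tpbc.herokuapp.com is presumably an unofficial Pirate Bay search API
            # returning JSON; results are requested sorted by seeder count.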
            Response = General.Request_Handler(
                'https://tpbc.herokuapp.com/search/' +
                Query.replace(" ", "+") + '/?sort=seeds_desc')
            Response = json.loads(Response)
            JSON_Response = json.dumps(Response, indent=4, sort_keys=True)
            Output_file = General.Main_File_Create(Directory, Plugin_Name,
                                                   JSON_Response, Query,
                                                   The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Torrent", Task_ID,
                    Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, JSON_Response,
                            Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                [Output_file], Result_URL,
                                General.Get_Title(Result_URL),
                                Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #17
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type == "pin":
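                # Fetch a single pin by ID from the Pinterest v1 API; the "fields"
                # parameter is URL-encoded, "%2C" being a comma.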
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = f"https://api.{Domain}/v1/pins/{Query}/?access_token=" + Load_Configuration() + "&fields=id%2Clink%2Cnote%2Curl%2Ccreated_at%2Cmedia%2Coriginal_link%2Cmetadata%2Ccounts%2Ccolor%2Cboard%2Cattribution"
                Search_Response = General.Request_Handler(Request_URL)
                Search_Response = json.loads(Search_Response)

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = json.dumps(Search_Response, indent=4, sort_keys=True)
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Result_Title = "Pinterest | " + Search_Response["data"]["metadata"]["link"]["title"]
                    Result_URL = Search_Response["data"]["url"]
                    Search_Result_Response = General.Request_Handler(Result_URL)

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Media", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            elif Type == "board":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = "https://api.pinterest.com/v1/boards/" + Query + "/pins/?access_token=" + Load_Configuration() + "&fields=id%2Clink%2Cnote%2Curl%2Coriginal_link%2Cmetadata%2Cmedia%2Cimage%2Ccreator%2Ccreated_at%2Ccounts%2Ccolor%2Cboard%2Cattribution&limit=" + str(Limit) + ""
                Search_Response = General.Request_Handler(Request_URL)
                Search_Response = json.loads(Search_Response)

                if Search_Response.get('message') != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = json.dumps(Search_Response, indent=4, sort_keys=True)
                    Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Page", Task_ID, Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Response in Search_Response["data"]:
                        Result_Title = "Pinterest | " + Response["note"]
                        Result_URL = Response["url"]
                        Search_Result_Response = General.Request_Handler(Result_URL)

                        if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Search_Result_Response, Result_Title, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Result_URL, Result_Title, Plugin_Name.lower())
                                Data_to_Cache.append(Result_URL)
                                Current_Step += 1

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #18
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Shodan_API_Key = Load_Configuration()
        API_Session = Shodan(Shodan_API_Key)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:

                if Type == "Search":
                    Local_Plugin_Name = Plugin_Name + "-Search"

                    try:
                        API_Response = API_Session.search(Query)

                    except Exception as e:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}."
                        )
                        break

                    JSON_Output_Response = json.dumps(API_Response,
                                                      indent=4,
                                                      sort_keys=True)
                    Main_File = General.Main_File_Create(
                        Directory, Local_Plugin_Name, JSON_Output_Response,
                        Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain, "Domain Information",
                        Task_ID, Plugin_Name.lower())
                    Current_Step = 0

                    for Shodan_Item in API_Response["matches"]:
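                        # Each match names its banner module under "_shodan"; the
                        # "-simple-new" suffix is stripped so that, for example,
                        # "https-simple-new" collapses to "https" for the URL below.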
                        Shodan_Item_Module = Shodan_Item['_shodan']['module']
                        Shodan_Item_Module = Shodan_Item_Module.replace(
                            '-simple-new', '')

                        if Shodan_Item_Module.startswith("http"):
                            Shodan_Item_Host = ""
                            Shodan_Item_Port = 0

                            if 'http' in Shodan_Item:
                                Shodan_Item_Host = Shodan_Item['http']['host']
                                Shodan_Item_Response = Shodan_Item['http'][
                                    'html']

                            elif 'ip_str' in Shodan_Item and 'domains' in Shodan_Item and len(
                                    Shodan_Item['domains']) > 0:
                                Shodan_Item_Host = Shodan_Item['domains'][0]
                                Shodan_Item_Response = Shodan_Item['data']

                            elif 'ip_str' in Shodan_Item and 'domains' not in Shodan_Item:
                                Shodan_Item_Host = Shodan_Item['ip_str']
                                Shodan_Item_Response = Shodan_Item['data']

                            if Shodan_Item_Host:

                                if 'port' in Shodan_Item:

                                    if int(Shodan_Item['port']) not in [80, 443]:
                                        Shodan_Item_Port = Shodan_Item['port']

                                if Shodan_Item_Port != 0:
                                    Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}:{str(Shodan_Item_Port)}"

                                else:
                                    Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}"

                                Title = "Shodan | " + str(Shodan_Item_Host)

                                if Shodan_Item_URL not in Cached_Data and Shodan_Item_URL not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Local_Plugin_Name,
                                        Shodan_Item_Response, Shodan_Item_Host,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Shodan_Item_URL, Title,
                                            Plugin_Name.lower())
                                        Data_to_Cache.append(Shodan_Item_URL)

                                    else:
                                        logging.warning(
                                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

                elif Type == "Host":
                    Local_Plugin_Name = Plugin_Name + "-Host"

                    try:
                        API_Response = API_Session.host(Query)

                    except Exception as e:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}."
                        )
                        break

                    JSON_Output_Response = json.dumps(API_Response,
                                                      indent=4,
                                                      sort_keys=True)
                    Main_File = General.Main_File_Create(
                        Directory, Local_Plugin_Name, JSON_Output_Response,
                        Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain, "Domain Information",
                        Task_ID, Plugin_Name.lower())
                    Shodan_URL = f"https://www.{Domain}/host/{Query}"
                    Title = "Shodan | " + Query

                    if Shodan_URL not in Cached_Data and Shodan_URL not in Data_to_Cache:
                        Shodan_Responses = General.Request_Handler(
                            Shodan_URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Shodan_Response = Shodan_Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Shodan_Response,
                            Query, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Shodan_URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Shodan_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Skipping previously cached result."
                        )

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to complete task."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #19
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
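                # The Play Store web UI is backed by a batchexecute RPC endpoint;
                # the "f.req" envelope below appears to be reverse-engineered, with
                # the search term spliced into an otherwise opaque payload.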
                body = {
                    "f.req":
                    f'''[[["lGYRle","[[[],[[10,[10,50]],true,null,[96,27,4,8,57,30,110,11,16,49,1,3,9,12,104,55,56,51,10,34,31,77,145],[null,null,null,[[[[7,31],[[1,52,43,112,92,58,69,31,19,96,103]]]]]]],[\\"{Query}\\"],7,[null,1]]]",null,"2"]]]'''
                }
                Play_Store_Response = General.Request_Handler(
                    f"https://{Domain}/_/PlayStoreUi/data/batchexecute",
                    Method="POST",
                    Data=body)
                Play_Store_Response = Play_Store_Response.replace(
                    ')]}\'\n\n', "").replace("\\\\u003d", "=")
                Play_Store_Response_JSON = json.dumps(
                    json.loads(Play_Store_Response), indent=4, sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, Play_Store_Response_JSON, Query,
                    The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Application", Task_ID,
                    Concat_Plugin_Name)
                Play_Store_Regex = re.findall(
                    r"(\/store\/apps\/details\?id\\\\([\w\d\.]+))\\\"",
                    Play_Store_Response)
                Current_Step = 0

                for Result, Item in Play_Store_Regex:
                    Result = Result.replace("\\\\u003d", "=")
                    Result_URL = f"https://{Domain}{Result}"
                    Item = Item.replace("u003d", "")
                    Title = f"Play Store | {Item}"

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Play_Store_Responses = General.Request_Handler(
                            Result_URL, Filter=True, Host=f"https://{Domain}")
                        Play_Store_Response = Play_Store_Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Play_Store_Response,
                            Item, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Result_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results, this may be due to the query provided."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #20
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        try:
            Flickr_Details = Load_Configuration()
            flickr_api.set_keys(api_key=Flickr_Details[0], api_secret=Flickr_Details[1])

        except:
            logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to establish API identity.")

        for Query in Query_List:
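            # Flickr can resolve a person either by email address or by username,
            # so route the query based on a rough email regex.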
            Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

            if Email_Regex:

                try:
                    User = flickr_api.Person.findByEmail(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Convert_to_JSON(Photos), Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Social Media - Media", Task_ID, Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.{Domain}/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Photo_Response = General.Request_Handler(Photo_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Photo_URL, General.Get_Title(Photo_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No photos found.")

                except:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call.")

            else:

                try:
                    User = flickr_api.Person.findByUserName(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(Directory, Plugin_Name, Convert_to_JSON(Photos), Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Data Leakage", Task_ID, Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.{Domain}/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(Limit):
                                Photo_Response = General.Request_Handler(Photo_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Photo_Response, str(Photo['id']), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Photo_URL, General.Get_Title(Photo_URL), Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No photos found.")

                except:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #21
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Query_List.index(Query) != 0:
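                # Brief pause between consecutive queries, presumably to avoid
                # rate limiting by the username-search service.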
                time.sleep(5)

            Main_URL = f"https://{Domain}/results_normal.php"
            body = {"ran": "", "username": Query}
            Responses = General.Request_Handler(
                Main_URL,
                Method="POST",
                Data=body,
                Filter=True,
                Host=f"https://{Domain}",
                Optional_Headers={
                    "Content-Type": "application/x-www-form-urlencoded"
                })
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Filtered_Response, Query,
                                                 The_File_Extension)
            Link_Regex = re.findall(
                r"\<a\sclass\=\"pretty-button results-button\"\shref\=\"(https?:\/\/(www\.)?[-a-zA-Z0-9@:%_\+~#=\.\/\?]+)\"\starget\=\"\_blank\"\>View Profile\<\/a\>",
                Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Account",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            if Link_Regex:
                Current_Step = 0

                for Item_URL, WWW in Link_Regex:
                    Responses = General.Request_Handler(
                        Item_URL, Filter=True, Host=f"https://{Domain}")
                    Response = Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Response, Item_URL,
                            The_File_Extension)

                        if Output_file:
                            Title = f"Username Search | {Item_URL}"
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #22
def Transaction_Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type != "monero":
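
                # Validate the query as a transaction hash first: 64 hex characters
                # for BTC/BCH, "0x" plus 64 hex characters for ETH.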

                if Type == "btc" or Type == "bch":
                    Query_Regex = re.search(r"[\d\w]{64}", Query)

                elif Type == "eth":
                    Query_Regex = re.search(r"(0x[\d\w]{64})", Query)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                    )

                if Query_Regex:
                    Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}"
                    Main_Response = General.Request_Handler(Main_URL)

                    if Type == "btc":
                        Address_Regex = re.findall(
                            r"\/btc\/address\/([\d\w]{26,34})", Main_Response)

                    elif Type == "bch":
                        Address_Regex = re.findall(r"([\d\w]{42})",
                                                   Main_Response)

                    elif Type == "eth":
                        Address_Regex = re.findall(r"(0x[\w\d]{40})",
                                                   Main_Response)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                        )

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, Domain,
                            "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Transaction_Responses = General.Request_Handler(
                                    Query_URL,
                                    Filter=True,
                                    Host=f"https://www.{Domain}")
                                Transaction_Response = Transaction_Responses[
                                    "Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Local_Plugin_Name,
                                    Transaction_Response, Transaction,
                                    The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output(
                                        [Output_file], Query_URL,
                                        General.Get_Title(Query_URL),
                                        Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                        )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                    )

            else:
                Alt_Domain = "localmonero.co"
                Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}"
                Transaction_Response = General.Request_Handler(Query_URL)

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Responses = General.Request_Handler(
                        Query_URL, Filter=True, Host=f"https://{Alt_Domain}")
                    Transaction_Response = Transaction_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Local_Plugin_Name, Transaction_Response,
                        Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, Alt_Domain,
                            "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output(
                            [Output_file], Query_URL,
                            General.Get_Title_Requests_Module(Query_URL),
                            Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name,
                                "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name,
                                "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #23
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            URL_Regex = General.Regex_Checker(Query, "URL")

            if URL_Regex:
                BW_Info = builtwith(Query)

                if BW_Info:
                    BW_JSON_Output = json.dumps(BW_Info,
                                                indent=4,
                                                sort_keys=True)
                    URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        URL_Extension = URL_Regex.group(4) + URL_Regex.group(
                            5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        URL_Extension = URL_Regex.group(4) + URL_Regex.group(5)

                    else:
                        URL_Extension = URL_Regex.group(4)

                    Query_Domain = URL_Body + URL_Extension
                    Title = f"Built With | {Query_Domain}"
                    Main_File = General.Main_File_Create(
                        Directory, Plugin_Name, BW_JSON_Output, Query_Domain,
                        The_File_Extensions["Main"])
                    BW_Search_URL = f"https://{Domain}/{Query_Domain}"
                    Responses = General.Request_Handler(
                        BW_Search_URL, Filter=True, Host=f"https://{Domain}")
                    Response = Responses["Filtered"]
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain,
                        "Web Application Architecture", Task_ID,
                        Plugin_Name.lower())

                    if BW_Search_URL not in Cached_Data and BW_Search_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Response, Query,
                            The_File_Extensions['Query'])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      BW_Search_URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(BW_Search_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.info(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get result for provided query."
                    )

            else:
                logging.info(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #24
    def Character_Switch(self, Alphabets, Comprehensive_Search):

        try:
            Local_Plugin_Name = self.Plugin_Name + "-Character-Switch"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            logging.info(
                f"{General.Date()} {__name__.strip('plugins.')} - Character Switching Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)
            # Keep the base name so the per-alphabet suffix below does not
            # accumulate across queries.
            Base_Local_Plugin_Name = Local_Plugin_Name

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Please provide valid URLs."
                    )
                    continue

                logging.info(
                    f'{General.Date()} - Provided domain body - {self.URL_Body}'
                )
                URL_List = list(self.URL_Body.lower())
                Local_Plugin_Name = f"{Local_Plugin_Name}-{Alphabets}"

                if Alphabets == "Latin":

                    if not Comprehensive_Search:

                        if len(self.URL_Body) > 15:
                            logging.error(
                                f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Condensed punycode domain fuzzing only allows a maximum of 15 characters."
                            )
                            return None

                        else:
                            Altered_URLs = Rotor.Search(
                                URL_List,
                                English_Upper=False,
                                Numbers=False,
                                Special_Characters=False,
                                Asian=False,
                                Latin=True,
                                Middle_Eastern=False,
                                Native_American=False,
                                North_African=False,
                                Latin_Alternatives=True,
                                Comprehensive=False)

                    else:

                        if len(self.URL_Body) > 10:
                            logging.error(
                                f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Comprehensive punycode domain fuzzing searching only allows a maximum of 10 characters."
                            )
                            return None

                        else:
                            Altered_URLs = Rotor.Search(
                                URL_List,
                                English_Upper=False,
                                Numbers=False,
                                Special_Characters=False,
                                Asian=False,
                                Latin=True,
                                Middle_Eastern=False,
                                Native_American=False,
                                North_African=False,
                                Latin_Alternatives=True,
                                Comprehensive=True)

                elif Alphabets == "Asian":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Punycode domain fuzzing for Asian alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=True,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=False,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "Middle Eastern":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Punycode domain fuzzing for Middle Eastern alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=True,
                                                    Native_American=False,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "Native American":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Punycode domain fuzzing for Native American alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=True,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "North African":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Punycode domain fuzzing for North African alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=False,
                                                    North_African=True,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid alphabet provided."
                    )
                    continue

                logging.info(
                    f'{General.Date()} - Generated domain combinations - {", ".join(Altered_URLs)}'
                )
                Pool = mpool.ThreadPool(
                    int(multiprocessing.cpu_count()) *
                    int(multiprocessing.cpu_count()))
                Pool_Threads = []

                for Altered_URL in Altered_URLs:

                    if not Altered_URL == self.URL_Body:
                        Thread = Pool.apply_async(self.Query_URL,
                                                  args=(
                                                      Altered_URL,
                                                      self.URL_Extension,
                                                  ))
                        Pool_Threads.append(Thread)

                [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
                logging.info(f'{General.Date()} {Directory}')
                URL_Domain = self.URL_Body + self.URL_Extension
                logging.info(URL_Domain)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])
                logging.info(Main_File)

                if Main_File:

                    for Host in self.Valid_Hosts:
                        Current_Domain = Host[0].replace("https://",
                                                         "").replace(
                                                             "http://", "")

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_File_List = [Main_File, Output_File]
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_File_List,
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        except requests.exceptions.ConnectionError:
                            Output_File_List = [Main_File]
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                Output_File_List,
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

            if self.Cached_Data:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #25
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if General.Regex_Checker(Query, "Domain"):
                Request = f"https://{Domain}/?q={Query}"
                Responses = General.Request_Handler(Request,
                                                    Accept_XML=True,
                                                    Accept_Language_EN_US=True,
                                                    Filter=True,
                                                    Host=f"https://{Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]

                if "<TD class=\"outer\"><I>None found</I></TD>" not in Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query,
                                Plugin_Name.lower(), Filtered_Response,
                                SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain, "Certificate",
                                    Task_ID, Plugin_Name.lower())
                                Output_Connections.Output(
                                    [Output_file], Request,
                                    f"Subdomain Certificate Search for {Query}",
                                    Plugin_Name.lower())
                                Data_to_Cache.append(Request)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        except:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file."
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Query does not exist."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #26
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Tor_Pull_URL = Tor_General_URL + Query
            Tor_Scrape_URLs = General.Request_Handler(
                Tor_Pull_URL, Scrape_Regex_URL=Tor_Scrape_Regex_URL)

            if Tor_Scrape_URLs:
                Output_file = General.Main_File_Create(
                    Directory, Tor_Plugin_Name.lower(),
                    "\n".join(Tor_Scrape_URLs), Query, The_File_Extension)

                if Output_file:
                    Current_Step = 0
                    Output_Connections = General.Connections(
                        Query, Tor_Plugin_Name, "ahmia.fl", "Darkweb Link",
                        Task_ID, Plugin_Name.lower())

                    for URL in Tor_Scrape_URLs:

                        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Title = f"Ahmia Tor | {URL}"
                            Output_Connections.Output([Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)
                            Current_Step += 1

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                    )

            else:
                logging.info(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No Tor links scraped."
                )

            I2P_Pull_URL = I2P_General_URL + Query
            I2P_Scrape_URLs = General.Request_Handler(
                I2P_Pull_URL, Scrape_Regex_URL=I2P_Scrape_Regex_URL)

            if I2P_Scrape_URLs:
                Output_file = General.Main_File_Create(
                    Directory, I2P_Plugin_Name.lower(),
                    "\n".join(I2P_Scrape_URLs), Query, The_File_Extension)

                if Output_file:
                    Current_Step = 0
                    Output_Connections = General.Connections(
                        Query, I2P_Plugin_Name, "ahmia.fl", "Darkweb Link",
                        Task_ID, Plugin_Name.lower())

                    for URL in I2P_Scrape_URLs:

                        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Title = f"Ahmia I2P | {URL}"
                            Output_Connections.Output([Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)
                            Current_Step += 1

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                    )

            else:
                logging.info(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No I2P links scraped."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #27
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            # Query can be Title or ISBN
            Main_URL = f"http://{Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = General.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Lib_Gen_Response, Query,
                                                 The_File_Extension)
            Lib_Gen_Regex = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}",
                                       Lib_Gen_Response)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace(
                        "Genesis:", "Genesis |")
                    Lib_Item_Responses = General.Request_Handler(
                        Item_URL, Filter=True, Host=f"http://{Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Lib_Item_Response,
                            Regex, The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain, "Publication",
                                Task_ID, Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #28
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Subdomains = Load_Configuration()
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Subdomains:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'

            else:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&expand=dns_names&expand=issuer&expand=cert'

            Response = General.Request_Handler(Request)
            JSON_Response = json.loads(Response)

            if 'exists' not in JSON_Response:

                if JSON_Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:

                            if General.Regex_Checker(Query, "Domain"):
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name.lower(), json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Certificate", Task_ID, Plugin_Name.lower())
                                    Data_to_Cache.append(Request)

                                    if Subdomains:
                                        Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output([Output_file], Request, f"Domain Certificate Search for {Query}", Plugin_Name.lower())

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

                        except:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - No response.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Query does not exist.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #29
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "ABN":
                    Main_URL = f'https://{Domain}/ABN/View?id={Query}'
                    Responses = General.Request_Handler(
                        Main_URL, Filter=True, Host=f"https://www.{Domain}")
                    Response = Responses["Regular"]

                    try:

                        if 'Error searching ABN Lookup' not in Response:
                            Query = str(int(Query))
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    General.Get_Title(Main_URL),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name, Domain,
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Output_file], Main_URL,
                                        General.Get_Title(Main_URL).replace(
                                            " | ABN Lookup", ""),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - ABN Lookup returned error."
                            )

                    except:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ABN Search."
                        )

                elif Type == "ACN":
                    Main_URL = f'https://{Domain}/Search/Run'
                    Data = {
                        'SearchParameters.SearchText': Query,
                        'SearchParameters.AllNames': 'true',
                        'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton': 'Search'
                    }
                    Responses = General.Request_Handler(
                        Main_URL,
                        Method="POST",
                        Filter=True,
                        Host=f"https://www.{Domain}",
                        Data=Data)
                    Response = Responses["Regular"]
                    Filtered_Response = Responses["Filtered"]
                    Limit = General.Get_Limit(kwargs)

                    try:
                        ACN_Regex = re.search(r".*[a-zA-Z].*", Query)

                        if ACN_Regex:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Filtered_Response,
                                Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            ABNs_Regex = re.findall(
                                r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,",
                                Response)

                            if ABNs_Regex:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain,
                                    "Company Details", Task_ID, Plugin_Name)

                                for ABN_URL, ACN in ABNs_Regex:
                                    Full_ABN_URL = f'https://{Domain}/ABN/View?abn={ABN_URL}'

                                    if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(
                                            Limit):
                                        ACN = ACN.rstrip()
                                        Current_Responses = General.Request_Handler(
                                            Full_ABN_URL,
                                            Filter=True,
                                            Host=f"https://www.{Domain}")
                                        Current_Response = Current_Responses[
                                            "Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            ACN.replace(' ', '-'),
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_ABN_URL,
                                                General.Get_Title(Full_ABN_URL)
                                                .strip(" | ABN Lookup"),
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_ABN_URL)

                                        else:
                                            logging.warning(
                                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Response did not match regular expression."
                                )

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Query did not match regular expression."
                            )

                    except:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ACN Search."
                        )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type."
                    )

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #30
def Search(Query_List, Task_ID, Type, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type == "User":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.user(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                      indent=4,
                                                      sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_owner_to_timeline_media"][
                    "edges"]
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post["node"]["shortcode"]
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = General.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Tag":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.tag(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                      indent=4,
                                                      sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_hashtag_to_media"]["edges"]
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post["node"]["shortcode"]
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = General.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Location":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.location(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                      indent=4,
                                                      sort_keys=True)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_location_to_media"]["edges"]
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Place",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post["node"]["shortcode"]
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = General.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Media":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.media(Query)

                if CSE_Response:
                    CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                          indent=4,
                                                          sort_keys=True)
                    Main_File = General.Main_File_Create(
                        Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                        Query, The_File_Extensions["Main"])
                    URL = f"https://www.{Domain}/p/{Query}/"
                    Title = "IG | " + General.Get_Title(URL)

                    if URL not in Cached_Data and URL not in Data_to_Cache:
                        Responses = General.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Domain,
                                "Social Media - Media", Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid response."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")