예제 #1
0
def Search(Query_List, Task_ID, Limit=10):
    """Search the Vulners API for each query and output any new exploit results.

    Args:
        Query_List: Query string or list of queries (normalised via
            General.Convert_to_List).
        Task_ID: Task identifier used when creating output connections.
        Limit: Maximum number of search results per query (default 10,
            normalised via General.Get_Limit).

    Returns:
        None. Results are written to output files, connections and the cache;
        any failure is logged as a warning.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())

        # Per-plugin file logging.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            vulners_api = vulners.Vulners(api_key=Load_Configuration())
            Search_Response = vulners_api.search(Query, limit=int(Limit))
            JSON_Object = Common.JSON_Handler(Search_Response)
            JSON_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Exploit",
                                                     Task_ID,
                                                     Plugin_Name.lower())

            for Search_Result in Search_Response:

                # Only process bulletin families this plugin supports.
                if Search_Result["bulletinFamily"] not in Unacceptable_Bulletins:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["vhref"]
                    Search_Result_Responses = Common.Request_Handler(
                        Result_URL, Filter=True, Host=f"https://{Domain}")
                    Search_Result_Response = Search_Result_Responses["Filtered"]

                    # Skip results already cached or already collected in this run.
                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name,
                            Search_Result_Response, Result_Title,
                            The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Result_URL, Result_Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            # BUG FIX: str.strip removes a character *set*, not a
                            # prefix; use replace to drop the "plugins." prefix.
                            logging.warning(
                                f"{Common.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.info(
                        f"{Common.Date()} - {__name__.replace('plugins.', '')} - Skipping as bulletin type is not supported."
                    )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
예제 #2
0
    def Character_Switch(self, Alphabets, Comprehensive_Search):
        """Generate look-alike (punycode-style) domain permutations for each
        query and report any permuted domains that resolve.

        Args:
            Alphabets: Alphabet family to substitute characters from; one of
                "Latin", "Asian", "Middle Eastern", "Native American" or
                "North African".
            Comprehensive_Search: Latin only — use the larger comprehensive
                substitution set (maximum body length 10 instead of 15).

        Returns:
            None. Results are written to output files, connections and the
            cache; failures are logged as warnings.
        """

        try:
            Local_Plugin_Name = self.Plugin_Name + "-Character-Switch"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)

            # Per-plugin file logging.
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)

            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            # BUG FIX: str.strip removes a character *set*, not a prefix; use
            # replace to drop the "plugins." package prefix.
            Module_Name = __name__.replace("plugins.", "")
            logging.info(
                f"{General.Date()} {Module_Name} - Character Switching Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)

            # BUG FIX: keep the base name so the alphabet suffix is not
            # appended repeatedly across queries ("...-Latin-Latin-...").
            Base_Plugin_Name = Local_Plugin_Name

            # Rotor flag name per non-Latin alphabet family (Latin is handled
            # separately because its limits depend on Comprehensive_Search).
            Alphabet_Flags = {
                "Asian": "Asian",
                "Middle Eastern": "Middle_Eastern",
                "Native American": "Native_American",
                "North African": "North_African",
            }

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    # Reassemble the extension: TLD plus any optional trailing groups.
                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} - {Module_Name} - Please provide valid URLs."
                    )
                    # BUG FIX: skip invalid queries instead of falling through
                    # with an unset (or stale, from a previous query) URL body.
                    continue

                logging.info(
                    f'{General.Date()} - Provided domain body - {self.URL_Body}'
                )
                Local_Plugin_Name = f"{Base_Plugin_Name}-{Alphabets}"

                # All Rotor alphabets off by default; the selected family is
                # switched on below. (BUG FIX: the original raised NameError on
                # an unrecognised alphabet because Altered_URLs stayed unbound.)
                Rotor_Flags = {
                    "English_Upper": False,
                    "Numbers": False,
                    "Special_Characters": False,
                    "Asian": False,
                    "Latin": False,
                    "Middle_Eastern": False,
                    "Native_American": False,
                    "North_African": False,
                    "Latin_Alternatives": False,
                    "Comprehensive": False,
                }

                if Alphabets == "Latin":
                    Max_Length = 10 if Comprehensive_Search else 15
                    Description = ("Comprehensive" if Comprehensive_Search else
                                   "Condensed") + " punycode domain fuzzing"
                    Rotor_Flags["Latin"] = True
                    Rotor_Flags["Latin_Alternatives"] = True
                    Rotor_Flags["Comprehensive"] = bool(Comprehensive_Search)

                elif Alphabets in Alphabet_Flags:
                    Max_Length = 10
                    Description = f"Punycode domain fuzzing for {Alphabets} alphabets"
                    Rotor_Flags[Alphabet_Flags[Alphabets]] = True

                else:
                    logging.warning(
                        f"{General.Date()} - {Module_Name} - Invalid alphabet provided."
                    )
                    return None

                # BUG FIX: the original error messages had copy-pasted lengths
                # and alphabet names; the message is now derived from the limit.
                if len(self.URL_Body) > Max_Length:
                    logging.error(
                        f"{General.Date()} - {Module_Name} - The length of the body of the provided query: {Query} is greater than {Max_Length} characters in length. {Description} only allows a maximum of {Max_Length} characters."
                    )
                    return None

                URL_List = list(self.URL_Body.lower())
                Altered_URLs = Rotor.Search(URL_List, **Rotor_Flags)
                logging.info(
                    f'{General.Date()} - Generated domain combinations - {", ".join(Altered_URLs)}'
                )

                # Probe each altered domain asynchronously; pool sized at
                # cpu_count squared, matching the original behaviour.
                Pool = mpool.ThreadPool(
                    int(multiprocessing.cpu_count()) *
                    int(multiprocessing.cpu_count()))
                Pool_Threads = [
                    Pool.apply_async(self.Query_URL,
                                     args=(Altered_URL, self.URL_Extension))
                    for Altered_URL in Altered_URLs
                    if Altered_URL != self.URL_Body
                ]

                for Pool_Thread in Pool_Threads:
                    Pool_Thread.wait()

                logging.info(f'{General.Date()} {Directory}')
                URL_Domain = self.URL_Body + self.URL_Extension
                logging.info(URL_Domain)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])
                logging.info(Main_File)

                if Main_File:

                    for Host in self.Valid_Hosts:
                        # BUG FIX: strip('https://') removes a character *set*
                        # and could eat legitimate leading/trailing host
                        # characters; remove the scheme prefix explicitly.
                        Current_Domain = Host[0]

                        for Scheme in ("https://", "http://"):

                            if Current_Domain.startswith(Scheme):
                                Current_Domain = Current_Domain[len(Scheme):]

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    [Main_File, Output_File],
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {Module_Name} - Failed to create output file. File may already exist."
                                )

                        except requests.exceptions.ConnectionError:
                            # Unreachable hosts are still reported, but without
                            # a per-query output file.
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                [Main_File],
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

            # Append to an existing cache, otherwise create a new one.
            if self.Cached_Data:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
예제 #3
0
def Search(Query_List, Task_ID, **kwargs):
    """Search the Microsoft (Windows) Store for each query and record new item pages.

    Args:
        Query_List: Query string or list of queries (normalised via
            General.Convert_to_List).
        Task_ID: Task identifier used when creating output connections.
        **kwargs: Optional "Limit" — a positive integer capping the number of
            results processed per query; defaults to 10.

    Returns:
        None. Results are written to output files, connections and the cache.
    """
    Data_to_Cache = []

    # BUG FIX: the original left Limit unbound (NameError later) when
    # kwargs["Limit"] was supplied but not a positive integer.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    # Per-plugin file logging.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Location = General.Load_Location_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    # Shared request headers, hoisted out of the loops (the original rebuilt an
    # identical dict per request). BUG FIX: the Accept header read "ext/html"
    # instead of "text/html".
    headers = {
        'Content-Type': 'application/json',
        'User-Agent':
        'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }

    for Query in Query_List:
        Main_URL = "https://www.microsoft.com/en-" + Location + "/search?q=" + Query
        Win_Store_Response = requests.get(Main_URL, headers=headers).text
        General.Main_File_Create(Directory, Plugin_Name, Win_Store_Response,
                                 Query, The_File_Extension)
        # NOTE(review): this pattern is hard-coded to "en-au" while Main_URL
        # uses the configured Location — confirm whether it should use Location.
        Win_Store_Regex = re.findall(r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)",
                                     Win_Store_Response)

        if Win_Store_Regex:
            Current_Step = 0

            for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                Item_URL = "https://www.microsoft.com/en-au/p/" + Regex_Group_1 + "/" + Regex_Group_2

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                        Limit):
                    # Fetch the item page only when it will actually be used
                    # (the original fetched every item, even past the limit).
                    Item_Response = requests.get(Item_URL,
                                                 headers=headers).text
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Item_Response,
                        Regex_Group_1, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Item_URL, "microsoft.com",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Item_URL),
                                            Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

    # Append to an existing cache, otherwise create a new one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    logging.info('Windows Store Search Plugin Terminated.')
예제 #4
0
def Search(Query_List, Task_ID, Type, **kwargs):
    """Search the New Zealand Companies Register by business number or company name.

    Args:
        Query_List: Query string or list of queries (normalised via
            General.Convert_to_List).
        Task_ID: Task identifier used when creating output connections.
        Type: "NZBN" for a business-number lookup, or "NZCN" for a
            company-name search.
        **kwargs: Optional "Limit" used by NZCN searches (via General.Get_Limit).

    Returns:
        None. Results are written to output files, connections and the cache;
        failures are logged as warnings.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)

        # Per-plugin file logging.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "NZBN":
                    Main_URL = f'https://{Domain}/companies/app/ui/pages/companies/search?q={Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Responses = General.Request_Handler(
                        Main_URL, Filter=True, Host=f"https://{Domain}")
                    Response = Responses["Filtered"]

                    try:

                        if 'An error has occurred and the requested action cannot be performed.' not in Response:
                            # Raises ValueError for non-numeric NZBN queries,
                            # reported by the handler below.
                            Query = str(int(Query))

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    f"new-zealand-business-number-{Query.lower()}",
                                    The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name, Domain,
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Output_file], Main_URL,
                                        f"New Zealand Business Number {Query}",
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    # BUG FIX: str.strip removes a character
                                    # *set*, not a prefix; use replace.
                                    logging.warning(
                                        f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                                    )

                    # BUG FIX: narrowed from a bare "except:" so SystemExit /
                    # KeyboardInterrupt are no longer swallowed.
                    except Exception:
                        logging.warning(
                            f"{General.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for NZBN Search."
                        )

                elif Type == "NZCN":

                    try:
                        Limit = General.Get_Limit(kwargs)
                        URL_Query = urllib.parse.quote(Query)
                        Main_URL = f'https://{Domain}/companies/app/ui/pages/companies/search?q={URL_Query}&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit={str(Limit)}&sf=&sd=&advancedPanel=true&mode=advanced#results'
                        Responses = General.Request_Handler(
                            Main_URL, Filter=True, Host=f"https://{Domain}")
                        Response = Responses["Filtered"]
                        # NZCN queries must contain at least one letter.
                        NZCN_Regex = re.search(r".*[a-zA-Z].*", Query)

                        if NZCN_Regex:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Response, Query,
                                The_File_Extension)
                            # Extract (entity name, entity id, NZBN) tuples
                            # from the results page markup.
                            NZBNs_Regex = re.findall(
                                r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d+)\)\s\(NZBN\:\s(\d+)\)",
                                Response)

                            if NZBNs_Regex:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain,
                                    "Company Details", Task_ID, Plugin_Name)

                                # NOTE(review): the captured NZBN value (third
                                # group) is unused — the detail URL is built
                                # from the entity id; confirm this is intended.
                                for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                    Full_NZBN_URL = f'https://{Domain}/companies/app/ui/pages/companies/{NZ_ID}?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                    if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                        Current_Response = General.Request_Handler(
                                            Full_NZBN_URL)
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            NZCN.replace(' ', '-'),
                                            The_File_Extension)

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_NZBN_URL,
                                                f"New Zealand Business Number {NZ_ID} for Query {Query}",
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_NZBN_URL)

                                        else:
                                            logging.warning(
                                                f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                                            )

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.replace('plugins.', '')} - Response did not match regular expression."
                                )

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.replace('plugins.', '')} - Query did not match regular expression."
                            )

                    # BUG FIX: narrowed from a bare "except:".
                    except Exception:
                        logging.warning(
                            f"{General.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided for NZCN Search."
                        )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.replace('plugins.', '')} - Invalid request type."
                    )

            # BUG FIX: narrowed from a bare "except:".
            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to make request."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
예제 #5
0
def Search(Query_List, Task_ID, Type):
    """Search the VirusTotal v3 API for a domain, IP address, URL or file hash.

    Query_List: a query or list of queries (normalised via General.Convert_to_List).
    Task_ID: task identifier forwarded to General.Connections.
    Type: "Domain", "IP", "URL" or "Hash" - selects the API endpoint queried.
    """

    try:
        # Hoisted out of the per-query loop: the URL branch needs it every pass.
        import base64
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        VT_API_Key = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        def Process_Response(Query, Response, Link, Connection_Type, Title):
            # Shared tail of every branch: persist the raw API JSON, scrape the
            # public GUI page, and register both files (cache-de-duplicated).
            JSON_Object = Common.JSON_Handler(Response.text)
            JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, Connection_Type, Task_ID, Concat_Plugin_Name)
            Main_URL_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://www.{Domain}")
            Main_URL_Response = Main_URL_Responses["Filtered"]

            if Link not in Cached_Data and Link not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Concat_Plugin_Name, Main_URL_Response, Link, The_File_Extensions["Query"])

                if Output_file:
                    Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                    Data_to_Cache.append(Link)

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        for Query in Query_List:

            if Type == "Domain":

                if Common.Regex_Handler(Query, Type=Type):
                    Response = Common.Request_Handler(f"https://www.{Domain}/api/v3/domains/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        Process_Response(Query, Response, f"https://www.{Domain}/gui/domain/{Query}/detection", "Domain Information", f"Virus Total Domain | {Query}")

            elif Type == "IP":

                if Common.Regex_Handler(Query, Type=Type):
                    Response = Common.Request_Handler(f"https://www.{Domain}/api/v3/ip_addresses/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        Process_Response(Query, Response, f"https://www.{Domain}/gui/ip-address/{Query}/detection", "IP Address Information", f"Virus Total IP Address | {Query}")

            elif Type == "URL":

                if Common.Regex_Handler(Query, Type=Type):
                    # The v3 API identifies URLs by their unpadded URL-safe base64 form.
                    Query_Encoded = base64.urlsafe_b64encode(Query.encode()).decode().strip("=")
                    Response = Common.Request_Handler(f"https://www.{Domain}/api/v3/urls/{Query_Encoded}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                    if Response.status_code == 200:
                        # NOTE(review): connection type "Domain Information" is kept from
                        # the original; it may have been meant to be URL-specific - confirm.
                        Process_Response(Query, Response, f"https://www.{Domain}/gui/url/{Query_Encoded}/detection", "Domain Information", f"Virus Total URL | {Query}")

            elif Type == "Hash":
                # Hashes have no client-side regex check; the API validates them.
                Response = Common.Request_Handler(f"https://www.{Domain}/api/v3/files/{Query}", Optional_Headers={"x-apikey": VT_API_Key}, Full_Response=True)

                if Response.status_code == 200:
                    Process_Response(Query, Response, f"https://www.{Domain}/gui/file/{Query}/detection", "Virus", f"Virus Total File | {Query}")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #6
0
def Search(Query_List, Task_ID):
    """Query the Greynoise community API for each IP address in Query_List.

    Query_List: an IP address or list of IP addresses.
    Task_ID: task identifier forwarded to General.Connections.

    Uses the API key returned by Load_Configuration() when one is configured;
    otherwise falls back to the unauthenticated community endpoint.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        API_Key = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="IP"):
                URL = f"https://api.{Domain}/v3/community/{Query}"
                headers = {"Accept": "application/json"}

                # A non-empty string key upgrades the request from the
                # anonymous community tier to the authenticated one.
                if type(API_Key) == str and len(API_Key) > 0:
                    headers["key"] = API_Key
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Using provided API Key for search."
                    )

                else:
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - No API Key provided, using community edition for search."
                    )

                Registration_Response_Full = Common.Request_Handler(
                    URL, Optional_Headers=headers, Full_Response=True)
                JSON_Object = Common.JSON_Handler(
                    Registration_Response_Full.text)
                Registration_Response = JSON_Object.To_JSON_Loads()
                Indented_Registration_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, Indented_Registration_Response,
                    Query, The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Concat_Plugin_Name)

                if Registration_Response_Full.ok:

                    try:
                        Title = f"Greynoise IP Search | {Query}"
                        Search_Result_Responses = Common.Request_Handler(
                            Registration_Response["link"],
                            Filter=True,
                            Host=f"https://viz.{Domain}")
                        Search_Result_Response = Search_Result_Responses[
                            "Filtered"]

                        if URL not in Cached_Data and URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query,
                                Plugin_Name, Search_Result_Response,
                                Title.replace(" ", "-"),
                                The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], URL, Title,
                                    Concat_Plugin_Name)
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    # Narrowed from a bare "except:" so system-exiting exceptions
                    # (KeyboardInterrupt, SystemExit) are no longer swallowed; a
                    # missing "link" key etc. still means "no result for this query".
                    except Exception:
                        logging.info(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - No result found for given query {Query}."
                        )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Received an invalid response from the API."
                    )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - The provided query is not a valid IP address."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #7
0
def General_Pull(Handle, Limit, Directory, API, Task_ID):
    """Pull the latest tweets for a Twitter handle and record each linked URL.

    Handle: the Twitter screen name to pull.
    Limit: maximum number of tweets to request from the timeline.
    Directory: plugin output directory (already created by the caller).
    API: an authenticated tweepy-style API object exposing user_timeline().
    Task_ID: task identifier forwarded to General.Connections.
    """

    try:
        Data_to_Cache = []
        JSON_Response = []
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Latest_Tweets = API.user_timeline(screen_name=Handle, count=Limit)

        for Tweet in Latest_Tweets:

            try:
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name,
                    'url': Tweet.entities['urls'][0]["expanded_url"]
                })

            # Narrowed from a bare "except:": a tweet without an expanded URL
            # only raises KeyError/IndexError (or AttributeError if entities is
            # absent); anything else should propagate to the outer handler.
            except (AttributeError, IndexError, KeyError):
                JSON_Response.append({
                    'id': Tweet.id,
                    'text': Tweet.text,
                    'author_name': Tweet.user.screen_name
                })

        JSON_Output = json.dumps(JSON_Response, indent=4, sort_keys=True)
        Output_Connections = General.Connections(Handle, Plugin_Name, Domain,
                                                 "Social Media - Media",
                                                 Task_ID, Plugin_Name.lower())
        Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                             JSON_Output, Handle,
                                             The_File_Extensions["Main"])

        for JSON_Item in JSON_Response:

            # Only tweets that carried a URL get an individual output file.
            if all(Item in JSON_Item for Item in ['id', 'url', 'text']):
                Link = JSON_Item['url']

                if Link not in Cached_Data and Link not in Data_to_Cache:
                    Title = "Twitter | " + JSON_Item['text']
                    Item_Responses = General.Request_Handler(
                        Link, Filter=True, Host=f"https://{Domain}")
                    Item_Response = Item_Responses["Filtered"]

                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Handle, Plugin_Name, Item_Response,
                        str(JSON_Item['id']), The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  Link, Title,
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(Link)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Output file not returned."
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Insufficient parameters provided."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #8
0
    def Search(self):
        """Search VK for users or groups matching each query in self.Query_List.

        Calls the VK users.search or groups.search API method depending on
        self.Type, writes the raw JSON plus each matching profile page to
        output files and registers them via General.Connections.
        """

        try:

            def Recursive_Dict_Check(Items, Dict_to_Check):
                # Walk Items as a key path into the nested dict; return the
                # value at the end of the path, or False if any key is missing.

                try:

                    for Item in Items:

                        if Item in Dict_to_Check:
                            Dict_to_Check = Dict_to_Check[Item]

                        else:
                            return False

                    return Dict_to_Check

                # Bug fix: the original bare "except:" logged str(e) without
                # ever binding e, raising a NameError inside the handler.
                except Exception as e:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}"
                    )

            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            VK_Access_Token = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            def Process_Item(Screen_Name, Title, Query, Main_File,
                             Output_Connections):
                # Shared per-result handling for both the user and group
                # branches: fetch the profile page, write it out and link it.
                VK_URL = f"https://{self.Domain}/" + Screen_Name

                if VK_URL not in Cached_Data and VK_URL not in Data_to_Cache:
                    VK_Item_Responses = Common.Request_Handler(
                        VK_URL, Filter=True, Host=f"https://{self.Domain}")
                    VK_Item_Response = VK_Item_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, self.Plugin_Name, VK_Item_Response,
                        VK_URL, self.The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  VK_URL, Title,
                                                  self.Plugin_Name.lower())
                        Data_to_Cache.append(VK_URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                        )

            for Query in self.Query_List:

                if self.Type == "User":
                    # Bug fix: the fields list previously contained the literal
                    # text "self.Domain" (copy-paste artefact); the intended VK
                    # profile field is "domain".
                    VK_Response = Common.Request_Handler(
                        f"https://api.{self.Domain}/method/users.search?v=5.52&access_token={VK_Access_Token}&fields=verified, blacklisted, sex, bdate, city, country, home_town, photo_50, photo_100, photo_200_orig, photo_200, photo_400_orig, photo_max, photo_max_orig, online, lists, domain, has_mobile, contacts, site, education, universities, schools, status, last_seen, followers_count, common_count, counters, occupation, nickname, relatives, relation, personal, connections, exports, wall_comments, activities, interests, music, movies, tv, books, games, about, quotes, can_post, can_see_all_posts, can_see_audio, can_write_private_message, timezone, screen_name&q={Query}&count={str(self.Limit)}"
                    )
                    JSON_Object = Common.JSON_Handler(VK_Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, self.Plugin_Name, JSON_Output_Response,
                        Query, self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain,
                        "Social Media - Person", self.Task_ID,
                        self.Plugin_Name.lower())
                    New_JSON_Response = Recursive_Dict_Check(
                        ["response", "items"], JSON_Response)

                    if New_JSON_Response:

                        for VK_Item_Line in New_JSON_Response:

                            try:

                                if all(Item in VK_Item_Line for Item in
                                       ["first_name", "last_name",
                                        "screen_name"]):
                                    Full_Name = VK_Item_Line["first_name"] + " " + VK_Item_Line["last_name"]
                                    Process_Item(
                                        VK_Item_Line['screen_name'],
                                        f"VK User | {Full_Name}", Query,
                                        Main_File, Output_Connections)

                            except Exception as e:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}"
                                )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                        )

                elif self.Type == "Group":
                    VK_Response = Common.Request_Handler(
                        f"https://api.{self.Domain}/method/groups.search?v=5.52&access_token={VK_Access_Token}&q={Query}&count={str(self.Limit)}"
                    )
                    JSON_Object = Common.JSON_Handler(VK_Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, self.Plugin_Name, JSON_Output_Response,
                        Query, self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain,
                        "Social Media - Group", self.Task_ID,
                        self.Plugin_Name.lower())
                    New_JSON_Response = Recursive_Dict_Check(
                        ["response", "items"], JSON_Response)

                    if New_JSON_Response:

                        for VK_Item_Line in New_JSON_Response:

                            try:

                                if all(Item in VK_Item_Line
                                       for Item in ["name", "screen_name"]):
                                    Full_Name = VK_Item_Line["name"]
                                    Process_Item(
                                        VK_Item_Line['screen_name'],
                                        f"VK Group | {Full_Name}", Query,
                                        Main_File, Output_Connections)

                            except Exception as e:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}"
                                )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                        )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid self.Type supplied."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
예제 #9
0
def Search(Query_List, Task_ID, **kwargs):
    """Run each query through a Google Custom Search Engine and save results.

    Query_List: a query or list of queries.
    Task_ID: task identifier forwarded to General.Connections.
    kwargs: may carry a result limit (read via General.Get_Limit); capped at
        100 because the CSE API only pages through the first 100 results.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Google_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        if int(Limit) > 100:
            logging.fatal(
                f"{General.Date()} - {__name__.strip('plugins.')} - This plugin does not support limits over 100."
            )
            return None

        # Loop invariants hoisted out of the per-query / per-result loops.
        Path_Pattern = re.compile(
            r"https?\:\/\/(www\.)?[\w\d\.]+\.\w{2,3}(\.\w{2,3})?(\.\w{2,3})?\/([\w\d\-\_\/]+)?"
        )
        Request_Headers = {
            'Content-Type': 'application/json',
            'User-Agent':
            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
            'Accept':
            'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.5'
        }

        for Query in Query_List:
            Current_Start = 1
            Current_Step = 0

            # The CSE API returns at most 10 items per call; page with "start".
            while Current_Start <= int(Limit):
                Service = build("customsearch",
                                Google_Details[2],
                                developerKey=Google_Details[3],
                                cache_discovery=False)
                CSE_Response = Service.cse().list(q=Query,
                                                  cx=Google_Details[0],
                                                  start=Current_Start,
                                                  num=10).execute()
                CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                      indent=4,
                                                      sort_keys=True)
                CSE_JSON_Response = json.loads(CSE_JSON_Output_Response)
                Output_Name = f"{Query}-{str(Current_Start)}"
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, CSE_JSON_Output_Response,
                    Output_Name, The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, "google.com", "Search Result", Task_ID,
                    Plugin_Name.lower())

                if 'items' in CSE_JSON_Response:

                    for Google_Item_Line in CSE_JSON_Response['items']:

                        try:

                            if 'link' in Google_Item_Line and 'title' in Google_Item_Line:
                                Google_Item_URL = Google_Item_Line['link']
                                Title = "Google | " + Google_Item_Line['title']

                                if Google_Item_URL not in Cached_Data and Google_Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                                    Path_Regex = Path_Pattern.search(Google_Item_URL)

                                    if Path_Regex:
                                        # Robustness: bound the request so one dead
                                        # host cannot hang the plugin; a timeout is
                                        # caught and logged by the handler below.
                                        Google_Item_Response = requests.get(
                                            Google_Item_URL,
                                            headers=Request_Headers,
                                            timeout=30).text
                                        Output_Path = Path_Regex.group(4).replace("/", "-")
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Output_Name,
                                            Plugin_Name, Google_Item_Response,
                                            Output_Path,
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Google_Item_URL, Title,
                                                Plugin_Name.lower())
                                            Data_to_Cache.append(
                                                Google_Item_URL)

                                        else:
                                            logging.warning(
                                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                                    else:
                                        logging.warning(
                                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                                        )

                        except Exception as e:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                            )

                    Current_Start += 10

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                    )
                    break

        # Append to an existing cache, otherwise create a fresh one.
        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #10
0
def Search(Query_List, Task_ID, **kwargs):
    """Search the DuckDuckGo Instant Answer API for each query.

    For every query, the raw JSON response is written as a "main" file, then
    each related-topic URL is fetched, saved as a query-results file, and
    registered via General.Connections. Visited URLs are cached to avoid
    re-processing on later runs.

    Args:
        Query_List: A single query string or a list of queries; normalised
            with General.Convert_to_List.
        Task_ID: Identifier of the owning task, forwarded to
            General.Connections.
        **kwargs: Optional "Limit" — maximum number of results per query,
            resolved by General.Get_Limit.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            # DuckDuckGo Instant Answer API, JSON output.
            URL_Query = urllib.parse.quote(Query)
            URL = f"https://api.duckduckgo.com/?q={URL_Query}&format=json"
            DDG_Response = General.Request_Handler(URL)
            JSON_Response = json.loads(DDG_Response)
            JSON_Output_Response = json.dumps(JSON_Response,
                                              indent=4,
                                              sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Output_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Search Result",
                                                     Task_ID,
                                                     Plugin_Name.lower())

            if JSON_Response.get('RelatedTopics'):
                Current_Step = 0

                # NOTE: the list being iterated is deliberately extended
                # below so nested "Topics" groups are flattened into this
                # same loop (valid for Python lists).
                for DDG_Item_Link in JSON_Response['RelatedTopics']:

                    try:

                        if 'FirstURL' in DDG_Item_Link:
                            DDG_URL = DDG_Item_Link['FirstURL']
                            Title = General.Get_Title(DDG_URL)
                            Title = f"DuckDuckGo | {Title}"

                            if DDG_URL not in Cached_Data and DDG_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                DDG_Item_Responses = General.Request_Handler(
                                    DDG_URL,
                                    Filter=True,
                                    Host=f"https://www.{Domain}")
                                DDG_Item_Response = DDG_Item_Responses[
                                    "Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    DDG_Item_Response, DDG_URL,
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], DDG_URL,
                                        Title, Plugin_Name.lower())
                                    Data_to_Cache.append(DDG_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                            else:
                                # NOTE(review): this also aborts on the first
                                # already-cached URL, not just on the limit —
                                # preserved as-is; confirm intent.
                                break

                        elif 'Topics' in DDG_Item_Link:

                            # Flatten nested topic groups into the loop.
                            if isinstance(DDG_Item_Link['Topics'], list):
                                JSON_Response['RelatedTopics'].extend(
                                    DDG_Item_Link['Topics'])

                    except Exception as e:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #11
0
    def Search(self):
        """Query the IntelligenceX "intelligent search" API for each query.

        A search is submitted (request 1), then its results are fetched by the
        returned search id (request 2). Both raw JSON responses are stored as
        main files; each record's public URL is fetched, saved, and registered
        via General.Connections. Visited URLs are cached across runs.
        """

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            IX_Access_Token = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                # Search request payload; empty "buckets"/"terminate" means
                # no bucket filtering and no terminated searches.
                Data = {
                    "term": Query,
                    "buckets": [],
                    "lookuplevel": 0,
                    "maxresults": self.Limit,
                    "timeout": 0,
                    "datefrom": "",
                    "dateto": "",
                    "sort": 2,
                    "media": 0,
                    "terminate": []
                }
                IX_Response = Common.Request_Handler(
                    f"https://2.{self.Domain}/intelligent/search?k={IX_Access_Token}",
                    Method="POST",
                    JSON_Data=Data)
                JSON_Object = Common.JSON_Handler(IX_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File_1 = General.Main_File_Create(
                    Directory, self.Plugin_Name, JSON_Output_Response,
                    Query + "-Request-1", self.The_File_Extensions["Main"])

                if "id" in JSON_Response:
                    # Second request: retrieve results for the search id.
                    Search_ID = JSON_Response["id"]
                    IX_Response = Common.Request_Handler(
                        f"https://2.{self.Domain}/intelligent/search/result?k={IX_Access_Token}&id={Search_ID}"
                    )
                    JSON_Object = Common.JSON_Handler(IX_Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File_2 = General.Main_File_Create(
                        Directory, self.Plugin_Name, JSON_Output_Response,
                        Query + "-Request-2", self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain, self.Result_Type,
                        self.Task_ID, self.Plugin_Name.lower())

                    if "records" in JSON_Response:

                        for IX_Item in JSON_Response["records"]:

                            if "systemid" in IX_Item and "name" in IX_Item:
                                IX_URL = f"https://{self.Domain}/?did=" + IX_Item[
                                    'systemid']

                                if IX_Item["name"] != "":
                                    Title = f"IntelligenceX Data Leak | " + IX_Item[
                                        "name"]

                                else:
                                    # Fix: was "TItle", leaving Title unbound
                                    # (or stale) for unnamed documents.
                                    Title = "IntelligenceX Data Leak | Untitled Document"

                                if IX_URL not in Cached_Data and IX_URL not in Data_to_Cache:
                                    IX_Item_Responses = Common.Request_Handler(
                                        IX_URL,
                                        Filter=True,
                                        Host=f"https://{self.Domain}")
                                    IX_Item_Response = IX_Item_Responses[
                                        "Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        IX_Item_Response, IX_URL,
                                        self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [
                                                Main_File_1, Main_File_2,
                                                Output_file
                                            ], IX_URL, Title,
                                            self.Plugin_Name.lower())
                                        Data_to_Cache.append(IX_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
예제 #12
0
def Search(Query_List, Task_ID, Type, **kwargs):
    """Search Canadian business registries by number (CBN) or name (CCN).

    Args:
        Query_List: A single query string or list of queries; normalised via
            General.Convert_to_List.
        Task_ID: Identifier of the owning task, forwarded to
            General.Connections.
        Type: "CBN" (business-number lookup) or "CCN" (company-name search).
        **kwargs: Optional "Limit" — maximum results for CCN searches
            (defaults to 10; non-positive values are ignored).
    """
    Data_to_Cache = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "CBN":
                Main_API_URL = 'https://searchapi.mrasservice.com/Search/api/v1/search?fq=keyword:%7B' + Query + '%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                Response = requests.get(Main_API_URL).text
                JSON_Response = json.loads(Response)

                try:

                    if JSON_Response['count'] != 0:
                        # A CBN must be numeric; non-numeric input raises
                        # ValueError and is reported as an invalid query.
                        Query = str(int(Query))
                        Main_URL = 'https://beta.canadasbusinessregistries.ca/search/results?search=%7B' + Query + '%7D&status=Active'
                        Response = requests.get(Main_URL).text

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Response,
                                General.Get_Title(Main_URL),
                                The_File_Extension)

                            if Output_file:
                                General.Connections(
                                    Output_file, Query, Plugin_Name, Main_URL,
                                    "canadasbusinessregistries.ca",
                                    "Data Leakage", Task_ID,
                                    General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                except Exception:
                    # Fix: message previously said "ABN Search" (the
                    # Australian plugin's term) in this CBN branch.
                    logging.warning(General.Date() +
                                    " Invalid query provided for CBN Search.")

            elif Type == "CCN":
                Main_URL = 'https://searchapi.mrasservice.com/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(
                    Query
                ) + '%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                Response = requests.get(Main_URL).text
                JSON_Response = json.loads(Response)
                Indented_JSON_Response = json.dumps(JSON_Response,
                                                    indent=4,
                                                    sort_keys=True)

                # Fix: Limit was unbound (NameError below) when a
                # non-positive "Limit" kwarg was supplied.
                Limit = 10

                if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
                    Limit = kwargs["Limit"]

                try:
                    General.Main_File_Create(Directory, Plugin_Name,
                                             Indented_JSON_Response, Query,
                                             ".json")
                    Current_Step = 0

                    for JSON_Item in JSON_Response['docs']:

                        if JSON_Item.get('BN'):
                            CCN = JSON_Item['Company_Name']
                            CBN = JSON_Item['BN']

                            Full_ABN_URL = 'https://beta.canadasbusinessregistries.ca/search/results?search=%7B' + CBN + '%7D&status=Active'

                            if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Current_Response = requests.get(
                                    Full_ABN_URL).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    str(Current_Response),
                                    CCN.replace(' ', '-'), The_File_Extension)

                                if Output_file:
                                    General.Connections(
                                        Output_file, Query, Plugin_Name,
                                        Full_ABN_URL,
                                        "canadasbusinessregistries.ca",
                                        "Data Leakage", Task_ID,
                                        General.Get_Title(Full_ABN_URL),
                                        Plugin_Name)
                                    Data_to_Cache.append(Full_ABN_URL)
                                    Current_Step += 1

                except Exception:
                    logging.warning(General.Date() +
                                    " Invalid query provided for CCN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except Exception:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
예제 #13
0
def _Handle_Posts(Posts, Query, Output_file, Local_Plugin_Name, Task_ID,
                  Cached_Data, Data_to_Cache, Limit):
    """Register each post URL (up to Limit new ones) and record it for caching.

    Mirrors the original per-type loop exactly: the URL is appended to
    Data_to_Cache and the step counter incremented for every post, whether
    or not it passed the cache/limit check.
    """
    Current_Step = 0

    for Post in Posts:
        Shortcode = Post["node"]["shortcode"]
        URL = "https://www.instagram.com/p/" + Shortcode + "/"

        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                Limit):

            if Output_file:
                General.Connections(Output_file, Query,
                                    Local_Plugin_Name, URL,
                                    "instagram.com", "Data Leakage",
                                    Task_ID, General.Get_Title(URL),
                                    Local_Plugin_Name.lower())

        Data_to_Cache.append(URL)
        Current_Step += 1


def Search(Query_List, Task_ID, Type, **kwargs):
    """Search Instagram by user, hashtag, location, or media shortcode.

    Args:
        Query_List: A single query string or list of queries; normalised via
            General.Convert_to_List.
        Task_ID: Identifier of the owning task, forwarded to
            General.Connections.
        Type: One of "User", "Tag", "Location", or "Media".
        **kwargs: Optional "Limit" — maximum results per query (defaults to
            10; non-positive values are ignored).
    """
    Data_to_Cache = []

    # Fix: Limit was unbound when a non-positive "Limit" kwarg was supplied.
    Limit = 10

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "User":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.user(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                  indent=4,
                                                  sort_keys=True)
            Output_file = General.Main_File_Create(Directory,
                                                   Local_Plugin_Name,
                                                   CSE_JSON_Output_Response,
                                                   Query, ".json")
            Posts = CSE_Response[0]["edge_owner_to_timeline_media"]["edges"]
            _Handle_Posts(Posts, Query, Output_file, Local_Plugin_Name,
                          Task_ID, Cached_Data, Data_to_Cache, Limit)

        elif Type == "Tag":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.tag(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                  indent=4,
                                                  sort_keys=True)
            Output_file = General.Main_File_Create(Directory,
                                                   Local_Plugin_Name,
                                                   CSE_JSON_Output_Response,
                                                   Query, ".json")
            Posts = CSE_Response[0]["edge_hashtag_to_media"]["edges"]
            _Handle_Posts(Posts, Query, Output_file, Local_Plugin_Name,
                          Task_ID, Cached_Data, Data_to_Cache, Limit)

        elif Type == "Location":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = location(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                  indent=4,
                                                  sort_keys=True)
            Output_file = General.Main_File_Create(Directory,
                                                   Local_Plugin_Name,
                                                   CSE_JSON_Output_Response,
                                                   Query, ".json")
            Posts = CSE_Response[0]["edge_location_to_media"]["edges"]
            _Handle_Posts(Posts, Query, Output_file, Local_Plugin_Name,
                          Task_ID, Cached_Data, Data_to_Cache, Limit)

        elif Type == "Media":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.media(Query)

            if CSE_Response:
                CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                                      indent=4,
                                                      sort_keys=True)
                Output_file = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, ".json")
                URL = "https://www.instagram.com/p/" + Query + "/"

                if URL not in Cached_Data and URL not in Data_to_Cache:

                    if Output_file:
                        General.Connections(Output_file, Query,
                                            Local_Plugin_Name, URL,
                                            "instagram.com", "Data Leakage",
                                            Task_ID, General.Get_Title(URL),
                                            Local_Plugin_Name.lower())

                Data_to_Cache.append(URL)

            else:
                logging.warning(General.Date() + " Invalid response.")

        else:
            logging.warning(General.Date() + " Invalid type provided.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
예제 #14
0
def Search(Query_List, Task_ID):
    """Look up each query's web technology stack via BuiltWith.

    For every URL-shaped query, the builtwith() result is stored as a main
    JSON file, the BuiltWith page for the domain is fetched and saved, and
    the result is registered via General.Connections. Processed search URLs
    are cached to skip them on later runs.

    Args:
        Query_List: A single query string or a list of queries; normalised
            with General.Convert_to_List.
        Task_ID: Identifier of the owning task, forwarded to
            General.Connections.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()

        for Query in General.Convert_to_List(Query_List):
            # Guard: the query must parse as a URL.
            Components = Common.Regex_Handler(Query,
                                              Type="URL",
                                              Get_URL_Components=True)

            if not Components:
                logging.info(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided."
                )
                continue

            # Guard: builtwith() must return technology data.
            Tech_Profile = builtwith(Query)

            if not Tech_Profile:
                logging.info(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to get result for provided query."
                )
                continue

            Serialised_Profile = Common.JSON_Handler(Tech_Profile).Dump_JSON()
            Query_Domain = Components["Body"] + Components["Extension"]
            Title = f"Built With | {Query_Domain}"
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Serialised_Profile,
                                                 Query_Domain,
                                                 The_File_Extensions["Main"])
            BW_Search_URL = f"https://{Domain}/{Query_Domain}"
            Filtered_Page = Common.Request_Handler(
                BW_Search_URL, Filter=True,
                Host=f"https://{Domain}")["Filtered"]
            Output_Connections = General.Connections(
                Query, Plugin_Name, Domain, "Web Application Architecture",
                Task_ID, Plugin_Name.lower())

            # Skip anything already seen in a previous or current run.
            if BW_Search_URL in Cached_Data or BW_Search_URL in Data_to_Cache:
                continue

            Output_file = General.Create_Query_Results_Output_File(
                Directory, Query, Plugin_Name, Filtered_Page, Query,
                The_File_Extensions['Query'])

            if Output_file:
                Output_Connections.Output([Main_File, Output_file],
                                          BW_Search_URL, Title,
                                          Plugin_Name.lower())
                Data_to_Cache.append(BW_Search_URL)

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #15
0
def _Handle_Photos(Photos, Query, Directory, Task_ID, Cached_Data,
                   Data_to_Cache, Limit):
    """Fetch, save, and register each photo page (up to Limit new ones).

    Fix: the step counter previously lived inside the loop and was reset on
    every iteration, so the limit check never triggered.
    """
    Current_Step = 0
    headers = {
        'Content-Type': 'application/json',
        'User-Agent':
        'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
        'Accept':
        'ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5'
    }

    for Photo in Photos:
        Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo[
            "id"]

        if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                Limit):
            Photo_Response = requests.get(Photo_URL, headers=headers).text
            # Fix: one branch previously passed the Photo dict itself as the
            # output-file title instead of the photo id string.
            Output_file = General.Create_Query_Results_Output_File(
                Directory, Query, Plugin_Name, Photo_Response,
                str(Photo['id']), The_File_Extension)

            if Output_file:
                General.Connections(Output_file, Query, Plugin_Name,
                                    Photo_URL, "flickr.com", "Data Leakage",
                                    Task_ID, General.Get_Title(Photo_URL),
                                    Plugin_Name.lower())

            Data_to_Cache.append(Photo_URL)
            Current_Step += 1


def Search(Query_List, Task_ID, **kwargs):
    """Search Flickr for a user's photos by email address or username.

    Args:
        Query_List: A single query string or list of queries; normalised via
            General.Convert_to_List. Queries matching an email pattern are
            looked up with findByEmail, otherwise findByUserName.
        Task_ID: Identifier of the owning task, forwarded to
            General.Connections.
        **kwargs: Optional "Limit" — maximum photos per query (defaults to
            10; non-positive values are ignored).
    """
    Data_to_Cache = []

    # Fix: Limit was unbound when a non-positive "Limit" kwarg was supplied.
    Limit = 10

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    try:
        Flickr_Details = Load_Configuration()
        flickr_api.set_keys(api_key=Flickr_Details[0],
                            api_secret=Flickr_Details[1])

    except Exception:
        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            " Failed to establish API identity.")

    for Query in Query_List:

        try:

            # Email-shaped queries are resolved by email, others by username.
            if re.search(r"[^@]+@[^\.]+\..+", Query):
                User = flickr_api.Person.findByEmail(Query)

            else:
                User = flickr_api.Person.findByUserName(Query)

            Photos = User.getPhotos()
            General.Main_File_Create(Directory, Plugin_Name, Photos, Query,
                                     ".txt")
            _Handle_Photos(Photos, Query, Directory, Task_ID, Cached_Data,
                           Data_to_Cache, Limit)

        except Exception:
            logging.info(
                str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
                " Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
예제 #16
0
def Search(Query_List, Task_ID, Type, **kwargs):
    """Run hunter.io lookups for each query.

    Type == "Domain": list email addresses hunter.io knows for the domain.
    Type == "Email": verify the address and list the pages it was found on.
    Each result is fetched, written to an output file, and linked through
    General.Connections; handled items are appended to the cache so repeat
    runs skip them. Any per-query failure is logged and the loop continues.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        # Renamed from "Shodan_API_Key": this plugin authenticates against
        # hunter.io via PyHunter; the old name was a copy-paste leftover.
        Hunter_API_Key = Load_Configuration()
        API_Session = PyHunter(Hunter_API_Key)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:

                if Type == "Domain":

                    if General.Regex_Checker(Query, "Domain"):
                        Local_Plugin_Name = Plugin_Name + "-Domain"
                        API_Response = API_Session.domain_search(Query)
                        JSON_Output_Response = json.dumps(API_Response, indent=4, sort_keys=True)

                        # Only proceed when the API actually resolved the domain
                        # and returned at least one email entry.
                        if API_Response["domain"] and API_Response['emails']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["emails"]:
                                Current_Email_Address = Hunter_Item["value"]
                                Current_Hunter_Item_Host = f"https://{Domain}/verify/{Current_Email_Address}"
                                Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Domain}")
                                Filtered_Response = Current_Hunter_Item_Responses["Filtered"]
                                Title = "Hunter | " + Current_Email_Address

                                # Skip anything already cached or already handled
                                # in this run, and respect the result limit.
                                if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Email_Address)

                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                elif Type == "Email":

                    if General.Regex_Checker(Query, "Email"):
                        Local_Plugin_Name = Plugin_Name + "-Email"
                        API_Response = API_Session.email_verifier(Query)
                        JSON_Output_Response = json.dumps(API_Response, indent=4, sort_keys=True)

                        if API_Response["email"] and API_Response['sources']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account Source", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["sources"]:
                                Current_Hunter_Item_Host = Hunter_Item["uri"]
                                Current_Hunter_Item_Domain = Hunter_Item["domain"]

                                # Pick the Host header scheme to match the
                                # source URI; fall back to an unfiltered fetch
                                # when no scheme is recognised.
                                if 'http://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"http://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                elif 'https://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                else:
                                    Filtered_Response = General.Request_Handler(Current_Hunter_Item_Host)

                                Title = "Hunter | " + Current_Hunter_Item_Host

                                if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < int(Limit):
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Hunter_Item_Host)

                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

            except Exception as e:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to complete task - {str(e)}")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #17
0
def Search(Query_List, Task_ID, **kwargs):
    """Query the Phishstats API for phishing URLs matching each query.

    Each matching URL is fetched (flagged as a risky plugin request), written
    to an output file, and linked through General.Connections with a title
    taken from the page's <title> tag when available. Handled links are
    cached; failures are logged and the loops continue.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                Pull_URL = f"https://{Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={Limit}"
                Results = json.loads(General.Request_Handler(Pull_URL))
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Phishing", Task_ID,
                    Plugin_Name.lower())
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name,
                    json.dumps(Results, indent=4, sort_keys=True), Query,
                    The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    # Bug fix: str.strip() removes a *character set* from both
                    # ends, so the original chained strip("https://") /
                    # strip("www.") calls could also eat leading or trailing
                    # letters of the domain itself (e.g. "w", "s", "."). Strip
                    # the scheme and optional "www." prefix with a regex.
                    Current_Domain = re.sub(r"^https?://(www\.)?", "",
                                            Current_Link)
                    Current_Title = Result["title"]

                    try:
                        Current_Result = General.Request_Handler(
                            Current_Link,
                            Filter=True,
                            Risky_Plugin=True,
                            Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        # Bug fix: re.search() was previously handed the whole
                        # response dict (a TypeError silently swallowed by the
                        # except below, so no title was ever extracted). Search
                        # the filtered HTML string instead.
                        Response_Regex = re.search(
                            r"\<title\>([^\<\>]+)\<\/title\>",
                            Current_Result_Filtered)
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Output_file_Query, Plugin_Name,
                                Current_Result_Filtered, Current_Domain,
                                The_File_Extensions["Query"])

                            if Output_file:

                                # Prefer the live page title; fall back to the
                                # API-supplied title unless it is a Phishstats
                                # placeholder, in which case derive one.
                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1)
                                    Current_Title = Current_Title.strip()
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Current_Link,
                                        Current_Title, Plugin_Name.lower())

                                else:

                                    if not "Phishstats" in Current_Title:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Link, Current_Title,
                                            Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Link,
                                            General.Get_Title(Current_Link),
                                            Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    except:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request for result, link may no longer be available."
                        )

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #18
0
def Search(Query_List, Task_ID, Limit=10):
    """Search the Google Play Store (batchexecute endpoint) for applications.

    Posts the internal batchexecute payload for each query, extracts app
    detail links from the response with a regex, fetches each app page, and
    records it through General.Connections. Handled URLs are cached; failures
    are logged per query and the loop continues.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                # Undocumented Play Store RPC payload; the query is embedded in
                # the escaped inner JSON string.
                body = {
                    "f.req":
                    f'''[[["lGYRle","[[[],[[10,[10,50]],true,null,[96,27,4,8,57,30,110,11,16,49,1,3,9,12,104,55,56,51,10,34,31,77,145],[null,null,null,[[[[7,31],[[1,52,43,112,92,58,69,31,19,96,103]]]]]]],[\\"{Query}\\"],7,[null,1]]]",null,"2"]]]'''
                }
                Play_Store_Response = Common.Request_Handler(
                    f"https://{Domain}/_/PlayStoreUi/data/batchexecute",
                    Method="POST",
                    Data=body)
                # Drop the anti-JSON-hijacking prefix and unescape "=" signs.
                Play_Store_Response = Play_Store_Response.replace(
                    ')]}\'\n\n', "").replace("\\\\u003d", "=")
                JSON_Object = Common.JSON_Handler(Play_Store_Response)
                Play_Store_Response_JSON = JSON_Object.To_JSON_Loads()
                Play_Store_Response_JSON = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, Play_Store_Response_JSON, Query,
                    The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Application", Task_ID,
                    Concat_Plugin_Name)
                # Renamed from "Win_Store_Regex": this plugin scrapes the Play
                # Store; the old name was a copy-paste leftover from the
                # Windows Store plugin.
                Play_Store_Regex = Common.Regex_Handler(
                    Play_Store_Response,
                    Custom_Regex=
                    r"(\/store\/apps\/details\?id\\\\([\w\d\.]+))\\\"",
                    Findall=True)
                Current_Step = 0

                for Result, Item in Play_Store_Regex:
                    Result = Result.replace("\\\\u003d", "=")
                    Result_URL = f"https://{Domain}{Result}"
                    # NOTE(review): the package ID replaces "u003d" with ""
                    # (not "=") unlike the URL above - presumably intentional
                    # to keep the ID clean, but worth confirming.
                    Item = Item.replace("u003d", "")
                    Title = f"Play Store | {Item}"

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Play_Store_Responses = Common.Request_Handler(
                            Result_URL, Filter=True, Host=f"https://{Domain}")
                        Play_Store_Response = Play_Store_Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Play_Store_Response,
                            Item, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Result_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to get results, this may be due to the query provided."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #19
0
def Search(Query_List, Task_ID, Limit=10):
    """Scrape a credentials-listing site for entries matching each query.

    Fetches the query's listing page, extracts item links with a regex, then
    follows each item's "show me" link to retrieve the detailed record (JSON
    when available, filtered HTML otherwise). Results are written to output
    files and linked through General.Connections; handled URLs are cached.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            URL_Body = f'https://{Domain}'
            Main_URL = URL_Body + '/' + Query.lower().replace(' ', '-')
            Responses = Common.Request_Handler(Main_URL,
                                               Filter=True,
                                               Host=f"https://www.{Domain}")
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Filtered_Response, Query,
                                                 The_File_Extension)
            # Pull (link, title) pairs out of the listing table rows.
            Regex = Common.Regex_Handler(
                Response,
                Custom_Regex=
                r"\<tr\>\s+\<td\sclass\=\"name\"\>\s+\<a\shref\=\"([\/\d\w\-\+\?\.]+)\"\>([\/\d\w\-\+\?\.\(\)\s\,\;\:\~\`\!\@\#\$\%\^\&\*\[\]\{\}]+)\<\/a\>\s+\<\/td\>",
                Findall=True)

            if Regex:
                Current_Step = 0
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Credentials", Task_ID,
                    Concat_Plugin_Name)

                for URL, Title in Regex:
                    Item_URL = URL_Body + URL
                    Current_Response = Common.Request_Handler(Item_URL)
                    # The item page hides the payload behind a "show me!"
                    # button whose data-data attribute is the detail path.
                    Current_Item_Regex = Common.Regex_Handler(
                        Current_Response,
                        Custom_Regex=
                        r"\<button\sclass\=\"btn\sbtn\-primary\spassword\"\s+data\-data\=\"([\-\d\w\?\/]+)\"\s+data\-toggle\=\"modal\"\s+data\-target\=\"\#modal\"\s+\>show\sme\!\<\/button\>"
                    )

                    if Current_Item_Regex:

                        try:
                            Detailed_Item_URL = URL_Body + Current_Item_Regex.group(
                                1)
                            # Bug fix: this request previously re-fetched
                            # Item_URL, leaving Detailed_Item_URL unused, so
                            # the detailed record was never retrieved.
                            Detailed_Responses = Common.Request_Handler(
                                Detailed_Item_URL,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Detailed_Response = Detailed_Responses["Regular"]
                            JSON_Object = Common.JSON_Handler(
                                Detailed_Response)
                            Output_Dict = JSON_Object.Is_JSON()

                            # Bug fix: the original tested the undefined name
                            # "JSON_Response" (a NameError swallowed by the
                            # except below); use the Is_JSON() result.
                            if Output_Dict:
                                Output_Response = "<head><title>" + Output_Dict[
                                    "title"] + "</title></head>\n"
                                Output_Response = Output_Response + Output_Dict[
                                    "data"]

                            else:
                                Output_Response = Detailed_Responses[
                                    "Filtered"]

                            if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Output_Response, Title, The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Item_URL,
                                        General.Get_Title(Item_URL),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Item_URL)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                        except:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to generate output, may have a blank detailed response."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression for current result."
                        )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression for provided query."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #20
0
    def Search(self):
        """Look up OK (Odnoklassniki) users or groups by numeric ID.

        self.Type == "User": fetch profile info via users.getInfo.
        self.Type == "Group": fetch group info via group.getInfo.
        Each request is MD5-signed per the OK API scheme; matching profiles
        are fetched, written to output files, and linked through
        General.Connections. Handled URLs are cached.
        """

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            OK_API_Details = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                if self.Type == "User":
                    # Queries must be numeric OK user IDs.
                    Query = str(int(Query))
                    # OK API request signature: MD5 of the sorted parameter
                    # string concatenated with the session secret.
                    String_to_Hash = f"application_key={OK_API_Details[1]}fields=ACCESSIBLE,AGE,ALLOWS_ANONYM_ACCESS,ALLOWS_MESSAGING_ONLY_FOR_FRIENDS,ALLOW_ADD_TO_FRIEND,BECOME_VIP_ALLOWED,BIRTHDAY,BLOCKED,BLOCKS,BUSINESS,CAN_USE_REFERRAL_INVITE,CAN_VCALL,CAN_VMAIL,CITY_OF_BIRTH,CLOSE_COMMENTS_ALLOWED,COMMON_FRIENDS_COUNT,CURRENT_LOCATION,CURRENT_STATUS,CURRENT_STATUS_DATE,CURRENT_STATUS_DATE_MS,CURRENT_STATUS_ID,CURRENT_STATUS_MOOD,CURRENT_STATUS_TRACK_ID,EMAIL,EXECUTOR,FIRST_NAME,FIRST_NAME_INSTRUMENTAL,FOLLOWERS_COUNT,FORBIDS_MENTIONING,FRIEND,FRIENDS_COUNT,FRIEND_INVITATION,FRIEND_INVITE_ALLOWED,GENDER,GROUP_INVITE_ALLOWED,HAS_DAILY_PHOTO,HAS_EMAIL,HAS_GROUPS_TO_COMMENT,HAS_PHONE,HAS_PRODUCTS,HAS_SERVICE_INVISIBLE,INTERNAL_PIC_ALLOW_EMPTY,INVITED_BY_FRIEND,IS_MERCHANT,LAST_NAME,LAST_NAME_INSTRUMENTAL,LAST_ONLINE,LAST_ONLINE_MS,LOCALE,LOCATION,LOCATION_OF_BIRTH,MODIFIED_MS,NAME,NAME_INSTRUMENTAL,ODKL_BLOCK_REASON,ODKL_EMAIL,ODKL_LOGIN,ODKL_MOBILE,ODKL_MOBILE_ACTIVATION_DATE,ODKL_MOBILE_STATUS,ODKL_USER_OPTIONS,ODKL_USER_STATUS,ODKL_VOTING,ONLINE,PHOTO_ID,PIC1024X768,PIC128MAX,PIC128X128,PIC180MIN,PIC190X190,PIC224X224,PIC240MIN,PIC288X288,PIC320MIN,PIC50X50,PIC600X600,PIC640X480,PIC_1,PIC_2,PIC_3,PIC_4,PIC_5,PIC_BASE,PIC_FULL,PIC_MAX,POSSIBLE_RELATIONS,PREMIUM,PRESENTS,PRIVATE,PROFILE_BUTTONS,PROFILE_COVER,PROFILE_PHOTO_SUGGEST_ALLOWED,PYMK_PIC224X224,PYMK_PIC288X288,PYMK_PIC600X600,PYMK_PIC_FULL,REF,REGISTERED_DATE,REGISTERED_DATE_MS,RELATIONS,RELATIONSHIP,SEND_MESSAGE_ALLOWED,SHORTNAME,SHOW_LOCK,STATUS,TOTAL_PHOTOS_COUNT,UID,URL_CHAT,URL_CHAT_MOBILE,URL_PROFILE,URL_PROFILE_MOBILE,VIPformat=jsonmethod=users.getInfouids={Query}{OK_API_Details[4]}"
                    Signature = hashlib.md5(String_to_Hash.encode()).hexdigest()
                    OK_Response = Common.Request_Handler(f"https://api.{self.Domain}/fb.do?application_key={OK_API_Details[1]}&fields=ACCESSIBLE%2CAGE%2CALLOWS_ANONYM_ACCESS%2CALLOWS_MESSAGING_ONLY_FOR_FRIENDS%2CALLOW_ADD_TO_FRIEND%2CBECOME_VIP_ALLOWED%2CBIRTHDAY%2CBLOCKED%2CBLOCKS%2CBUSINESS%2CCAN_USE_REFERRAL_INVITE%2CCAN_VCALL%2CCAN_VMAIL%2CCITY_OF_BIRTH%2CCLOSE_COMMENTS_ALLOWED%2CCOMMON_FRIENDS_COUNT%2CCURRENT_LOCATION%2CCURRENT_STATUS%2CCURRENT_STATUS_DATE%2CCURRENT_STATUS_DATE_MS%2CCURRENT_STATUS_ID%2CCURRENT_STATUS_MOOD%2CCURRENT_STATUS_TRACK_ID%2CEMAIL%2CEXECUTOR%2CFIRST_NAME%2CFIRST_NAME_INSTRUMENTAL%2CFOLLOWERS_COUNT%2CFORBIDS_MENTIONING%2CFRIEND%2CFRIENDS_COUNT%2CFRIEND_INVITATION%2CFRIEND_INVITE_ALLOWED%2CGENDER%2CGROUP_INVITE_ALLOWED%2CHAS_DAILY_PHOTO%2CHAS_EMAIL%2CHAS_GROUPS_TO_COMMENT%2CHAS_PHONE%2CHAS_PRODUCTS%2CHAS_SERVICE_INVISIBLE%2CINTERNAL_PIC_ALLOW_EMPTY%2CINVITED_BY_FRIEND%2CIS_MERCHANT%2CLAST_NAME%2CLAST_NAME_INSTRUMENTAL%2CLAST_ONLINE%2CLAST_ONLINE_MS%2CLOCALE%2CLOCATION%2CLOCATION_OF_BIRTH%2CMODIFIED_MS%2CNAME%2CNAME_INSTRUMENTAL%2CODKL_BLOCK_REASON%2CODKL_EMAIL%2CODKL_LOGIN%2CODKL_MOBILE%2CODKL_MOBILE_ACTIVATION_DATE%2CODKL_MOBILE_STATUS%2CODKL_USER_OPTIONS%2CODKL_USER_STATUS%2CODKL_VOTING%2CONLINE%2CPHOTO_ID%2CPIC1024X768%2CPIC128MAX%2CPIC128X128%2CPIC180MIN%2CPIC190X190%2CPIC224X224%2CPIC240MIN%2CPIC288X288%2CPIC320MIN%2CPIC50X50%2CPIC600X600%2CPIC640X480%2CPIC_1%2CPIC_2%2CPIC_3%2CPIC_4%2CPIC_5%2CPIC_BASE%2CPIC_FULL%2CPIC_MAX%2CPOSSIBLE_RELATIONS%2CPREMIUM%2CPRESENTS%2CPRIVATE%2CPROFILE_BUTTONS%2CPROFILE_COVER%2CPROFILE_PHOTO_SUGGEST_ALLOWED%2CPYMK_PIC224X224%2CPYMK_PIC288X288%2CPYMK_PIC600X600%2CPYMK_PIC_FULL%2CREF%2CREGISTERED_DATE%2CREGISTERED_DATE_MS%2CRELATIONS%2CRELATIONSHIP%2CSEND_MESSAGE_ALLOWED%2CSHORTNAME%2CSHOW_LOCK%2CSTATUS%2CTOTAL_PHOTOS_COUNT%2CUID%2CURL_CHAT%2CURL_CHAT_MOBILE%2CURL_PROFILE%2CURL_PROFILE_MOBILE%2CVIP&format=json&method=users.getInfo&uids={Query}&sig={Signature}&access_token={OK_API_Details[3]}")
                    JSON_Object = Common.JSON_Handler(OK_Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "Social Media - Person", self.Task_ID, self.Plugin_Name.lower())

                    try:

                        # Bug fix: the original called self.Type(JSON_Response)
                        # - self.Type is the string "User", so this raised a
                        # TypeError that the except below silently swallowed
                        # and no user was ever processed. Use isinstance.
                        if "error_code" not in JSON_Response and isinstance(JSON_Response, list):
                            # These conditions could be so much simpler if the API returned a response code other than 200 for both successful requests and errors.
                            OK_Item = JSON_Response[0]

                            if all(Item in OK_Item for Item in ["first_name", "last_name"]):
                                OK_URL = f"https://{self.Domain}/profile/{Query}"

                                if OK_Item["last_name"] not in ["", " "]:
                                    Full_Name = OK_Item["first_name"] + " " + OK_Item["last_name"]

                                else:
                                    Full_Name = OK_Item["first_name"]

                                Title = f"OK User | {Full_Name}"

                                if OK_URL not in Cached_Data and OK_URL not in Data_to_Cache:
                                    OK_Item_Responses = Common.Request_Handler(OK_URL, Filter=True, Host=f"https://{self.Domain}")
                                    OK_Item_Response = OK_Item_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, OK_Item_Response, OK_URL, self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], OK_URL, Title, self.Plugin_Name.lower())
                                        Data_to_Cache.append(OK_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided, the user ID provided possibly doesn't exist.")

                    except Exception as e:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

                elif self.Type == "Group":
                    # Queries must be numeric OK group IDs.
                    Query = str(int(Query))
                    String_to_Hash = f"application_key={OK_API_Details[1]}fields=ABBREVIATION,ACCESS_TYPE,ADDRESS,ADD_CHANNEL_ALLOWED,ADD_PAID_THEME_ALLOWED,ADD_PHOTOALBUM_ALLOWED,ADD_THEME_ALLOWED,ADD_VIDEO_ALLOWED,ADMIN_ID,ADS_MANAGER_ALLOWED,ADVANCED_PUBLICATION_ALLOWED,AGE_RESTRICTED,BLOCKED,BUSINESS,CALL_ALLOWED,CATALOG_CREATE_ALLOWED,CATEGORY,CHANGE_AVATAR_ALLOWED,CHANGE_TYPE_ALLOWED,CITY,COMMENT_AS_OFFICIAL,COMMUNITY,CONTENT_AS_OFFICIAL,COUNTRY,COVER,COVER_BUTTONS,COVER_SERIES,CREATED_MS,CREATE_ADS_ALLOWED,DELETE_ALLOWED,DESCRIPTION,DISABLE_PHOTO_UPLOAD,EDIT_ALLOWED,EDIT_APPS_ALLOWED,END_DATE,FEED_SUBSCRIPTION,FOLLOWERS_COUNT,FOLLOW_ALLOWED,FRIENDS_COUNT,GRADUATE_YEAR,GROUP_CHALLENGE_CREATE_ALLOWED,GROUP_JOURNAL_ALLOWED,GROUP_NEWS,HOMEPAGE_NAME,HOMEPAGE_URL,INVITATIONS_COUNT,INVITATION_SENT,INVITE_ALLOWED,INVITE_FREE_ALLOWED,JOIN_ALLOWED,JOIN_REQUESTS_COUNT,LEAVE_ALLOWED,LINK_CAROUSEL_ALLOWED,LINK_POSTING_ALLOWED,LOCATION_ID,LOCATION_LATITUDE,LOCATION_LONGITUDE,LOCATION_ZOOM,MAIN_PAGE_TAB,MAIN_PHOTO,MANAGE_MEMBERS,MANAGE_MESSAGING_ALLOWED,MEMBERS_COUNT,MEMBER_STATUS,MENTIONS_SUBSCRIPTION,MENTIONS_SUBSCRIPTION_ALLOWED,MESSAGES_ALLOWED,MESSAGING_ALLOWED,MESSAGING_ENABLED,MIN_AGE,MOBILE_COVER,NAME,NEW_CHATS_COUNT,NOTIFICATIONS_SUBSCRIPTION,ONLINE_PAYMENT_ALLOWED,PAID_ACCESS,PAID_ACCESS_DESCRIPTION,PAID_ACCESS_PRICE,PAID_CONTENT,PAID_CONTENT_DESCRIPTION,PAID_CONTENT_PRICE,PARTNER_PROGRAM_ALLOWED,PARTNER_PROGRAM_STATUS,PENALTY_POINTS_ALLOWED,PHONE,PHOTOS_TAB_HIDDEN,PHOTO_ID,PIC_AVATAR,PIN_NOTIFICATIONS_OFF,POSSIBLE_MEMBERS_COUNT,PREMIUM,PRIVATE,PRODUCTS_TAB_HIDDEN,PRODUCT_CREATE_ALLOWED,PRODUCT_CREATE_SUGGESTED_ALLOWED,PRODUCT_CREATE_ZERO_LIFETIME_ALLOWED,PROFILE_BUTTONS,PROMO_THEME_ALLOWED,PUBLISH_DELAYED_THEME_ALLOWED,REF,REQUEST_SENT,REQUEST_SENT_DATE,RESHARE_ALLOWED,ROLE,SCOPE_ID,SHOP_VISIBLE_ADMIN,SHOP_VISIBLE_PUBLIC,SHORTNAME,START_DATE,STATS_ALLOWED,STATUS,SUBCATEGORY_ID,SUGGEST_THEME_ALLOWED,TAGS,TRANSFERS_ALLOWED,UID,UNFOLLOW_ALLOWED,USER_PAID_ACCESS,USER_PAID_ACCESS_TILL,USER_PAID_CONTENT,USER_PAID_CONTENT_TILL,VIDEO_TAB_HIDDEN,VIEW_MEMBERS_ALLOWED,VIEW_MODERATORS_ALLOWED,VIEW_PAID_THEMES_ALLOWED,YEAR_FROM,YEAR_TOformat=jsonmethod=group.getInfouids={Query}{OK_API_Details[4]}"
                    Signature = hashlib.md5(String_to_Hash.encode()).hexdigest()
                    OK_Response = Common.Request_Handler(f"https://api.{self.Domain}/fb.do?application_key={OK_API_Details[1]}&fields=ABBREVIATION%2CACCESS_TYPE%2CADDRESS%2CADD_CHANNEL_ALLOWED%2CADD_PAID_THEME_ALLOWED%2CADD_PHOTOALBUM_ALLOWED%2CADD_THEME_ALLOWED%2CADD_VIDEO_ALLOWED%2CADMIN_ID%2CADS_MANAGER_ALLOWED%2CADVANCED_PUBLICATION_ALLOWED%2CAGE_RESTRICTED%2CBLOCKED%2CBUSINESS%2CCALL_ALLOWED%2CCATALOG_CREATE_ALLOWED%2CCATEGORY%2CCHANGE_AVATAR_ALLOWED%2CCHANGE_TYPE_ALLOWED%2CCITY%2CCOMMENT_AS_OFFICIAL%2CCOMMUNITY%2CCONTENT_AS_OFFICIAL%2CCOUNTRY%2CCOVER%2CCOVER_BUTTONS%2CCOVER_SERIES%2CCREATED_MS%2CCREATE_ADS_ALLOWED%2CDELETE_ALLOWED%2CDESCRIPTION%2CDISABLE_PHOTO_UPLOAD%2CEDIT_ALLOWED%2CEDIT_APPS_ALLOWED%2CEND_DATE%2CFEED_SUBSCRIPTION%2CFOLLOWERS_COUNT%2CFOLLOW_ALLOWED%2CFRIENDS_COUNT%2CGRADUATE_YEAR%2CGROUP_CHALLENGE_CREATE_ALLOWED%2CGROUP_JOURNAL_ALLOWED%2CGROUP_NEWS%2CHOMEPAGE_NAME%2CHOMEPAGE_URL%2CINVITATIONS_COUNT%2CINVITATION_SENT%2CINVITE_ALLOWED%2CINVITE_FREE_ALLOWED%2CJOIN_ALLOWED%2CJOIN_REQUESTS_COUNT%2CLEAVE_ALLOWED%2CLINK_CAROUSEL_ALLOWED%2CLINK_POSTING_ALLOWED%2CLOCATION_ID%2CLOCATION_LATITUDE%2CLOCATION_LONGITUDE%2CLOCATION_ZOOM%2CMAIN_PAGE_TAB%2CMAIN_PHOTO%2CMANAGE_MEMBERS%2CMANAGE_MESSAGING_ALLOWED%2CMEMBERS_COUNT%2CMEMBER_STATUS%2CMENTIONS_SUBSCRIPTION%2CMENTIONS_SUBSCRIPTION_ALLOWED%2CMESSAGES_ALLOWED%2CMESSAGING_ALLOWED%2CMESSAGING_ENABLED%2CMIN_AGE%2CMOBILE_COVER%2CNAME%2CNEW_CHATS_COUNT%2CNOTIFICATIONS_SUBSCRIPTION%2CONLINE_PAYMENT_ALLOWED%2CPAID_ACCESS%2CPAID_ACCESS_DESCRIPTION%2CPAID_ACCESS_PRICE%2CPAID_CONTENT%2CPAID_CONTENT_DESCRIPTION%2CPAID_CONTENT_PRICE%2CPARTNER_PROGRAM_ALLOWED%2CPARTNER_PROGRAM_STATUS%2CPENALTY_POINTS_ALLOWED%2CPHONE%2CPHOTOS_TAB_HIDDEN%2CPHOTO_ID%2CPIC_AVATAR%2CPIN_NOTIFICATIONS_OFF%2CPOSSIBLE_MEMBERS_COUNT%2CPREMIUM%2CPRIVATE%2CPRODUCTS_TAB_HIDDEN%2CPRODUCT_CREATE_ALLOWED%2CPRODUCT_CREATE_SUGGESTED_ALLOWED%2CPRODUCT_CREATE_ZERO_LIFETIME_ALLOWED%2CPROFILE_BUTTONS%2CPROMO_THEME_ALLOWED%2CPUBLISH_DELAYED_THEME_ALLOWED%2CREF%2CREQUEST_SENT%2CREQUEST_SENT_DATE%2CRESHARE_ALLOWED%2CROLE%2CSCOPE_ID%2CSHOP_VISIBLE_ADMIN%2CSHOP_VISIBLE_PUBLIC%2CSHORTNAME%2CSTART_DATE%2CSTATS_ALLOWED%2CSTATUS%2CSUBCATEGORY_ID%2CSUGGEST_THEME_ALLOWED%2CTAGS%2CTRANSFERS_ALLOWED%2CUID%2CUNFOLLOW_ALLOWED%2CUSER_PAID_ACCESS%2CUSER_PAID_ACCESS_TILL%2CUSER_PAID_CONTENT%2CUSER_PAID_CONTENT_TILL%2CVIDEO_TAB_HIDDEN%2CVIEW_MEMBERS_ALLOWED%2CVIEW_MODERATORS_ALLOWED%2CVIEW_PAID_THEMES_ALLOWED%2CYEAR_FROM%2CYEAR_TO&format=json&method=group.getInfo&uids={Query}&sig={Signature}&access_token={OK_API_Details[3]}")
                    JSON_Object = Common.JSON_Handler(OK_Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                    Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, "Social Media - Group", self.Task_ID, self.Plugin_Name.lower())

                    try:

                        # Bug fix: same self.Type(...) call bug as the User
                        # branch - isinstance is the correct check.
                        if "error_code" not in JSON_Response and isinstance(JSON_Response, list):
                            OK_Item = JSON_Response[0]

                            if all(Item in OK_Item for Item in ["name", "shortname"]):
                                OK_URL = f"https://{self.Domain}/" + OK_Item["shortname"]
                                Full_Name = OK_Item["name"]
                                Title = f"OK Group | {Full_Name}"

                                if OK_URL not in Cached_Data and OK_URL not in Data_to_Cache:
                                    OK_Item_Responses = Common.Request_Handler(OK_URL, Filter=True, Host=f"https://{self.Domain}")
                                    OK_Item_Response = OK_Item_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, OK_Item_Response, OK_URL, self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], OK_URL, Title, self.Plugin_Name.lower())
                                        Data_to_Cache.append(OK_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided, the group ID provided possibly doesn't exist.")

                    except Exception as e:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid self.Type supplied.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
예제 #21
0
def Search(Query_List, Task_ID):
    """
    Run a DNS / DMARC information lookup (via checkdmarc) for each domain in Query_List.

    Query_List: a string or list of domain names (normalised through General.Convert_to_List).
    Task_ID: identifier passed through to General.Connections for result tracking.

    For each previously-unseen domain, the raw DNS details are written to a main
    output file, the domain's homepage is fetched and stored as a query result
    file, and the link is registered via General.Connections. Processed links are
    written back to the plugin cache so repeated runs skip them.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        def _Process_DNS_Item(DNS_Item):
            # Shared per-domain handling for both the multi-domain and the
            # single-domain result shapes returned by checkdmarc.
            Query = DNS_Item['base_domain']
            Output_Dict = json.dumps(DNS_Item, indent=4, sort_keys=True)
            Link = "https://www." + Query
            Title = "DNS Information for " + Query

            if Link not in Data_to_Cache and Link not in Cached_Data:
                Responses = General.Request_Handler(Link, Filter=True, Host=f"https://www.{Query}")
                Response = Responses["Filtered"]
                Main_File = General.Main_File_Create(Directory, Plugin_Name, Output_Dict, Query, The_File_Extensions["Main"])
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, Title, The_File_Extensions["Query"])

                if Output_file:
                    Output_Connections = General.Connections(Query, Plugin_Name, Query, "Domain Information", Task_ID, Concat_Plugin_Name)
                    Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                    Data_to_Cache.append(Link)

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        try:
            DNS_Info = checkdmarc.check_domains(Query_List)

            # checkdmarc returns a list of per-domain dicts for multiple
            # domains but a single dict when only one domain was supplied.
            if len(Query_List) > 1:

                for DNS_Item in DNS_Info:
                    _Process_DNS_Item(DNS_Item)

            else:
                _Process_DNS_Item(DNS_Info)

        except Exception as e:
            # Was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt and discarded the error detail.
            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Error retrieving DNS details. {str(e)}")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #22
0
def Search(Query_List, Task_ID, Type, **kwargs):
    """
    Search the Australian Business Register (abr.business.gov.au).

    Type == "ABN": treats each query as an Australian Business Number and
    fetches its record page directly.
    Type == "ACN": treats each query as a company name, scrapes matching ABN
    records from the search results, capped by the "Limit" kwarg
    (resolved through General.Get_Limit).

    Query_List: string or list of queries (normalised via General.Convert_to_List).
    Task_ID: identifier passed through to General.Connections for result tracking.
    Processed URLs are cached so repeated runs skip them.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "ABN":
                    Main_URL = 'https://abr.business.gov.au/ABN/View?id=' + Query
                    Response = requests.get(Main_URL).text

                    try:

                        if 'Error searching ABN Lookup' not in Response:
                            # Raises ValueError for non-numeric input; caught below
                            # and reported as an invalid ABN query.
                            Query = str(int(Query))

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, "abr.business.gov.au", "Company Details", Task_ID, Plugin_Name)
                                    # NOTE(review): .strip(" | ABN Lookup") strips a character
                                    # set from both ends, not the literal suffix — works for
                                    # these titles but is fragile.
                                    Output_Connections.Output([Output_file], Main_URL, General.Get_Title(Main_URL).strip(" | ABN Lookup"), Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - ABN Lookup returned error.")

                    except Exception:
                        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt propagate.
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ABN Search.")

                elif Type == "ACN":
                    Main_URL = 'https://abr.business.gov.au/Search/Run'
                    Data = {'SearchParameters.SearchText': Query, 'SearchParameters.AllNames': 'true', 'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton': 'Search'}
                    Response = requests.post(Main_URL, data=Data).text
                    Limit = General.Get_Limit(kwargs)

                    try:
                        # A company-name query must contain at least one letter;
                        # purely-numeric input belongs to the ABN branch.
                        ACN_Regex = re.search(r".*[a-zA-Z].*", Query)

                        if ACN_Regex:
                            Main_File = General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            # Captures (ABN, company name) pairs from the hidden result inputs.
                            ABNs_Regex = re.findall(r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,", Response)

                            if ABNs_Regex:
                                Output_Connections = General.Connections(Query, Plugin_Name, "abr.business.gov.au", "Company Details", Task_ID, Plugin_Name)

                                for ABN_URL, ACN in ABNs_Regex:
                                    Full_ABN_URL = 'https://abr.business.gov.au/ABN/View?abn=' + ABN_URL

                                    if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(Limit):
                                        ACN = ACN.rstrip()
                                        Current_Response = requests.get(Full_ABN_URL).text
                                        # (removed leftover debug print of Full_ABN_URL)
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), ACN.replace(' ', '-'), The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output([Main_File, Output_file], Full_ABN_URL, General.Get_Title(Full_ABN_URL).strip(" | ABN Lookup"), Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_ABN_URL)

                                        else:
                                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                        Current_Step += 1

                            else:
                                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Response did not match regular expression.")

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Query did not match regular expression.")

                    except Exception:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid query provided for ACN Search.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid request type.")

            except Exception:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        # Append to the cache when one already exists, otherwise create it.
        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #23
0
def _Process_Posts(Posts, Directory, Query, Local_Plugin_Name, Main_File, Output_Connections, Cached_Data, Data_to_Cache, Limit):
    """
    Shared post-handling loop for the User, Tag and Location search types.

    Fetches each post page (up to Limit previously-unseen posts), writes a
    query result file per post and registers it with Output_Connections.
    Newly-processed URLs are appended to Data_to_Cache in place.
    """
    Current_Step = 0

    for Post in Posts:
        Shortcode = Post["node"]["shortcode"]
        URL = f"https://www.{Domain}/p/{Shortcode}/"
        Title = "IG | " + General.Get_Title(URL)

        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):
            Responses = General.Request_Handler(URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
            Response = Responses["Filtered"]
            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Response, Shortcode, The_File_Extensions["Query"])

            if Output_file:
                Output_Connections.Output([Main_File, Output_file], URL, Title, Plugin_Name.lower())
                Data_to_Cache.append(URL)

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            Current_Step += 1


def Search(Query_List, Task_ID, Type, **kwargs):
    """
    Instagram search plugin.

    Type selects the lookup mode:
      "User"     - posts from a user's timeline.
      "Tag"      - posts carrying a hashtag.
      "Location" - posts for a location ID.
      "Media"    - a single post, where the query is the post's shortcode.

    Query_List: string or list of queries (normalised via General.Convert_to_List).
    Task_ID: identifier passed through to General.Connections for result tracking.
    kwargs may carry a "Limit" (resolved through General.Get_Limit) capping how
    many new posts are processed per query.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type == "User":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.user(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
                Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_owner_to_timeline_media"]["edges"]
                Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Person", Task_ID, Local_Plugin_Name.lower())
                _Process_Posts(Posts, Directory, Query, Local_Plugin_Name, Main_File, Output_Connections, Cached_Data, Data_to_Cache, Limit)

            elif Type == "Tag":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.tag(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
                Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_hashtag_to_media"]["edges"]
                Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Person", Task_ID, Local_Plugin_Name.lower())
                _Process_Posts(Posts, Directory, Query, Local_Plugin_Name, Main_File, Output_Connections, Cached_Data, Data_to_Cache, Limit)

            elif Type == "Location":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                # NOTE(review): bare "location" — presumably imported from
                # instagram_explore; confirm against the file's imports.
                CSE_Response = location(Query)
                CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
                Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"])
                Posts = CSE_Response[0]["edge_location_to_media"]["edges"]
                Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Place", Task_ID, Local_Plugin_Name.lower())
                _Process_Posts(Posts, Directory, Query, Local_Plugin_Name, Main_File, Output_Connections, Cached_Data, Data_to_Cache, Limit)

            elif Type == "Media":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = instagram_explore.media(Query)

                if CSE_Response:
                    CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
                    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, The_File_Extensions["Main"])
                    URL = f"https://www.{Domain}/p/{Query}/"
                    Title = "IG | " + General.Get_Title(URL)

                    if URL not in Cached_Data and URL not in Data_to_Cache:
                        Responses = General.Request_Handler(URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        # Bug fix: the original referenced the undefined name
                        # "Shortcode" here (NameError in this branch); for Media
                        # searches the query itself is the post's shortcode.
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Response, Query, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Social Media - Media", Task_ID, Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file], URL, Title, Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                else:
                    logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid response.")

            else:
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
예제 #24
0
    def Search(self):
        """
        Run a Canadian business-registry search for every query in self.Query_List.

        self.Type selects the mode:
          "CBN" - look up a single Canadian Business Number via the MRAS search API.
          "CCN" - search company names, paging through API results until
                  self.Limit results have been seen or paging ends.

        Per-result output files are created, registered through
        General.Connections, and successfully processed URLs are written back
        to the plugin cache so later runs skip them.
        """

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            # Redirect the root logger into a per-plugin log file.
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            # URLs processed by earlier runs; consulted below to avoid duplicates.
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:

                    if self.Type == "CBN":
                        # Single business-number lookup against the MRAS search API.
                        Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                        Response = Common.Request_Handler(Main_API_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Indented_JSON_Response = JSON_Object.Dump_JSON()
                        Main_Output_File = General.Main_File_Create(
                            Directory, self.Plugin_Name,
                            Indented_JSON_Response, Query,
                            self.The_File_Extensions["Main"])

                        try:

                            # Only proceed when the API reports at least one match.
                            if JSON_Response['count'] != 0:
                                # Raises ValueError for non-numeric queries; the
                                # except below reports it as an invalid CBN query.
                                Query = str(int(Query))
                                Main_URL = f'https://{self.Domain}/search/results?search=%7B{Query}%7D&status=Active'
                                Responses = Common.Request_Handler(
                                    Main_URL,
                                    Filter=True,
                                    Host=f"https://{self.Domain}")
                                Response = Responses["Filtered"]

                                if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        Response, General.Get_Title(Main_URL),
                                        self.The_File_Extensions["Query"])

                                    if Output_file:
                                        # NOTE(review): .strip("beta.") strips the characters
                                        # b/e/t/a/. from both ends, not the "beta." prefix —
                                        # works for this domain but is fragile.
                                        Output_Connections = General.Connections(
                                            Query, self.Plugin_Name,
                                            self.Domain.strip("beta."),
                                            self.Result_Type, self.Task_ID,
                                            self.Plugin_Name)
                                        Output_Connections.Output(
                                            [Main_Output_File, Output_file],
                                            Main_URL,
                                            f"Canadian Business Number {Query}",
                                            self.Concat_Plugin_Name)
                                        Data_to_Cache.append(Main_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                        # TODO(review): bare "except:" hides the real error (and catches
                        # SystemExit/KeyboardInterrupt); consider "except Exception".
                        except:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for CBN Search."
                            )

                    elif self.Type == "CCN":
                        # Company-name search; page through the API until the
                        # requested limit is reached or no "next" page remains.
                        Total_Results = 0
                        Iterator = "page=0"

                        while (self.Limit >
                               Total_Results) and Iterator is not None:
                            Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(
                                Query
                            ) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                            Response = Common.Request_Handler(Main_URL)
                            JSON_Object = Common.JSON_Handler(Response)
                            JSON_Response = JSON_Object.To_JSON_Loads()
                            Total_Results += len(JSON_Response["docs"])

                            # Follow the API's paging token; None ends the while loop.
                            if "paging" in JSON_Response and "next" in JSON_Response.get(
                                    "paging"):
                                Iterator = JSON_Response["paging"]["next"]

                            else:
                                Iterator = None

                            Indented_JSON_Response = JSON_Object.Dump_JSON()

                            try:
                                Main_File = General.Main_File_Create(
                                    Directory, self.Plugin_Name,
                                    Indented_JSON_Response, Query,
                                    self.The_File_Extensions["Main"])
                                Current_Step = 0
                                Output_Connections = General.Connections(
                                    Query, self.Plugin_Name,
                                    self.Domain.strip("beta."),
                                    self.Result_Type, self.Task_ID,
                                    self.Plugin_Name)

                                for JSON_Item in JSON_Response['docs']:

                                    # Only records exposing a business number (BN) are usable.
                                    if JSON_Item.get('BN'):
                                        CCN = JSON_Item['Company_Name']
                                        CBN = str(int(JSON_Item['BN']))

                                        Full_CCN_URL = f'https://{self.Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                        if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(
                                                self.Limit):
                                            Current_Responses = Common.Request_Handler(
                                                Full_CCN_URL,
                                                Filter=True,
                                                Host=f"https://{self.Domain}")
                                            Current_Response = Current_Responses[
                                                "Filtered"]
                                            Output_file = General.Create_Query_Results_Output_File(
                                                Directory, Query,
                                                self.Plugin_Name,
                                                str(Current_Response),
                                                CCN.replace(' ', '-'), self.
                                                The_File_Extensions["Query"])

                                            if Output_file:
                                                Output_Connections.Output(
                                                    [Main_File, Output_file],
                                                    Full_CCN_URL,
                                                    f"Canadian Business Number {CBN} for Query {Query}",
                                                    self.Concat_Plugin_Name)
                                                Data_to_Cache.append(
                                                    Full_CCN_URL)

                                            else:
                                                logging.warning(
                                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                                )

                                            Current_Step += 1

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Unable to retrieve business numbers from the JSON response."
                                        )

                            # TODO(review): bare "except:" hides the real error;
                            # consider "except Exception".
                            except:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for CCN Search."
                                )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type."
                        )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request."
                    )

            # Persist the newly-seen URLs for future runs.
            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
예제 #25
0
    def All_Extensions(self):
        """Probe a query URL against every generic extension and global
        domain-suffix combination to discover potential domain-spoofing hosts.

        Each query URL is split into prefix / body / extension via
        General.Regex_Checker; self.Query_URL is then scheduled on a thread
        pool once per extension+suffix candidate. Hosts collected in
        self.Valid_Hosts are fetched and reported as "Domain Spoof"
        connections, and new results are written to the plugin cache.
        """

        try:
            Local_Plugin_Name = self.Plugin_Name + "-All-Extensions"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)

            # Per-plugin file logging, mirroring the other plugin entry points.
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)

            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            logging.info(
                f"{General.Date()} {__name__.strip('plugins.')} - All Extensions Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    # Groups 4-6 hold the (possibly multi-part) TLD, e.g.
                    # ".co" + ".uk"; concatenate whichever parts matched.
                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} {__name__.strip('plugins.')} - Please provide valid URLs."
                    )
                    # Bug fix: skip this query rather than falling through
                    # with unset or stale URL attributes from a previous
                    # iteration.
                    continue

                Pool = mpool.ThreadPool(
                    int(multiprocessing.cpu_count()) *
                    int(multiprocessing.cpu_count()))
                Pool_Threads = []

                for Extension in self.Generic_Extensions:

                    for suffix in self.Global_Domain_Suffixes:
                        suffix = suffix.replace(".com", "")
                        suffix = suffix.replace(".co", "")

                        # Skip the query's own TLD; every other combination
                        # is a spoof candidate.
                        if not self.URL_Extension == suffix:
                            Thread = Pool.apply_async(self.Query_URL,
                                                      args=(
                                                          self.URL_Body,
                                                          Extension + suffix,
                                                      ))
                            Pool_Threads.append(Thread)

                [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
                # Resource fix: release the pool's worker threads once all
                # scheduled lookups have completed.
                Pool.close()
                Pool.join()

                URL_Domain = self.URL_Body + self.URL_Extension
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])

                if Main_File:

                    for Host in self.Valid_Hosts:
                        # Bug fix: str.strip() removes *characters*, not a
                        # prefix (e.g. "https://shop.x.com" also lost its
                        # leading "sh"); remove the scheme with replace().
                        Current_Domain = Host[0].replace(
                            "https://", "").replace("http://", "")

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_File_List = [Main_File, Output_File]
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_File_List,
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        # An unreachable host is still reported, just without
                        # a per-host output file.
                        except requests.exceptions.ConnectionError:
                            Output_File_List = [Main_File]
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                Output_File_List,
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

                if self.Data_to_Cache:

                    if self.Cached_Data:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "a")

                    else:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #26
0
def Search(Query_List, Task_ID, **kwargs):
    """Query The Pirate Bay (via the tpbc.herokuapp.com JSON proxy) for each
    search term and record every new magnet link as a "Torrent" connection,
    capped by the optional "Limit" keyword argument.
    """

    try:
        New_Cache_Entries = []
        Directory = General.Make_Directory(Plugin_Name.lower())

        # Route this plugin's log output to its own file.
        Root_Logger = logging.getLogger()
        Root_Logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        File_Handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        File_Handler.setLevel(logging.DEBUG)
        File_Handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
        Root_Logger.addHandler(File_Handler)

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Request_Headers = {
                'User-Agent':
                'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0'
            }
            Search_URL = ('https://tpbc.herokuapp.com/search/' +
                          Query.replace(" ", "+") + '/?sort=seeds_desc')
            Raw_Body = requests.get(Search_URL, headers=Request_Headers).text
            Response = json.loads(Raw_Body)
            JSON_Response = json.dumps(Response, indent=4, sort_keys=True)
            Output_file = General.Main_File_Create(Directory, Plugin_Name,
                                                   JSON_Response, Query,
                                                   The_File_Extension)

            if not Output_file:
                continue

            Current_Step = 0
            Output_Connections = General.Connections(
                Query, Plugin_Name, "thepiratebay.org", "Torrent", Task_ID,
                Plugin_Name.lower())

            for Search_Result in Response:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["magnet"]

                # Skip anything already cached or beyond the result limit.
                if (Result_URL in Cached_Data
                        or Result_URL in New_Cache_Entries
                        or Current_Step >= int(Limit)):
                    continue

                if Output_file:
                    Output_Connections.Output([Output_file], Result_URL,
                                              General.Get_Title(Result_URL),
                                              Plugin_Name.lower())
                    New_Cache_Entries.append(Result_URL)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                    )

                Current_Step += 1

        # Append to an existing cache, otherwise create a fresh one.
        Cache_Mode = "a" if Cached_Data else "w"
        General.Write_Cache(Directory, New_Cache_Entries, Plugin_Name,
                            Cache_Mode)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #27
0
    def Search(self):
        """Run each configured query against the Pirate Bay JSON proxy and
        record every new magnet link (up to self.Limit per query) as a
        connection of self.Result_Type, caching new URLs afterwards."""

        try:
            New_Items = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())

            # Route this plugin's log output to its own file.
            Root_Logger = logging.getLogger()
            Root_Logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            File_Handler = logging.FileHandler(
                os.path.join(Directory, Log_File), "w")
            File_Handler.setLevel(logging.DEBUG)
            File_Handler.setFormatter(
                logging.Formatter("%(levelname)s - %(message)s"))
            Root_Logger.addHandler(File_Handler)

            Cache_Object = General.Cache(Directory, self.Plugin_Name)
            Known_Items = Cache_Object.Get_Cache()

            for Query in self.Query_List:
                Raw_Response = Common.Request_Handler(
                    'https://tpbc.herokuapp.com/search/' +
                    Query.replace(" ", "+") + '/?sort=seeds_desc')
                JSON_Object = Common.JSON_Handler(Raw_Response)
                Response = JSON_Object.To_JSON_Loads()
                JSON_Response = JSON_Object.Dump_JSON()
                Main_Output = General.Main_File_Create(
                    Directory, self.Plugin_Name, JSON_Response, Query,
                    self.The_File_Extension)

                if not Main_Output:
                    continue

                Current_Step = 0
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    # Skip anything already cached or beyond the limit.
                    if (Result_URL in Known_Items or Result_URL in New_Items
                            or Current_Step >= int(self.Limit)):
                        continue

                    Query_Output = General.Create_Query_Results_Output_File(
                        Directory, Query, self.Plugin_Name, JSON_Response,
                        Result_Title, self.The_File_Extension)

                    if Query_Output:
                        Output_Connections.Output(
                            [Query_Output], Result_URL,
                            General.Get_Title(Result_URL),
                            self.Plugin_Name.lower())
                        New_Items.append(Result_URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                        )

                    Current_Step += 1

            Cache_Object.Write_Cache(New_Items)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #28
0
def Search(Query_List, Task_ID, **kwargs):
    """Run each query through a Google Custom Search Engine and record any
    result page URLs as potential "Domain Spoof" connections.

    Args:
        Query_List: Query string(s); normalised via General.Convert_to_List.
        Task_ID: Identifier of the task these results belong to.
        **kwargs: Optional "Limit" (positive int) capping results per query.
    """

    Data_to_Cache = []

    # Bug fix: the original left Limit unbound when "Limit" was supplied but
    # not greater than zero, crashing later at the cse().list(num=Limit) call.
    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    # Google_Details: CSE credentials from config — presumably
    # [CSE ID, developer key, ..., API version]; verify against
    # Load_Configuration.
    Google_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Service = build("customsearch",
                        Google_Details[3],
                        developerKey=Google_Details[1])
        # NOTE(review): the CSE API caps num at 10 per request; larger limits
        # are rejected/truncated by the service.
        CSE_Response = Service.cse().list(q=Query,
                                          cx=Google_Details[0],
                                          num=Limit).execute()
        CSE_JSON_Output_Response = json.dumps(CSE_Response,
                                              indent=4,
                                              sort_keys=True)
        CSE_JSON_Response = json.loads(CSE_JSON_Output_Response)

        General.Main_File_Create(Directory, Plugin_Name,
                                 CSE_JSON_Output_Response, Query, ".json")

        for JSON_Response_Items in CSE_JSON_Response['items']:

            try:
                Google_Item = JSON_Response_Items['pagemap']['metatags']

                for Google_Item_Line in Google_Item:
                    Google_Item_URL = Google_Item_Line['og:url']

                    if Google_Item_URL not in Cached_Data and Google_Item_URL not in Data_to_Cache:
                        Path_Regex = re.search(
                            r"https?\:\/\/(www\.)?[\w\d\.]+\.\w{2,3}(\.\w{2,3})?(\.\w{2,3})?\/([\w\d\-\_\/]+)?",
                            Google_Item_URL)

                        if Path_Regex:
                            headers = {
                                'Content-Type': 'application/json',
                                'User-Agent':
                                'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                                # Bug fix: was the malformed media type
                                # "ext/html".
                                'Accept':
                                'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                                'Accept-Language': 'en-US,en;q=0.5'
                            }
                            Google_Item_Response = requests.get(
                                Google_Item_URL, headers=headers).text
                            Output_Path = Path_Regex.group(4).replace("/", "-")
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                Google_Item_Response, Output_Path,
                                The_File_Extension)

                            if Output_file:
                                General.Connections(
                                    Output_file, Query, Plugin_Name,
                                    Google_Item_URL, "google.com",
                                    "Domain Spoof", Task_ID,
                                    General.Get_Title(Google_Item_URL),
                                    Plugin_Name.lower())

                        # Cached even when the path regex fails so the same
                        # URL is not reprocessed on later runs.
                        Data_to_Cache.append(Google_Item_URL)

            except Exception as e:
                logging.info(
                    str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                    + str(e))

    # Append to an existing cache, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #29
0
def Search(Query_List, Task_ID, **kwargs):
    """Search the iTunes (Apple App Store) Search API for software matching
    each query and record matching developer pages as "Application"
    connections.

    Args:
        Query_List: Query string(s); normalised via General.Convert_to_List.
        Task_ID: Identifier of the task these results belong to.
        **kwargs: Optional "Limit" forwarded to General.Get_Limit.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)

        # Route this plugin's log output to its own file.
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        # Two-letter store country code from configuration.
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = General.Request_Handler(Main_URL)

            # Idiom fix: narrowed from a bare except so KeyboardInterrupt /
            # SystemExit are no longer swallowed.
            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Response = json.loads(Response)
            Main_File = General.Main_File_Create(
                Directory, "iTunes",
                json.dumps(JSON_Response, indent=4, sort_keys=True), Query,
                The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Responses = General.Request_Handler(
                            JSON_Object['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Object_Response = JSON_Object_Responses[
                            "Filtered"]

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
                            # Extract the numeric developer ID from the
                            # artist page URL for the output filename.
                            iTunes_Regex = re.search(
                                r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+",
                                JSON_Object['artistViewUrl'])

                            if iTunes_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query,
                                    Plugin_Name, JSON_Object_Response,
                                    iTunes_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Object['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        # Bug fix: Write_Cache was called as (Directory, Cached_Data,
        # Data_to_Cache, Plugin_Name), which does not match the
        # (Directory, data, plugin, mode) convention used by every other
        # plugin in this file; append to an existing cache, else create it.
        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
def Search(Query_List, Task_ID, Type, **kwargs):
    """Search SEC EDGAR company filings.

    Type "CIK": look up a single company by its Central Index Key.
    Type "ACN": search by company name and follow each matching CIK page,
    up to the optional "Limit" keyword argument (default 10).
    Results are written to output files and registered as "Data Leakage"
    connections; processed URLs are cached to avoid rework.
    """

    Data_to_Cache = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    # Route this plugin's log output to its own file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "CIK":
                Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + Query + '&owner=exclude&count=40&hidefilings=0'
                Response = requests.get(Main_URL).text

                try:

                    if 'No matching CIK.' not in Response:
                        # A non-numeric CIK raises ValueError here and is
                        # reported by the handler below.
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Response,
                                General.Get_Title(Main_URL),
                                The_File_Extension)

                            if Output_file:
                                General.Connections(
                                    Output_file, Query, Plugin_Name, Main_URL,
                                    "sec.gov", "Data Leakage", Task_ID,
                                    General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                # Idiom fix: narrowed from a bare except.
                except Exception:
                    logging.warning(
                        General.Date() + " - " + __name__.strip('plugins.') +
                        " - Invalid query provided for CIK Search.")

            elif Type == "ACN":
                Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?company=' + Query + '&owner=exclude&action=getcompany'
                Response = requests.get(Main_URL).text

                # Bug fix: the original left Limit unbound when "Limit" was
                # supplied but not greater than zero, producing a NameError
                # that the broad handler below misreported as an invalid
                # query.
                if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
                    Limit = int(kwargs["Limit"])

                else:
                    Limit = 10

                try:
                    # Company-name searches must contain at least one letter.
                    ACN = re.search(r".*[a-zA-Z].*", Query)

                    if ACN:
                        General.Main_File_Create(Directory, Plugin_Name,
                                                 Response, Query,
                                                 The_File_Extension)
                        Current_Step = 0
                        # Capture (CIK, company name) pairs from the results
                        # table.
                        CIKs_Regex = re.findall(
                            r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>",
                            Response)

                        if CIKs_Regex:
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, "sec.gov", "Data Leakage",
                                Task_ID, Plugin_Name)

                            for CIK_URL, ACN in CIKs_Regex:
                                Full_CIK_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + CIK_URL + '&owner=exclude&count=40&hidefilings=0'

                                if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    Current_Response = requests.get(
                                        Full_CIK_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Plugin_Name,
                                        str(Current_Response),
                                        ACN.replace(' ', '-'),
                                        The_File_Extension)

                                    if Output_file:
                                        Output_Connections.Output(
                                            Output_file, Full_CIK_URL,
                                            General.Get_Title(Full_CIK_URL))
                                        Data_to_Cache.append(Full_CIK_URL)
                                        Current_Step += 1

                        else:
                            logging.warning(
                                General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Response did not match regular expression."
                            )

                    else:
                        logging.warning(
                            General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Query did not match regular expression.")

                # Idiom fix: narrowed from a bare except.
                except Exception:
                    logging.warning(
                        General.Date() + " - " + __name__.strip('plugins.') +
                        " - Invalid query provided for ACN Search.")

            else:
                logging.warning(General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Invalid request type.")

        # Idiom fix: narrowed from a bare except.
        except Exception:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Failed to make request.")

    # Append to an existing cache, otherwise create a fresh one.
    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")