Example #1
0
def Search(Query_List, Task_ID):
    """
    Look up each IP address in Query_List against the IP Stack API and
    store the JSON result as an output file plus a connection record.

    Query_List: str or list of queries; normalised via General.Convert_to_List.
    Task_ID: identifier of the task the results are attached to.
    Returns None; results are written to disk and the connection store.
    Any exception is caught and logged rather than propagated.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            # Only IP addresses are valid queries for this plugin.
            if General.Regex_Checker(Query, "IP"):
                API_Key = Load_Configuration()
                Search_Response = General.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Response = json.loads(Search_Response)
                JSON_Output_Response = json.dumps(JSON_Response,
                                                  indent=4,
                                                  sort_keys=True)
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())
                Result_URL = f"https://{Domain}/?{Query}"

                # Fix: deduplicate on the value that is actually cached.
                # The original tested "Query" against the cache but appended
                # Result_URL, so previously cached entries never matched.
                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL,
                                                  Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        # File creation returns a falsy value when the file
                        # already exists or could not be written.
                        logging.warning(
                            f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                        )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        # Fix: str.strip() removes a *character set*, not a prefix, so
        # __name__.strip('plugins.') mangles module names whose edges share
        # those characters; replace() drops the package prefix safely.
        logging.warning(
            f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
Example #2
0
def Search(Query_List, Task_ID):
    """
    Search for subdomain certificates for each domain in Query_List and
    store the filtered HTML response as an output file plus a connection
    record.

    Query_List: str or list of queries; normalised via General.Convert_to_List.
    Task_ID: identifier of the task the results are attached to.
    Returns None; results are written to disk and the connection store.
    Any exception is caught and logged rather than propagated.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            # Only domain-shaped queries are valid for this plugin.
            if General.Regex_Checker(Query, "Domain"):
                Request = f"https://{Domain}/?q={Query}"
                Responses = General.Request_Handler(Request,
                                                    Accept_XML=True,
                                                    Accept_Language_EN_US=True,
                                                    Filter=True,
                                                    Host=f"https://{Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]

                # The site renders this exact table cell when no
                # certificates are found for the query.
                if "<TD class=\"outer\"><I>None found</I></TD>" not in Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            # NOTE(review): SSLMate_Regex is not defined in
                            # this function; unless it is a module-level
                            # match object this raises NameError, which is
                            # now reported below instead of being swallowed.
                            # Confirm whether a regex match on Query was
                            # intended here.
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query,
                                Plugin_Name.lower(), Filtered_Response,
                                SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain, "Certificate",
                                    Task_ID, Plugin_Name.lower())
                                Output_Connections.Output(
                                    [Output_file], Request,
                                    f"Subdomain Certificate Search for {Query}",
                                    Plugin_Name.lower())
                                Data_to_Cache.append(Request)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                                )

                        # Fix: narrow the bare "except:" and surface the
                        # error detail so failures are no longer masked.
                        except Exception as e:
                            logging.warning(
                                f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create file. {str(e)}"
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.replace('plugins.', '')} - Query does not exist."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        # Fix: replace() instead of strip() - strip removes a character
        # set from both ends, not the "plugins." prefix.
        logging.warning(
            f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
Example #3
0
def _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, Connection_Type):
    """
    Fetch one Threat Crowd API report, write its main/query output files
    and connection record, and return the URL to cache on success.

    Returns the API URL when output was produced, otherwise None (no
    results from the API, or the output file could not be created).
    """
    Response = General.Request_Handler(URL)
    JSON_Response = json.loads(Response)

    # The API signals "no results" with response_code 0.
    if int(JSON_Response.get("response_code")) == 0:
        logging.info(f"{General.Date()} - {__name__.replace('plugins.', '')} - Provided query returned no results.")
        return None

    JSON_Output_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)
    Permalink = JSON_Response.get("permalink")
    # NOTE(review): the filtered request targets the API URL, not the
    # permalink, mirroring the original code - confirm this is intentional.
    Permalink_Responses = General.Request_Handler(URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{Domain}")
    Permalink_Response = Permalink_Responses["Filtered"]
    Title = "Threat Crowd | " + General.Get_Title_Requests_Module(Permalink).replace(" | Threatcrowd.org Open Source Threat Intelligence", "")
    Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Permalink_Response, Query, The_File_Extensions["Query"])
    Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, Connection_Type, Task_ID, Local_Plugin_Name.lower())

    if Output_file:
        Output_Connections.Output([Main_File, Output_file], Permalink, Title, Plugin_Name.lower())
        return URL

    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")
    return None


def Search(Query_List, Task_ID, Type):
    """
    Query the Threat Crowd API for each entry in Query_List and store the
    results as output files plus connection records.

    Query_List: str or list of queries; normalised via General.Convert_to_List.
    Task_ID: identifier of the task the results are attached to.
    Type: one of "Email", "Domain", "IP Address", "AV", "Virus Report";
        selects the API endpoint (and input validation for the first three).
    Returns None; any exception is caught and logged rather than propagated.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            # Each branch builds the endpoint URL and connection type for
            # its query kind, then delegates to the shared report helper
            # (previously five copy-pasted blocks).
            Local_Plugin_Name = Plugin_Name + "-" + Type

            if Type == "Email":

                if General.Regex_Checker(Query, Type):
                    URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}"
                    Cacheable_URL = _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, "Account")

                    if Cacheable_URL:
                        Data_to_Cache.append(Cacheable_URL)

                else:
                    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to match query to email regular expression.")

            elif Type == "Domain":

                if General.Regex_Checker(Query, Type):
                    URL = f"https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={Query}"
                    Cacheable_URL = _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, "Domain Information")

                    if Cacheable_URL:
                        Data_to_Cache.append(Cacheable_URL)

                else:
                    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to match query to domain regular expression.")

            elif Type == "IP Address":

                if General.Regex_Checker(Query, "IP"):
                    URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}"
                    # Fix: label IP results as IP address information; the
                    # original reused "Domain Information" here, a copy-paste
                    # slip (the IP plugin uses "IP Address Information").
                    Cacheable_URL = _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, "IP Address Information")

                    if Cacheable_URL:
                        Data_to_Cache.append(Cacheable_URL)

                else:
                    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to match query to IP address regular expression.")

            elif Type == "AV":
                URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}"
                Cacheable_URL = _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, "Virus")

                if Cacheable_URL:
                    Data_to_Cache.append(Cacheable_URL)

            elif Type == "Virus Report":
                URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}"
                Cacheable_URL = _Run_Threat_Crowd_Report(Directory, Task_ID, Query, Local_Plugin_Name, URL, "Virus Report")

                if Cacheable_URL:
                    Data_to_Cache.append(Cacheable_URL)

            else:
                logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Invalid type provided.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        # Fix: replace() instead of strip() - strip removes a character
        # set from both ends, not the "plugins." prefix.
        logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
Example #4
0
def Search(Query_List, Task_ID):
    """
    Query the Cert Spotter certificate issuance API for each domain in
    Query_List and store the JSON results as output files plus connection
    records.

    Query_List: str or list of queries; normalised via General.Convert_to_List.
    Task_ID: identifier of the task the results are attached to.
    Returns None; results are written to disk and the connection store.
    Any exception is caught and logged rather than propagated.
    """

    try:
        Data_to_Cache = []
        # Configuration toggles whether subdomains are included in the search.
        Subdomains = Load_Configuration()
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Subdomains:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'

            else:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&expand=dns_names&expand=issuer&expand=cert'

            Response = General.Request_Handler(Request)
            JSON_Response = json.loads(Response)

            # An "exists" key in the response indicates an API error object
            # rather than a list of issuances.
            if 'exists' not in JSON_Response:

                if JSON_Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:

                            if General.Regex_Checker(Query, "Domain"):
                                # NOTE(review): SSLMate_Regex is not defined in
                                # this function; unless it is a module-level
                                # match object this raises NameError, which is
                                # now reported below instead of being
                                # swallowed. Confirm whether a regex match on
                                # Query was intended here.
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name.lower(), json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Certificate", Task_ID, Plugin_Name.lower())
                                    Data_to_Cache.append(Request)

                                    if Subdomains:
                                        Output_Connections.Output([Output_file], Request, f"Subdomain Certificate Search for {Query}", Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output([Output_file], Request, f"Domain Certificate Search for {Query}", Plugin_Name.lower())

                                else:
                                    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist.")

                            else:
                                logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to match regular expression.")

                        # Fix: narrow the bare "except:" and surface the error
                        # detail so failures are no longer masked.
                        except Exception as e:
                            logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create file. {str(e)}")

                else:
                    logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - No response.")

            else:
                logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - Query does not exist.")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        # Fix: replace() instead of strip() - strip removes a character set
        # from both ends, not the "plugins." prefix.
        logging.warning(f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
Example #5
0
def Search(Query_List, Task_ID):
    """
    Detect the web technology stack of each URL in Query_List via the
    builtwith library and the Built With website, storing the JSON result
    and the filtered site response as output files plus connection records.

    Query_List: str or list of queries; normalised via General.Convert_to_List.
    Task_ID: identifier of the task the results are attached to.
    Returns None; results are written to disk and the connection store.
    Any exception is caught and logged rather than propagated.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            URL_Regex = General.Regex_Checker(Query, "URL")

            if URL_Regex:
                BW_Info = builtwith(Query)

                if BW_Info:
                    BW_JSON_Output = json.dumps(BW_Info,
                                                indent=4,
                                                sort_keys=True)
                    # Reassemble the bare domain (body + extension groups)
                    # from the URL regex match for use in file names and the
                    # Built With search URL.
                    URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        URL_Extension = URL_Regex.group(4) + URL_Regex.group(
                            5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        URL_Extension = URL_Regex.group(4) + URL_Regex.group(5)

                    else:
                        URL_Extension = URL_Regex.group(4)

                    Query_Domain = URL_Body + URL_Extension
                    Title = f"Built With | {Query_Domain}"
                    Main_File = General.Main_File_Create(
                        Directory, Plugin_Name, BW_JSON_Output, Query_Domain,
                        The_File_Extensions["Main"])
                    BW_Search_URL = f"https://{Domain}/{Query_Domain}"
                    Responses = General.Request_Handler(
                        BW_Search_URL, Filter=True, Host=f"https://{Domain}")
                    Response = Responses["Filtered"]
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain,
                        "Web Application Architecture", Task_ID,
                        Plugin_Name.lower())

                    if BW_Search_URL not in Cached_Data and BW_Search_URL not in Data_to_Cache:
                        # NOTE(review): Query is passed as the output file's
                        # title here while Title is used for the connection
                        # record - confirm whether Title was intended.
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Response, Query,
                            The_File_Extensions['Query'])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      BW_Search_URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(BW_Search_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.info(
                        f"{General.Date()} - {__name__.replace('plugins.', '')} - Failed to get result for provided query."
                    )

            else:
                logging.info(
                    f"{General.Date()} - {__name__.replace('plugins.', '')} - Invalid query provided."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        # Fix: replace() instead of strip() - strip removes a character set
        # from both ends, not the "plugins." prefix.
        logging.warning(
            f"{General.Date()} - {__name__.replace('plugins.', '')} - {str(e)}")
Example #6
0
    def Character_Switch(self, Alphabets, Comprehensive_Search):

        try:
            Local_Plugin_Name = self.Plugin_Name + "-Character-Switch"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            logging.info(
                f"{General.Date()} {__name__.strip('plugins.')} - Character Switching Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Please provide valid URLs."
                    )

                logging.info(
                    f'{General.Date()} - Provided domain body - {self.URL_Body}'
                )
                URL_List = list(self.URL_Body.lower())
                Local_Plugin_Name = f"{Local_Plugin_Name}-{Alphabets}"

                if Alphabets == "Latin":

                    if not Comprehensive_Search:

                        if len(self.URL_Body) > 15:
                            logging.error(
                                f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Condensed punycode domain fuzzing only allows a maximum of 15 characters."
                            )
                            return None

                        else:
                            Altered_URLs = Rotor.Search(
                                URL_List,
                                English_Upper=False,
                                Numbers=False,
                                Special_Characters=False,
                                Asian=False,
                                Latin=True,
                                Middle_Eastern=False,
                                Native_American=False,
                                North_African=False,
                                Latin_Alternatives=True,
                                Comprehensive=False)

                    else:

                        if len(self.URL_Body) > 10:
                            logging.error(
                                f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 10 characters in length. Comprehensive punycode domain fuzzing searching only allows a maximum of 10 characters."
                            )
                            return None

                        else:
                            Altered_URLs = Rotor.Search(
                                URL_List,
                                English_Upper=False,
                                Numbers=False,
                                Special_Characters=False,
                                Asian=False,
                                Latin=True,
                                Middle_Eastern=False,
                                Native_American=False,
                                North_African=False,
                                Latin_Alternatives=True,
                                Comprehensive=True)

                elif Alphabets == "Asian":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Punycode domain fuzzing for Asian alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=True,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=False,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "Middle Eastern":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Punycode domain fuzzing for Middle Eastern alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=True,
                                                    Native_American=False,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "Native American":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Punycode domain fuzzing for Asian alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=True,
                                                    North_African=False,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                elif Alphabets == "North African":

                    if len(self.URL_Body) > 10:
                        logging.error(
                            f"{General.Date()} - {__name__.strip('plugins.')} - The length of the body of the provided query: {Query} is greater than 15 characters in length. Punycode domain fuzzing for Middle Eastern alphabets only allows a maximum of 10 characters."
                        )
                        return None

                    else:
                        Altered_URLs = Rotor.Search(URL_List,
                                                    English_Upper=False,
                                                    Numbers=False,
                                                    Special_Characters=False,
                                                    Asian=False,
                                                    Latin=False,
                                                    Middle_Eastern=False,
                                                    Native_American=False,
                                                    North_African=True,
                                                    Latin_Alternatives=False,
                                                    Comprehensive=False)

                logging.info(
                    f'{General.Date()} - Generated domain combinations - {", ".join(Altered_URLs)}'
                )
                Pool = mpool.ThreadPool(
                    int(multiprocessing.cpu_count()) *
                    int(multiprocessing.cpu_count()))
                Pool_Threads = []

                for Altered_URL in Altered_URLs:

                    if not Altered_URL == self.URL_Body:
                        Thread = Pool.apply_async(self.Query_URL,
                                                  args=(
                                                      Altered_URL,
                                                      self.URL_Extension,
                                                  ))
                        Pool_Threads.append(Thread)

                [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
                logging.info(f'{General.Date()} {Directory}')
                URL_Domain = self.URL_Body + self.URL_Extension
                logging.info(URL_Domain)
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])
                logging.info(Main_File)

                if Main_File:

                    for Host in self.Valid_Hosts:
                        Current_Domain = Host[0].strip('https://').strip(
                            'http://')

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_File_List = [Main_File, Output_File]
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_File_List,
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        except requests.exceptions.ConnectionError:
                            Output_File_List = [Main_File]
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                Output_File_List,
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

            if self.Cached_Data:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, self.Data_to_Cache,
                                    Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
# Example #7
# 0
    def All_Extensions(self):
        """Fuzz the query URL's body against every generic extension combined
        with every global domain suffix, probe the generated domains in a
        thread pool, and write output files / connections for any spoofed
        domain that responds.

        Reads self.Query_List, self.Generic_Extensions,
        self.Global_Domain_Suffixes, self.Valid_Hosts, self.Valid_Results,
        self.Data_to_Cache and self.Cached_Data; results are accumulated by
        self.Query_URL run on the pool threads.
        """

        try:
            Local_Plugin_Name = self.Plugin_Name + "-All-Extensions"
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, Local_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            self.Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
            logging.info(
                f"{General.Date()} {__name__.strip('plugins.')} - All Extensions Selected."
            )
            self.Query_List = General.Convert_to_List(self.Query_List)

            for Query in self.Query_List:
                URL_Regex = General.Regex_Checker(Query, "URL")

                if URL_Regex:
                    # Split the URL into scheme prefix, body (domain name) and
                    # extension (TLD plus optional extra capture groups).
                    self.URL_Prefix = URL_Regex.group(1)
                    self.URL_Body = URL_Regex.group(3)

                    if URL_Regex.group(5) and URL_Regex.group(6):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5) + URL_Regex.group(6)

                    elif URL_Regex.group(5):
                        self.URL_Extension = URL_Regex.group(
                            4) + URL_Regex.group(5)

                    else:
                        self.URL_Extension = URL_Regex.group(4)

                else:
                    logging.warning(
                        f"{General.Date()} {__name__.strip('plugins.')} - Please provide valid URLs."
                    )

                # cpu_count() ** 2 worker threads: the probes are I/O-bound,
                # so heavy oversubscription is intentional.
                Pool = mpool.ThreadPool(
                    int(multiprocessing.cpu_count()) *
                    int(multiprocessing.cpu_count()))
                Pool_Threads = []

                for Extension in self.Generic_Extensions:

                    for suffix in self.Global_Domain_Suffixes:
                        # Suffix lists contain ".com"/".co" variants; drop them
                        # so only the country/extension part remains.
                        suffix = suffix.replace(".com", "")
                        suffix = suffix.replace(".co", "")

                        # Skip the query's own extension - probing the original
                        # domain would be a false positive.
                        if not self.URL_Extension == suffix:
                            Thread = Pool.apply_async(self.Query_URL,
                                                      args=(
                                                          self.URL_Body,
                                                          Extension + suffix,
                                                      ))
                            Pool_Threads.append(Thread)

                # Block until every probe has completed.
                [Pool_Thread.wait() for Pool_Thread in Pool_Threads]
                URL_Domain = self.URL_Body + self.URL_Extension
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name,
                    "\n".join(self.Valid_Results), self.URL_Body,
                    self.The_File_Extensions["Main"])

                if Main_File:

                    for Host in self.Valid_Hosts:
                        # NOTE: str.strip() removes any of the given
                        # *characters* from both ends (so e.g. a trailing "s"
                        # in the domain would be eaten); remove the scheme
                        # prefix explicitly instead.
                        Current_Domain = Host[0].replace("https://", "",
                                                         1).replace(
                                                             "http://", "", 1)

                        try:
                            Current_Responses = General.Request_Handler(
                                Host[0],
                                Filter=True,
                                Host=Host[0],
                                Risky_Plugin=True)
                            Current_Response = Current_Responses["Filtered"]
                            Output_File = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Current_Response, Current_Domain,
                                self.The_File_Extensions["Query"])

                            if Output_File:
                                Output_File_List = [Main_File, Output_File]
                                Output_Connections = General.Connections(
                                    Query, Local_Plugin_Name, Current_Domain,
                                    "Domain Spoof", self.Task_ID,
                                    Local_Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_File_List,
                                    Host[0],
                                    f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                    Directory_Plugin_Name=self.
                                    Concat_Plugin_Name)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                        except requests.exceptions.ConnectionError:
                            # The spoofed domain resolved earlier but refused
                            # this follow-up request; still record the
                            # connection using only the main file.
                            Output_File_List = [Main_File]
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Current_Domain,
                                "Domain Spoof", self.Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output(
                                Output_File_List,
                                Host[0],
                                f"Domain Spoof for {URL_Domain} - {Current_Domain} : {Host[1]}",
                                Directory_Plugin_Name=self.Concat_Plugin_Name)

                # Append to the cache when one already exists, otherwise start
                # a fresh cache file. NOTE(review): this runs once per query
                # (inside the loop) - presumably intentional, but other
                # variants of this plugin write the cache after the loop.
                if self.Data_to_Cache:

                    if self.Cached_Data:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "a")

                    else:
                        General.Write_Cache(Directory, self.Data_to_Cache,
                                            Local_Plugin_Name, "w")

        except Exception as e:
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
# Example #8
# 0
def Search(Query_List, Task_ID, Type, **kwargs):
    """Query the hunter.io API for each item in Query_List and record results.

    Type == "Domain": domain_search - e-mail addresses found for a domain.
    Type == "Email":  email_verifier - sources referencing an e-mail address.
    Any other Type is silently ignored for that query.

    Query_List -- string or list of queries (normalised via Convert_to_List).
    Task_ID    -- task identifier recorded on each output connection.
    kwargs     -- may carry a result limit, extracted by General.Get_Limit.
    """

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        # Renamed from "Shodan_API_Key": this is the hunter.io plugin, the old
        # name was a copy-paste leftover from another plugin.
        Hunter_API_Key = Load_Configuration()
        API_Session = PyHunter(Hunter_API_Key)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        # Hoist the int() conversion out of the per-result loops below.
        Limit = int(General.Get_Limit(kwargs))

        for Query in Query_List:

            try:

                if Type == "Domain":

                    if General.Regex_Checker(Query, "Domain"):
                        Local_Plugin_Name = Plugin_Name + "-Domain"
                        API_Response = API_Session.domain_search(Query)
                        JSON_Output_Response = json.dumps(API_Response, indent=4, sort_keys=True)

                        # Only proceed when the API returned both a domain and
                        # at least one associated e-mail address.
                        if API_Response["domain"] and API_Response['emails']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["emails"]:
                                Current_Email_Address = Hunter_Item["value"]
                                Current_Hunter_Item_Host = f"https://{Domain}/verify/{Current_Email_Address}"
                                Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Domain}")
                                Filtered_Response = Current_Hunter_Item_Responses["Filtered"]
                                Title = "Hunter | " + Current_Email_Address

                                # Skip addresses already cached or queued, and
                                # stop producing output once Limit is reached.
                                if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < Limit:
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Email_Address)

                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                elif Type == "Email":

                    if General.Regex_Checker(Query, "Email"):
                        Local_Plugin_Name = Plugin_Name + "-Email"
                        API_Response = API_Session.email_verifier(Query)
                        JSON_Output_Response = json.dumps(API_Response, indent=4, sort_keys=True)

                        if API_Response["email"] and API_Response['sources']:
                            Main_File = General.Main_File_Create(Directory, Local_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                            Output_Connections = General.Connections(Query, Local_Plugin_Name, Domain, "Account Source", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["sources"]:
                                Current_Hunter_Item_Host = Hunter_Item["uri"]
                                Current_Hunter_Item_Domain = Hunter_Item["domain"]

                                # Pick the Host header scheme to match the
                                # source URI; fall back to an unfiltered fetch
                                # when the URI carries no recognised scheme.
                                if 'http://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"http://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                elif 'https://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = General.Request_Handler(Current_Hunter_Item_Host, Filter=True, Host=f"https://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses["Filtered"]

                                else:
                                    Filtered_Response = General.Request_Handler(Current_Hunter_Item_Host)

                                Title = "Hunter | " + Current_Hunter_Item_Host

                                if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < Limit:
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, Filtered_Response, Current_Hunter_Item_Host, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], Current_Hunter_Item_Host, Title, Plugin_Name.lower())
                                        Data_to_Cache.append(Current_Hunter_Item_Host)

                                    else:
                                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

            except Exception as e:
                # One failed query must not abort the remaining queries.
                logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to complete task - {str(e)}")

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")