Example #1
def CSV_to_JSON(Query, CSV_Data):

    try:

        if isinstance(CSV_Data, list):
            JSON_Data = {Query: []}

            # Skip the header row; each remaining line is "domain,ip".
            for CSV_Line in CSV_Data[1:]:
                Split_CSV_Line = CSV_Line.split(",")
                JSON_Data[Query].append({
                    "Domain": Split_CSV_Line[0],
                    "IP Address": Split_CSV_Line[1]
                })

            Indented_Registration_Response = Common.JSON_Handler(
                JSON_Data).Dump_JSON()
            return Indented_Registration_Response

        else:
            return None

    except Exception:
        logging.warning(
            f"{Common.Date()} - General Library - Failed to convert provided CSV data to JSON."
        )
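For comparison, the same header-skipping conversion can be sketched with only the standard library; json.dumps stands in here for the project's Common.JSON_Handler(...).Dump_JSON() wrapper, which is assumed to behave like an indented json.dumps.

import json

def csv_lines_to_json(query, csv_lines):
    # Minimal sketch: skip the header row, then map "domain,ip" pairs
    # into the same {query: [{"Domain": ..., "IP Address": ...}]} shape.
    records = []
    for line in csv_lines[1:]:
        fields = line.split(",")
        records.append({"Domain": fields[0], "IP Address": fields[1]})
    return json.dumps({query: records}, indent=2)

print(csv_lines_to_json("example.com", ["Domain,IP", "example.com,93.184.216.34"]))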
Example #2
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(
                        JSON_Response, JSON_Output_Response,
                        f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, HTML_Output_File_Data,
                        Title.replace(" ", "-"),
                        The_File_Extensions["Main_Converted"])

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL,
                                                  Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
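A minimal, standalone sketch of the ipstack lookup this plugin performs, using requests directly; the api.ipstack.com host is inferred from the URL pattern above, and the access key is a placeholder supplied by the caller.

import requests

def ipstack_lookup(ip_address, api_key, host="api.ipstack.com"):
    # Sketch of the GET request built by the plugin above; raises for
    # HTTP errors instead of silently returning an error payload.
    response = requests.get(f"http://{host}/{ip_address}",
                            params={"access_key": api_key}, timeout=10)
    response.raise_for_status()
    return response.json()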
Example #3
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                if Common.Regex_Handler(Query, Type="Email"):
                    Link = f"https://{self.Domain}/home/verify-as-guest/{Query}"
                    JSON_Response = Common.Request_Handler(Link)
                    JSON_Object = Common.JSON_Handler(JSON_Response)

                    if JSON_Object.Is_JSON():
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Table_JSON = {}

                        for Key, Value in JSON_Response.items():

                            if Key != "response":
                                Table_JSON[Key] = Value

                            else:

                                for Det_Key, Det_Val in JSON_Response["response"].items():
                                    Table_JSON[Det_Key] = Det_Val

                        Filter_JSON = [Table_JSON]
                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                        if Query not in Cached_Data and Query not in Data_to_Cache:
                            Title = f"Email Verification | {Query}"
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, JSON_Output_Response, Title, self.The_File_Extensions["Main"])
                            HTML_Output_File_Data = General.JSONDict_to_HTML(Filter_JSON, JSON_Output_Response, f"Email Verification Query {Query}")
                            HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, HTML_Output_File_Data, Title, self.The_File_Extensions["Main_Converted"])

                            if Output_file and HTML_Output_File:
                                Output_Connections.Output([Output_file, HTML_Output_File], Link, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
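The flattening of the nested "response" object into a single-level dict (the Table_JSON loop above) can be illustrated on its own; the sample payload below is invented for the sketch.

def flatten_response(json_response):
    # Copy top-level keys, but splice the nested "response" dict's
    # items into the same level, mirroring the Table_JSON loop.
    table = {}
    for key, value in json_response.items():
        if key != "response":
            table[key] = value
        else:
            table.update(value)
    return [table]

print(flatten_response({"status": "success", "response": {"email": "a@b.c", "result": "deliverable"}}))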
Example #4
    def General_Pull(self, Handle, Directory, API):

        try:
            Data_to_Cache = []
            JSON_Response = []
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()
            Latest_Tweets = API.user_timeline(screen_name=Handle, count=self.Limit)

            for Tweet in Latest_Tweets:

                try:
                    JSON_Response.append({
                        'id': Tweet.id,
                        'text': Tweet.text,
                        'author_name': Tweet.user.screen_name,
                        'url': Tweet.entities['urls'][0]["expanded_url"]
                    })

                except (IndexError, KeyError):
                    # Tweet has no expanded URL; keep only the basic fields.
                    JSON_Response.append({
                        'id': Tweet.id,
                        'text': Tweet.text,
                        'author_name': Tweet.user.screen_name
                    })

            JSON_Output = Common.JSON_Handler(JSON_Response).Dump_JSON()
            Output_Connections = General.Connections(Handle, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())
            Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output, Handle, self.The_File_Extensions["Main"])

            for JSON_Item in JSON_Response:

                if all(Item in JSON_Item for Item in ['id', 'url', 'text']):
                    Link = JSON_Item['url']

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Title = "Twitter | " + JSON_Item['text']
                        Item_Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{self.Domain}")
                        Item_Response = Item_Responses["Filtered"]

                        Output_file = General.Create_Query_Results_Output_File(Directory, Handle, self.Plugin_Name, Item_Response, str(JSON_Item['id']), self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Link, Title, self.Plugin_Name.lower())
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Output file not returned.")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Insufficient parameters provided.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
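A hedged sketch of the per-tweet record building, operating on a plain dict (for instance a tweepy Status converted with ._json, which is an assumption about the object shape) and treating the expanded URL as optional rather than relying on an exception fallback.

def tweet_to_record(status):
    # Build the same id/text/author_name record as above; add "url"
    # only when an expanded URL is actually present in the entities.
    record = {
        "id": status["id"],
        "text": status["text"],
        "author_name": status["user"]["screen_name"],
    }
    urls = status.get("entities", {}).get("urls", [])
    if urls and urls[0].get("expanded_url"):
        record["url"] = urls[0]["expanded_url"]
    return record

print(tweet_to_record({"id": 1, "text": "hi", "user": {"screen_name": "demo"},
                       "entities": {"urls": [{"expanded_url": "https://example.com"}]}}))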
Example #5
def Convert_to_JSON(Data):
    Data = str(Data)
    Flickr_Regex = Common.Regex_Handler(Data, Custom_Regex=r"\[(.+)\]")

    if Flickr_Regex:
        New_Data = Flickr_Regex.group(1).replace("...", "").replace("id=b", "'id': ").replace("title=b", "'title': ").replace("(", "{").replace(")", "}").replace("\'}", "}").replace("}", "\'}")
        New_Data = New_Data.replace("Photo", "")
        New_Data = f"[{New_Data}]"
        # Parse the rebuilt literal safely instead of using eval().
        import ast
        New_Data = ast.literal_eval(New_Data)
        JSON_Object = Common.JSON_Handler(New_Data)
        New_Data = JSON_Object.Dump_JSON()
        return New_Data
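A self-contained illustration of the repr-to-JSON rewriting on an invented flickr_api-style string, using ast.literal_eval instead of eval(); the sample input and the shortened replace chain are assumptions for the sketch.

import ast
import json
import re

sample = "[Photo(id=b'123', title=b'Sunset'), Photo(id=b'456', title=b'Harbour')]"
match = re.search(r"\[(.+)\]", sample)

if match:
    text = (match.group(1)
            .replace("Photo", "")
            .replace("id=b", "'id': ")
            .replace("title=b", "'title': ")
            .replace("(", "{")
            .replace(")", "}"))
    # literal_eval only accepts Python literals, so a malformed rewrite
    # raises instead of executing arbitrary code.
    records = ast.literal_eval(f"[{text}]")
    print(json.dumps(records, indent=2))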
Example #6
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Headers_Custom = {"Referer": f"https://www.doingbusiness.org/en/data/exploreeconomies/{Query}"}
            Main_URL = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{Query}"
            Doing_Business_Response = Common.Request_Handler(Main_URL, Optional_Headers=Headers_Custom)
            JSON_Object = Common.JSON_Handler(Doing_Business_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()

            if 'message' not in JSON_Response:
                Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                Item_URL = f"https://www.{Domain}/en/data/exploreeconomies/{Query}"
                Title = f"Doing Business | {Query}"
                Current_Doing_Business_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"https://www.{Domain}")
                Current_Doing_Business_Response = Current_Doing_Business_Responses["Filtered"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Current_Doing_Business_Response, Query, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Economic Details", Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
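A reduced sketch of the Doing Business API call, with the endpoint and Referer header taken from the snippet above; requests stands in for Common.Request_Handler.

import requests

def doing_business_lookup(economy_slug):
    # Sketch of the GetEconomyByURL request made above; the QA endpoint
    # is copied from the plugin and may need updating.
    url = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{economy_slug}"
    headers = {"Referer": f"https://www.doingbusiness.org/en/data/exploreeconomies/{economy_slug}"}
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    return response.json()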
Example #7
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Response = Common.Request_Handler('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc')
            JSON_Object = Common.JSON_Handler(Response)
            Response = JSON_Object.To_JSON_Loads()
            JSON_Response = JSON_Object.Dump_JSON()
            Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Response, Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
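The cache-aware, limit-bounded selection loop can be shown in isolation; the field names ("title", "magnet") follow the response handling above, and the return value is simplified to (title, magnet) pairs.

def select_new_results(search_results, cached_urls, limit=10):
    # Keep magnet links that are not cached yet, stopping once the
    # requested limit of new results has been collected.
    selected, seen = [], set(cached_urls)
    for result in search_results:
        if len(selected) >= int(limit):
            break
        url = result["magnet"]
        if url not in seen:
            selected.append((result["title"], url))
            seen.add(url)
    return selected

print(select_new_results([{"title": "demo", "magnet": "magnet:?xt=urn:btih:abc"}], [], limit=5))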
Example #8
def Load_Configuration():
    logging.info(
        f"{Common.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:

        with open(Common.Set_Configuration_File()) as JSON_File:
            JSON_Object = Common.JSON_Handler(JSON_File)
            Configuration_Data = JSON_Object.To_JSON_Load()
            Pinterest_Details = Configuration_Data["inputs"][
                Plugin_Name.lower()]

            if Pinterest_Details['oauth_token']:
                return Pinterest_Details['oauth_token']

            else:
                return None

    except Exception:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to load configuration details."
        )
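A minimal loader sketch assuming the configuration layout implied above, i.e. {"inputs": {"pinterest": {"oauth_token": "..."}}}; the exact schema is not shown in the snippet.

import json

def load_oauth_token(config_path, plugin_name="pinterest"):
    # Read the configuration file and return the plugin's OAuth token,
    # or None when the key is missing or empty.
    with open(config_path) as config_file:
        configuration = json.load(config_file)
    return configuration["inputs"][plugin_name].get("oauth_token") or None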
Example #9
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Email_Rep_API_Key = Load_Configuration()

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="Email"):
                API = EmailRep(Email_Rep_API_Key)
                JSON_Output_Response = API.query(Query)
                Link = f"https://{Domain}/{Query}"
                JSON_Object = Common.JSON_Handler(JSON_Output_Response)
                JSON_Output_Response = JSON_Object.Dump_JSON()
                JSON_Response = JSON_Object.To_JSON_Loads()

                if JSON_Response["reputation"] != "none":
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():

                        if Key != "details":
                            Table_JSON[Key] = Value

                        else:

                            for Det_Key, Det_Val in JSON_Response["details"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Email Information", Task_ID, Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Responses = Common.Request_Handler(Link, Filter=True, Host=f"https://{Domain}")
                        Filtered_Response = Responses["Filtered"]
                        Title = f"Email Reputation | {Query}"
                        Main_File = General.Main_File_Create(Directory, Concat_Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Concat_Plugin_Name, Filtered_Response, Title, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file], Link, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
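A hedged sketch of the underlying emailrep.io lookup that the EmailRep client wraps; the header name and host are assumptions about the public service rather than details taken from this snippet.

import requests

def email_reputation(email_address, api_key=None, host="emailrep.io"):
    # Query the reputation endpoint directly; the API key header is
    # optional for the sketch and should be provided for real use.
    headers = {"Key": api_key} if api_key else {}
    response = requests.get(f"https://{host}/{email_address}", headers=headers, timeout=10)
    response.raise_for_status()
    return response.json()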
Example #10
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Hunter_API_Key = Load_Configuration()
        API_Session = PyHunter(Hunter_API_Key)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:

                if Type == "Domain":

                    if Common.Regex_Handler(Query, Type="Domain"):
                        Local_Plugin_Name = Plugin_Name + "-Domain"
                        API_Response = API_Session.domain_search(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["domain"] and API_Response['emails']:
                            Main_File = General.Main_File_Create(
                                Directory, Local_Plugin_Name,
                                JSON_Output_Response, Query,
                                The_File_Extensions["Main"])
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Domain, "Account",
                                Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["emails"]:
                                Current_Email_Address = Hunter_Item["value"]
                                Current_Hunter_Item_Host = f"https://{Domain}/verify/{Current_Email_Address}"
                                Current_Hunter_Item_Responses = Common.Request_Handler(
                                    Current_Hunter_Item_Host,
                                    Filter=True,
                                    Host=f"https://{Domain}")
                                Filtered_Response = Current_Hunter_Item_Responses[
                                    "Filtered"]
                                Title = "Hunter | " + Current_Email_Address

                                if Current_Email_Address not in Cached_Data and Current_Email_Address not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Local_Plugin_Name,
                                        Filtered_Response,
                                        Current_Hunter_Item_Host,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Hunter_Item_Host, Title,
                                            Plugin_Name.lower())
                                        Data_to_Cache.append(
                                            Current_Email_Address)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

                elif Type == "Email":

                    if Common.Regex_Handler(Query, Type="Email"):
                        Local_Plugin_Name = Plugin_Name + "-Email"
                        API_Response = API_Session.email_verifier(Query)
                        JSON_Object = Common.JSON_Handler(API_Response)
                        JSON_Output_Response = JSON_Object.Dump_JSON()

                        if API_Response["email"] and API_Response['sources']:
                            Main_File = General.Main_File_Create(
                                Directory, Local_Plugin_Name,
                                JSON_Output_Response, Query,
                                The_File_Extensions["Main"])
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Domain,
                                "Account Source", Task_ID, Plugin_Name.lower())
                            Current_Step = 0

                            for Hunter_Item in API_Response["sources"]:
                                Current_Hunter_Item_Host = Hunter_Item["uri"]
                                Current_Hunter_Item_Domain = Hunter_Item[
                                    "domain"]

                                if 'http://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(
                                        Current_Hunter_Item_Host,
                                        Filter=True,
                                        Host=
                                        f"http://{Current_Hunter_Item_Domain}")
                                    Filtered_Response = Current_Hunter_Item_Responses[
                                        "Filtered"]

                                elif 'https://' in Current_Hunter_Item_Host:
                                    Current_Hunter_Item_Responses = Common.Request_Handler(
                                        Current_Hunter_Item_Host,
                                        Filter=True,
                                        Host=
                                        f"https://{Current_Hunter_Item_Domain}"
                                    )
                                    Filtered_Response = Current_Hunter_Item_Responses[
                                        "Filtered"]

                                else:
                                    Filtered_Response = Common.Request_Handler(
                                        Current_Hunter_Item_Host)

                                Title = "Hunter | " + Current_Hunter_Item_Host

                                if Current_Hunter_Item_Host not in Cached_Data and Current_Hunter_Item_Host not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Local_Plugin_Name,
                                        Filtered_Response,
                                        Current_Hunter_Item_Host,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Hunter_Item_Host, Title,
                                            Plugin_Name.lower())
                                        Data_to_Cache.append(
                                            Current_Hunter_Item_Host)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

            except Exception as e:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to complete task - {str(e)}"
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
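The http/https branching used to pick the Host filter value can be condensed as follows; this sketch derives the value from the source URI itself rather than from the separate "domain" field used above.

from urllib.parse import urlparse

def host_header_for(source_uri):
    # Return the scheme and network location to pass as the Host filter,
    # or None when the URI has no usable scheme.
    parsed = urlparse(source_uri)
    if parsed.scheme in ("http", "https") and parsed.netloc:
        return f"{parsed.scheme}://{parsed.netloc}"
    return None

print(host_header_for("https://example.com/page"))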
Example #11
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Naver_Details = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            if int(self.Limit) > 100:
                logging.fatal(
                    f"{Common.Date()} - {self.Logging_Plugin_Name} - This plugin does not support limits over 100."
                )
                return None

            for Query in self.Query_List:
                URL_Query = urllib.parse.quote(Query)
                URL = f"https://openapi.{self.Domain}/v1/search/webkr.json?query={URL_Query}&display={str(self.Limit)}&sort=sim"
                Headers = {
                    "X-Naver-Client-Id": Naver_Details[0],
                    "X-Naver-Client-Secret": Naver_Details[1]
                }
                Naver_Response = Common.Request_Handler(
                    URL, Optional_Headers=Headers)
                JSON_Object = Common.JSON_Handler(Naver_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, self.Plugin_Name, JSON_Output_Response, Query,
                    self.The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                if JSON_Response.get('items'):

                    for Naver_Item_Link in JSON_Response['items']:

                        try:

                            if 'title' in Naver_Item_Link and 'link' in Naver_Item_Link:
                                Naver_URL = Naver_Item_Link['link']
                                Title = Naver_Item_Link['title']
                                Title = f"Naver | {Title}"

                                if Naver_URL not in Cached_Data and Naver_URL not in Data_to_Cache:
                                    Naver_Item_Responses = Common.Request_Handler(
                                        Naver_URL,
                                        Filter=True,
                                        Host=f"https://www.{self.Domain}")
                                    Naver_Item_Response = Naver_Item_Responses[
                                        "Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        Naver_Item_Response, Naver_URL,
                                        self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Naver_URL, Title,
                                            self.Plugin_Name.lower())
                                        Data_to_Cache.append(Naver_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                        except Exception as e:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}"
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
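A standalone sketch of the Naver Open API call; openapi.naver.com is inferred from the URL pattern above, and the client credentials are placeholders for the values returned by Load_Configuration().

import urllib.parse
import requests

def naver_web_search(query, client_id, client_secret, limit=10):
    # Issue the webkr.json search with the required client headers and
    # return the raw "items" list from the JSON response.
    url = ("https://openapi.naver.com/v1/search/webkr.json"
           f"?query={urllib.parse.quote(query)}&display={int(limit)}&sort=sim")
    headers = {"X-Naver-Client-Id": client_id, "X-Naver-Client-Secret": client_secret}
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    return response.json().get("items", [])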
Example #12
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            YouTube_Details = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                YouTube_Handler = discovery.build(
                    YouTube_Details[1],
                    YouTube_Details[2],
                    developerKey=YouTube_Details[0],
                    cache_discovery=False)
                Search_Response = YouTube_Handler.search().list(
                    q=Query,
                    type='video',
                    part='id,snippet',
                    maxResults=self.Limit,
                ).execute()
                JSON_Output_Response = Common.JSON_Handler(
                    Search_Response.get('items', [])).Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, self.Plugin_Name, JSON_Output_Response, Query,
                    self.The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                for Search_Result in Search_Response.get('items', []):
                    Full_Video_URL = f"https://www.{self.Domain}/watch?v=" + Search_Result[
                        'id']['videoId']
                    Search_Video_Responses = Common.Request_Handler(
                        Full_Video_URL,
                        Filter=True,
                        Host=f"https://www.{self.Domain}")
                    Search_Video_Response = Search_Video_Responses["Filtered"]
                    Title = "YouTube | " + Search_Result['snippet']['title']

                    if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, self.Plugin_Name,
                            Search_Video_Response,
                            Search_Result['id']['videoId'],
                            self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Full_Video_URL, Title,
                                                      self.Plugin_Name.lower())
                            Data_to_Cache.append(Full_Video_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
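Extracting the watch URLs is the core of the loop above and can be shown as a small helper over the search response's items list.

def video_urls(search_items, host="www.youtube.com"):
    # Rebuild the watch URL from each result's id.videoId field,
    # skipping items that are not videos.
    return [f"https://{host}/watch?v=" + item["id"]["videoId"]
            for item in search_items if item.get("id", {}).get("videoId")]

print(video_urls([{"id": {"videoId": "dQw4w9WgXcQ"}, "snippet": {"title": "demo"}}]))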
Example #13
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = Common.Request_Handler(Main_API_URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Indented_JSON_Response = JSON_Object.Dump_JSON()
                    Main_Output_File = General.Main_File_Create(
                        Directory, Plugin_Name, Indented_JSON_Response, Query,
                        The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = Common.Request_Handler(
                                Main_URL,
                                Filter=True,
                                Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    General.Get_Title(Main_URL),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name,
                                        Domain.strip("beta."),
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Main_Output_File, Output_file],
                                        Main_URL,
                                        f"Canadian Business Number {Query}",
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except Exception:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search."
                        )

                elif Type == "CCN":
                    Total_Results = 0
                    Iterator = "page=0"

                    while Limit > Total_Results and Iterator is not None:
                        Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(
                            Query
                        ) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                        Response = Common.Request_Handler(Main_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Total_Results += len(JSON_Response["docs"])

                        if "paging" in JSON_Response and "next" in JSON_Response.get(
                                "paging"):
                            Iterator = JSON_Response["paging"]["next"]

                        else:
                            Iterator = None

                        Indented_JSON_Response = JSON_Object.Dump_JSON()
                        Limit = General.Get_Limit(Limit)

                        try:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Indented_JSON_Response,
                                Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain.strip("beta."),
                                "Company Details", Task_ID, Plugin_Name)

                            for JSON_Item in JSON_Response['docs']:

                                if JSON_Item.get('BN'):
                                    CCN = JSON_Item['Company_Name']
                                    CBN = str(int(JSON_Item['BN']))

                                    Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                    if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(
                                            Limit):
                                        Current_Responses = Common.Request_Handler(
                                            Full_CCN_URL,
                                            Filter=True,
                                            Host=f"https://{Domain}")
                                        Current_Response = Current_Responses[
                                            "Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            CCN.replace(' ', '-'),
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_CCN_URL,
                                                f"Canadian Business Number {CBN} for Query {Query}",
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_CCN_URL)

                                        else:
                                            logging.warning(
                                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response."
                                    )

                        except Exception:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid request type."
                    )

            except Exception:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
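The CCN branch's pagination can be sketched on its own; the endpoint is copied from the snippet above and requests stands in for Common.Request_Handler.

import urllib.parse
import requests

def paged_company_search(keyword, limit=10):
    # Follow the "paging" -> "next" token until enough documents have
    # been collected or no further pages remain.
    base = ("https://searchapi.mrasservice.ca/Search/api/v1/search"
            f"?fq=keyword:%7B{urllib.parse.quote(keyword)}%7D+Status_State:Active"
            "&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc")
    docs, iterator = [], "page=0"
    while len(docs) < int(limit) and iterator is not None:
        payload = requests.get(f"{base}&{iterator}", timeout=10).json()
        docs.extend(payload.get("docs", []))
        iterator = payload.get("paging", {}).get("next")
    return docs[:int(limit)]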
Example #14
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "User":
                from instagramy import InstagramUser
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramUser(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])

                if not CSE_Response.is_private:
                    Posts = CSE_Response.posts
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain,
                        "Social Media - Person", Task_ID,
                        Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Post in Posts:
                        URL = Post['post_url']
                        Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                                "").replace("/", "")
                        Title = "IG | " + General.Get_Title(URL, Requests=True)

                        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Response = Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name, Response,
                                Shortcode, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], URL, Title,
                                    Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                else:
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - The provided user's profile is private and cannot be scraped."
                    )

            elif Type == "Tag":
                from instagramy import InstagramHashTag
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramHashTag(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = vars(
                    CSE_Response)['tag_data']['edge_hashtag_to_media']['edges']
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post['node']['shortcode']
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL, Requests=True)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = Common.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Post":
                from instagramy import InstagramPost
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramPost(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Place",
                    Task_ID, Local_Plugin_Name.lower())
                URL = CSE_Response.url
                Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                        "").replace("/", "")
                Title = "IG | " + General.Get_Title(URL, Requests=True)

                if URL not in Cached_Data and URL not in Data_to_Cache:
                    Responses = Common.Request_Handler(
                        URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Response = Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Local_Plugin_Name, Response,
                        Shortcode, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  URL, Title,
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
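The Tag branch's traversal of the hashtag data can be shown as a small helper over the same structure (edge_hashtag_to_media -> edges -> node -> shortcode); the sample input is invented.

def hashtag_post_urls(tag_data, host="www.instagram.com"):
    # Pull each post's shortcode out of the hashtag edges and rebuild
    # the post URL exactly as the Tag branch above does.
    edges = tag_data["edge_hashtag_to_media"]["edges"]
    return [f"https://{host}/p/{edge['node']['shortcode']}/" for edge in edges]

print(hashtag_post_urls({"edge_hashtag_to_media": {"edges": [{"node": {"shortcode": "ABC123"}}]}}))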
Example #15
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Vehicle Details",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            for State in States:
                Post_URL = f'https://{Domain}/bin/wesfarmers/search/vehiclerego'
                data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car 
Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
                headers = {
                    'Content-Type': 'text/plain;charset=UTF-8',
                    'Accept': '*/*',
                    'Accept-Encoding': 'gzip, deflate, br',
                    'Referer': f'https://{Domain}/motor/get-quote',
                    'Origin': f'https://{Domain}',
                    'Host': Domain
                }
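                # Submit the registration lookup; a successful response is JSON containing a "vehicles" list with the matched make and model.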
                Registration_Response = Common.Request_Handler(
                    Post_URL,
                    Method="POST",
                    Data=data,
                    Optional_Headers=headers)
                JSON_Object = Common.JSON_Handler(Registration_Response)
                Registration_Response = JSON_Object.To_JSON_Loads()
                Indented_JSON_Response = JSON_Object.Dump_JSON()

                try:
                    Title = "Vehicle Registration | " + Registration_Response[
                        'vehicles'][0]['make'] + " " + Registration_Response[
                            'vehicles'][0]['model']
                    Item_URL = Post_URL + "?" + Query

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query,
                            Plugin_Name, Indented_JSON_Response,
                            Title.replace(" ",
                                          "-"), The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(
                            Registration_Response["vehicles"],
                            Indented_JSON_Response,
                            f"Vehicle Registration Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name,
                            HTML_Output_File_Data, Title.replace(" ", "-"),
                            The_File_Extensions["Main_Converted"])

                        if Output_file and HTML_Output_File:
                            Output_Connections.Output(
                                [Output_file, HTML_Output_File], Item_URL,
                                Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                except:
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - No result found for given query {Query} for state {State}."
                    )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #16
0
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Yandex_Details = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Yandex_Response = Common.Request_Handler(f"https://{Domain}/search/xml?user={Yandex_Details[0]}&key={Yandex_Details[1]}&query={Query}&l10n=en&sortby=rlv&filter=none&maxpassages=five&groupby=attr% 3D% 22% 22.mode% 3Dflat.groups-on-page% 3D{str(Limit)}.docs-in-group% 3D1")
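            # The Yandex search API replies in XML; xmltodict converts it to a dictionary so it can be dumped as JSON alongside the other plugins' output.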
            JSON_Response = xmltodict.parse(Yandex_Response)
            JSON_Object = Common.JSON_Handler(JSON_Response)
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Search Result", Task_ID, Plugin_Name.lower())
            New_JSON_Response = Recursive_Dict_Check(["yandexsearch", "response", "results", "grouping", "group"], JSON_Response)

            if New_JSON_Response:

                for Yandex_Item_Line in New_JSON_Response:

                    try:

                        if Recursive_Dict_Check(["doc", "url"], Yandex_Item_Line):
                            Yandex_Item_Line = Yandex_Item_Line['doc']
                            Yandex_URL = Yandex_Item_Line['url']
                            Title = Recursive_Dict_Check(["title", "#text"], Yandex_Item_Line)

                            if Title:
                                Title = f"Yandex | {Title}"

                            else:
                                Title = General.Get_Title(Yandex_URL)
                                Title = f"Yandex | {Title}"

                            if Yandex_URL not in Cached_Data and Yandex_URL not in Data_to_Cache:
                                Yandex_Item_Responses = Common.Request_Handler(Yandex_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://{Domain}")
                                Yandex_Item_Response = Yandex_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Yandex_Item_Response, Yandex_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File, Output_file], Yandex_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(Yandex_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    except Exception as e:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #17
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                if self.Type == "Email":

                    if Common.Regex_Handler(Query, Type=self.Type):
                        Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                        URL = f"https://www.threatcrowd.org/searchApi/v2/email/report/?email={Query}"
                        Response = Common.Request_Handler(URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()

                        if int(JSON_Response.get("response_code")) != 0:
                            JSON_Output_Response = JSON_Object.Dump_JSON()
                            Permalink = JSON_Response.get("permalink")
                            Permalink_Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{self.Domain}")
                            Permalink_Response = Permalink_Responses[
                                "Filtered"]
                            Title = "Threat Crowd | " + General.Get_Title(
                                Permalink, Requests=True
                            ).replace(
                                " | Threatcrowd.org Open Source Threat Intelligence",
                                "")
                            Main_File = General.Main_File_Create(
                                Directory, Local_Plugin_Name,
                                JSON_Output_Response, Query,
                                self.The_File_Extensions["Main"])
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Permalink_Response, Query,
                                self.The_File_Extensions["Query"])
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, self.Domain,
                                "Account", self.Task_ID,
                                Local_Plugin_Name.lower())

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Permalink, Title,
                                    self.Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                        else:
                            logging.info(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to email regular expression."
                        )

                elif self.Type == "Domain":

                    if Common.Regex_Handler(Query, Type=self.Type):
                        Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                        URL = f"https://www.threatcrowd.org/searchApi/v2/self.Domain/report/?self.Domain={Query}"
                        Response = Common.Request_Handler(URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()

                        if int(JSON_Response.get("response_code")) != 0:
                            JSON_Output_Response = JSON_Object.Dump_JSON()
                            Permalink = JSON_Response.get("permalink")
                            Permalink_Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{self.Domain}")
                            Permalink_Response = Permalink_Responses[
                                "Filtered"]
                            Title = "Threat Crowd | " + General.Get_Title(
                                Permalink, Requests=True
                            ).replace(
                                " | Threatcrowd.org Open Source Threat Intelligence",
                                "")
                            Main_File = General.Main_File_Create(
                                Directory, Local_Plugin_Name,
                                JSON_Output_Response, Query,
                                self.The_File_Extensions["Main"])
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Permalink_Response, Query,
                                self.The_File_Extensions["Query"])
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, self.Domain,
                                "Domain Information", self.Task_ID,
                                Local_Plugin_Name.lower())

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Permalink, Title,
                                    self.Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                        else:
                            logging.info(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to self.Domain regular expression."
                        )

                elif self.Type == "IP Address":

                    if Common.Regex_Handler(Query, Type="IP"):
                        Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                        URL = f"https://www.threatcrowd.org/searchApi/v2/ip/report/?ip={Query}"
                        Response = Common.Request_Handler(URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()

                        if int(JSON_Response.get("response_code")) != 0:
                            JSON_Output_Response = JSON_Object.Dump_JSON()
                            Permalink = JSON_Response.get("permalink")
                            Permalink_Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{self.Domain}")
                            Permalink_Response = Permalink_Responses[
                                "Filtered"]
                            Title = "Threat Crowd | " + General.Get_Title(
                                Permalink, Requests=True
                            ).replace(
                                " | Threatcrowd.org Open Source Threat Intelligence",
                                "")
                            Main_File = General.Main_File_Create(
                                Directory, Local_Plugin_Name,
                                JSON_Output_Response, Query,
                                self.The_File_Extensions["Main"])
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Permalink_Response, Query,
                                self.The_File_Extensions["Query"])
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, self.Domain,
                                "Domain Information", self.Task_ID,
                                Local_Plugin_Name.lower())

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Permalink, Title,
                                    self.Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                        else:
                            logging.info(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match query to IP address regular expression."
                        )

                elif self.Type == "AV":
                    Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/antivirus/report/?antivirus={Query}"
                    Response = Common.Request_Handler(URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = Common.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{self.Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title(
                            Permalink, Requests=True
                        ).replace(
                            " | Threatcrowd.org Open Source Threat Intelligence",
                            "")
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name,
                            Permalink_Response, Query,
                            self.The_File_Extensions["Query"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain, "Virus",
                            self.Task_ID, Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Permalink, Title,
                                                      self.Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

                    else:
                        logging.info(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results."
                        )

                elif self.Type == "Virus Report":
                    Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                    URL = f"https://www.threatcrowd.org/searchApi/v2/file/report/?resource={Query}"
                    Response = Common.Request_Handler(URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()

                    if int(JSON_Response.get("response_code")) != 0:
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Permalink = JSON_Response.get("permalink")
                        Permalink_Responses = Common.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{self.Domain}")
                        Permalink_Response = Permalink_Responses["Filtered"]
                        Title = "Threat Crowd | " + General.Get_Title(
                            Permalink, Requests=True
                        ).replace(
                            " | Threatcrowd.org Open Source Threat Intelligence",
                            "")
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name,
                            Permalink_Response, Query,
                            self.The_File_Extensions["Query"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain,
                            "Virus Report", self.Task_ID,
                            Local_Plugin_Name.lower())

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Permalink, Title,
                                                      self.Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

                    else:
                        logging.info(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Provided query returned no results."
                        )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #18
0
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "pin":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = f"https://api.{Domain}/v1/pins/{Query}/?access_token=" + Load_Configuration(
                ) + "&fields=id%2Clink%2Cnote%2Curl%2Ccreated_at%2Cmedia%2Coriginal_link%2Cmetadata%2Ccounts%2Ccolor%2Cboard%2Cattribution"
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get(
                        'message'
                ) != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, Plugin_Name, JSON_Response, Query,
                        The_File_Extensions["Main"])
                    Result_Title = "Pinterest | " + Search_Response["data"][
                        "metadata"]["link"]["title"]
                    Result_URL = Search_Response["data"]["url"]
                    Search_Result_Response = Common.Request_Handler(Result_URL)

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name,
                            Search_Result_Response, Result_Title,
                            The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Local_Plugin_Name, Domain,
                                "Social Media - Media", Task_ID,
                                Local_Plugin_Name.lower())
                            Output_Connections.Output([Main_File, Output_file],
                                                      Result_URL, Result_Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                    )

            elif Type == "board":
                Local_Plugin_Name = Plugin_Name + "-" + Type
                Request_URL = "https://api.pinterest.com/v1/boards/" + Query + "/pins/?access_token=" + Load_Configuration(
                ) + "&fields=id%2Clink%2Cnote%2Curl%2Coriginal_link%2Cmetadata%2Cmedia%2Cimage%2Ccreator%2Ccreated_at%2Ccounts%2Ccolor%2Cboard%2Cattribution&limit=" + str(
                    Limit) + ""
                Search_Response = Common.Request_Handler(Request_URL)
                JSON_Object = Common.JSON_Handler(Search_Response)
                Search_Response = JSON_Object.To_JSON_Loads()

                if Search_Response.get(
                        'message'
                ) != "You have exceeded your rate limit. Try again later.":
                    JSON_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, Plugin_Name, JSON_Response, Query,
                        The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, "pinterest.com",
                        "Social Media - Page", Task_ID,
                        Local_Plugin_Name.lower())
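                    # Current_Step counts how many board pins have been written out so that the Limit argument is honoured.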
                    Current_Step = 0

                    for Response in Search_Response["data"]:
                        Result_Title = "Pinterest | " + Response["note"]
                        Result_URL = Response["url"]
                        Search_Result_Response = Common.Request_Handler(
                            Result_URL)

                        if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name,
                                Search_Result_Response, Result_Title,
                                The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Result_URL,
                                    Result_Title, Plugin_Name.lower())
                                Data_to_Cache.append(Result_URL)
                                Current_Step += 1

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                    )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #19
0
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Pull_URL = f"https://{Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={Limit}"
                JSON_Object = Common.JSON_Handler(
                    Common.Request_Handler(Pull_URL))
                Results = JSON_Object.To_JSON_Loads()
                Indented_Results = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "Phishing", Task_ID,
                    Plugin_Name.lower())
                Main_File = General.Main_File_Create(
                    Directory, Plugin_Name, Indented_Results, Query,
                    The_File_Extensions["Main"])

                for Result in Results:
                    Current_Link = Result["url"]
                    Current_Domain = Current_Link.strip("https://")
                    Current_Domain = Current_Domain.strip("http://")
                    Current_Domain = Current_Domain.strip("www.")
                    Current_Title = Result["title"]

                    try:
                        Current_Result = Common.Request_Handler(
                            Current_Link,
                            Filter=True,
                            Risky_Plugin=True,
                            Host=Current_Link)
                        Current_Result_Filtered = Current_Result["Filtered"]
                        Response_Regex = Common.Regex_Handler(
                            Current_Result,
                            Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")
                        Output_file_Query = Query.replace(" ", "-")

                        if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Output_file_Query, Plugin_Name,
                                Current_Result_Filtered, Current_Domain,
                                The_File_Extensions["Query"])

                            if Output_file:

                                if Response_Regex:
                                    Current_Title = Response_Regex.group(1)
                                    Current_Title = Current_Title.strip()
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Current_Link,
                                        Current_Title, Plugin_Name.lower())

                                else:

                                    if not "Phishstats" in Current_Title:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Link, Current_Title,
                                            Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Link,
                                            General.Get_Title(Current_Link),
                                            Plugin_Name.lower())

                                Data_to_Cache.append(Current_Link)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    except:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request for result, link may no longer be available."
                        )

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #20
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:
                    Pull_URL = f"https://{self.Domain}:2096/api/phishing?_where=(url,like,~{Query}~)&_sort=-id&_size={self.Limit}"
                    JSON_Object = Common.JSON_Handler(
                        Common.Request_Handler(Pull_URL))
                    Results = JSON_Object.To_JSON_Loads()
                    Indented_Results = JSON_Object.Dump_JSON()
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain, self.Result_Type,
                        self.Task_ID, self.Plugin_Name.lower())
                    Main_File = General.Main_File_Create(
                        Directory, self.Plugin_Name, Indented_Results, Query,
                        self.The_File_Extensions["Main"])

                    for Result in Results:
                        Current_Link = Result["url"]
                        Current_Domain = urlparse(Current_Link).netloc
                        Current_Title = Result["title"]

                        try:
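                            # Resolve the phishing domain first; hosts that no longer resolve are skipped instead of being requested.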
                            Response = socket.gethostbyname(Current_Domain)

                        except:
                            logging.info(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve hostname {Current_Domain} to an IP address. Skipping."
                            )
                            Response = None

                        if Response:
                            Current_Result = Common.Request_Handler(
                                Current_Link,
                                Filter=True,
                                Risky_Plugin=True,
                                Host=Current_Link)
                            Current_Result_Filtered = Current_Result[
                                "Filtered"]
                            Response_Regex = Common.Regex_Handler(
                                Current_Result,
                                Custom_Regex=r"\<title\>([^\<\>]+)\<\/title\>")
                            Output_file_Query = Query.replace(" ", "-")

                            if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Output_file_Query,
                                    self.Plugin_Name, Current_Result_Filtered,
                                    Current_Domain,
                                    self.The_File_Extensions["Query"])

                                if Output_file:

                                    if Response_Regex:
                                        Current_Title = Response_Regex.group(1)
                                        Current_Title = Current_Title.strip()
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Current_Link, Current_Title,
                                            self.Plugin_Name.lower())

                                    else:

                                        if not "Phishstats" in Current_Title:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Current_Link, Current_Title,
                                                self.Plugin_Name.lower())

                                        else:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Current_Link,
                                                General.Get_Title(
                                                    Current_Link),
                                                self.Plugin_Name.lower())

                                    Data_to_Cache.append(Current_Link)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                    )

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to resolve DNS, this link probably isn't live."
                                )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #21
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:

                    if self.Type == "UKBN":
                        Authorization_Key = self.Load_Configuration()

                        if Authorization_Key:
                            Authorization_Key = "Basic " + Authorization_Key.decode('ascii')
                            headers_auth = {"Authorization": Authorization_Key}
                            Main_URL = f'https://api.{self.Domain}/company/{Query}'
                            Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth)
                            JSON_Object = Common.JSON_Handler(Response)
                            JSON_Response = JSON_Object.To_JSON_Loads()
                            Indented_JSON_Response = JSON_Object.Dump_JSON()

                            try:
                                Query = str(int(Query))

                                if Response and '{"errors":[{"error":"company-profile-not-found","self.Type":"ch:service"}]}' not in Response:

                                    if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                        Current_Company_Number = str(JSON_Response["company_number"])
                                        Result_URL = f'https://beta.{self.Domain}/company/{Current_Company_Number}'
                                        Result_Responses = Common.Request_Handler(Result_URL, Filter=True, Host=f"https://beta.{self.Domain}")
                                        Result_Response = Result_Responses["Filtered"]
                                        UKCN = str(JSON_Response["company_name"])
                                        Main_Output_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_JSON_Response, Query, self.The_File_Extensions["Main"])
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Result_Response, UKCN, self.The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)
                                            Output_Connections.Output([Main_Output_File, Output_file], Result_URL, f"UK Business Number {Query}", self.Concat_Plugin_Name)
                                            Data_to_Cache.append(Main_URL)

                                        else:
                                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            except:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for UKBN Search.")

                        else:
                            logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to retrieve API key.")

                    elif self.Type == "UKCN":
                        Authorization_Key = self.Load_Configuration()

                        if Authorization_Key:
                            Authorization_Key = "Basic " + Authorization_Key.decode('ascii')

                            try:
                                Main_URL = f'https://api.{self.Domain}/search/companies?q={Query}&items_per_page={self.Limit}'
                                headers_auth = {"Authorization": Authorization_Key}
                                Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth)
                                JSON_Object = Common.JSON_Handler(Response)
                                JSON_Response = JSON_Object.To_JSON_Loads()
                                Indented_JSON_Response = JSON_Object.Dump_JSON()

                                try:

                                    if JSON_Response['total_results'] > 0:
                                        Main_Output_File = General.Main_File_Create(Directory, self.Plugin_Name, Indented_JSON_Response, Query, self.The_File_Extensions["Main"])
                                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name)

                                        for Item in JSON_Response['items']:
                                            UKBN_URL = Item['links']['self']
                                            Full_UKBN_URL = f'https://beta.{self.Domain}{str(UKBN_URL)}'
                                            UKBN = UKBN_URL.replace("/company/", "")

                                            if Full_UKBN_URL not in Cached_Data and Full_UKBN_URL not in Data_to_Cache:
                                                UKCN = Item['title']
                                                Current_Responses = Common.Request_Handler(Full_UKBN_URL, Filter=True, Host=f"https://beta.{self.Domain}")
                                                Current_Response = Current_Responses["Filtered"]
                                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Current_Response), UKCN, self.The_File_Extensions["Query"])

                                                if Output_file:
                                                    Output_Connections.Output([Main_Output_File, Output_file], Full_UKBN_URL, f"UK Business Number {UKBN} for Query {Query}", self.Concat_Plugin_Name)
                                                    Data_to_Cache.append(Full_UKBN_URL)

                                                else:
                                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                except:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Error during UKCN Search, perhaps the rate limit has been exceeded.")

                            except:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for UKCN Search.")

                        else:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to retrieve API key.")

                    else:
                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request self.Type.")

                except:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #22
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            try:
                DNS_Info = checkdmarc.check_domains(self.Query_List)
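                # checkdmarc.check_domains() yields one result dictionary per domain when several are supplied, but a single dictionary for a lone domain, hence the branch below.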

                if len(self.Query_List) > 1:

                    for DNS_Item in DNS_Info:
                        Query = DNS_Item['base_domain']
                        Output_Dict = Common.JSON_Handler(DNS_Item).Dump_JSON()
                        Link = "https://www." + Query
                        Title = "DNS Information for " + DNS_Item['base_domain']

                        if Link not in Data_to_Cache and Link not in Cached_Data:
                            Responses = Common.Request_Handler(
                                Link, Filter=True, Host=f"https://www.{Query}")
                            Response = Responses["Filtered"]
                            Main_File = General.Main_File_Create(
                                Directory, self.Plugin_Name, Output_Dict,
                                Query, self.The_File_Extensions["Main"])
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, self.Plugin_Name, Response,
                                Title, self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, self.Plugin_Name, Query,
                                    self.Result_Type, self.Task_ID,
                                    self.Concat_Plugin_Name)
                                Output_Connections.Output(
                                    [Main_File, Output_file], Link, Title,
                                    self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                else:
                    Query = DNS_Info['base_domain']
                    Output_Dict = Common.JSON_Handler(DNS_Info).Dump_JSON()
                    Link = "https://www." + Query
                    Title = "DNS Information for " + Query

                    if Link not in Data_to_Cache and Link not in Cached_Data:
                        Responses = Common.Request_Handler(
                            Link, Filter=True, Host=f"https://www.{Query}")
                        Response = Responses["Filtered"]
                        Main_File = General.Main_File_Create(
                            Directory, self.Plugin_Name, Output_Dict, Query,
                            self.The_File_Extensions["Main"])
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, self.Plugin_Name, Response,
                            Title, self.The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, self.Plugin_Name, Query,
                                self.Result_Type, self.Task_ID,
                                self.Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file],
                                                      Link, Title,
                                                      self.Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

            except:
                logging.warning(
                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Error retrieving DNS details."
                )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #23
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                vulners_api = vulners.Vulners(api_key=Load_Configuration())
                Search_Response = vulners_api.search(Query,
                                                     limit=int(self.Limit))
                JSON_Response = Common.JSON_Handler(
                    Search_Response).Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, self.Plugin_Name, JSON_Response, Query,
                    self.The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                for Search_Result in Search_Response:

                    if Search_Result[
                            "bulletinFamily"] not in self.Unacceptable_Bulletins:
                        Result_Title = Search_Result["title"]
                        Result_URL = Search_Result["vhref"]
                        Search_Result_Responses = Common.Request_Handler(
                            Result_URL,
                            Filter=True,
                            Host=f"https://{self.Domain}")
                        Search_Result_Response = Search_Result_Responses[
                            "Filtered"]

                        if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, self.Plugin_Name,
                                Search_Result_Response, Result_Title,
                                self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Result_URL,
                                    Result_Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Result_URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                    else:
                        logging.info(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Skipping as bulletin type is not supported."
                        )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #24
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                URL_Components = Common.Regex_Handler(Query,
                                                      Type="URL",
                                                      Get_URL_Components=True)

                if URL_Components:
                    BW_Info = builtwith(Query)

                    if BW_Info:
                        BW_JSON_Output = Common.JSON_Handler(
                            BW_Info).Dump_JSON()
                        Query_Domain = URL_Components["Body"] + URL_Components[
                            "Extension"]
                        Title = f"Built With | {Query_Domain}"
                        Main_File = General.Main_File_Create(
                            Directory, self.Plugin_Name, BW_JSON_Output,
                            Query_Domain, self.The_File_Extensions["Main"])
                        BW_Search_URL = f"https://{self.Domain}/{Query_Domain}"
                        Responses = Common.Request_Handler(
                            BW_Search_URL,
                            Filter=True,
                            Host=f"https://{self.Domain}")
                        Response = Responses["Filtered"]
                        Output_Connections = General.Connections(
                            Query, self.Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())

                        if BW_Search_URL not in Cached_Data and BW_Search_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, self.Plugin_Name, Response,
                                Query, self.The_File_Extensions['Query'])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], BW_Search_URL,
                                    Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(BW_Search_URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                    else:
                        logging.info(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to get result for provided query."
                        )

                else:
                    logging.info(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #25
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Shodan_API_Key = self.Load_Configuration()
            API_Session = Shodan(Shodan_API_Key)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:

                    if self.Type == "Search":
                        Local_Plugin_Name = self.Plugin_Name + "-Search"

                        try:
                            API_Response = API_Session.search(Query)

                        except Exception as e:
                            logging.error(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}."
                            )
                            break

                        JSON_Output_Response = Common.JSON_Handler(
                            API_Response).Dump_JSON()
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())
                        Current_Step = 0

                        for Shodan_Item in API_Response["matches"]:
                            Shodan_Item_Module = Shodan_Item['_shodan'][
                                'module']
                            Shodan_Item_Module = Shodan_Item_Module.replace(
                                '-simple-new', '')

                            if Shodan_Item_Module.startswith("http"):
                                Shodan_Item_Host = ""
                                Shodan_Item_Port = 0

                                if 'http' in Shodan_Item:
                                    Shodan_Item_Host = Shodan_Item['http'][
                                        'host']
                                    Shodan_Item_Response = Shodan_Item['http'][
                                        'html']

                                elif 'ip_str' in Shodan_Item and 'domains' in Shodan_Item and len(
                                        Shodan_Item['domains']) > 0:
                                    Shodan_Item_Host = Shodan_Item['domains'][
                                        0]
                                    Shodan_Item_Response = Shodan_Item['data']

                                elif 'ip_str' in Shodan_Item and 'domains' not in Shodan_Item:
                                    Shodan_Item_Host = Shodan_Item['ip_str']
                                    Shodan_Item_Response = Shodan_Item['data']

                                if Shodan_Item_Host:

                                    if 'port' in Shodan_Item:

                                        if int(Shodan_Item['port']) not in [80, 443]:
                                            Shodan_Item_Port = Shodan_Item['port']

                                    if Shodan_Item_Port != 0:
                                        Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}:{str(Shodan_Item_Port)}"

                                    else:
                                        Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}"

                                    Title = f"Shodan | {str(Shodan_Item_Host)}"

                                    if Shodan_Item_URL not in Cached_Data and Shodan_Item_URL not in Data_to_Cache and Current_Step < int(
                                            self.Limit):
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query,
                                            Local_Plugin_Name,
                                            Shodan_Item_Response,
                                            Shodan_Item_Host,
                                            self.The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Shodan_Item_URL, Title,
                                                self.Plugin_Name.lower())
                                            Data_to_Cache.append(
                                                Shodan_Item_URL)

                                        else:
                                            logging.warning(
                                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                    elif self.Type == "Host":
                        Local_Plugin_Name = self.Plugin_Name + "-Host"

                        try:
                            API_Response = API_Session.host(Query)

                        except Exception as e:
                            logging.error(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}."
                            )
                            break

                        JSON_Output_Response = Common.JSON_Handler(
                            API_Response).Dump_JSON()
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())
                        Shodan_URL = f"https://www.{self.Domain}/host/{Query}"
                        Title = f"Shodan | {Query}"

                        if Shodan_URL not in Cached_Data and Shodan_URL not in Data_to_Cache:
                            Shodan_Responses = Common.Request_Handler(
                                Shodan_URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{self.Domain}")
                            Shodan_Response = Shodan_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, self.Plugin_Name,
                                Shodan_Response, Query,
                                self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Shodan_URL,
                                    Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Shodan_URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                            )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to complete task."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #26
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                URL_Body = f'https://{self.Domain}'
                Main_URL = URL_Body + '/' + Query.lower().replace(' ', '-')
                Responses = Common.Request_Handler(
                    Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]
                Main_File = General.Main_File_Create(Directory,
                                                     self.Plugin_Name,
                                                     Filtered_Response, Query,
                                                     self.The_File_Extension)
                Regex = Common.Regex_Handler(
                    Response,
                    Custom_Regex=
                    r"\<tr\>\s+\<td\sclass\=\"name\"\>\s+\<a\shref\=\"([\/\d\w\-\+\?\.]+)\"\>([\/\d\w\-\+\?\.\(\)\s\,\;\:\~\`\!\@\#\$\%\^\&\*\[\]\{\}]+)\<\/a\>\s+\<\/td\>",
                    Findall=True)

                if Regex:
                    Current_Step = 0
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain, self.Result_Type,
                        self.Task_ID, self.Concat_Plugin_Name)

                    for URL, Title in Regex:
                        Item_URL = URL_Body + URL
                        Current_Response = Common.Request_Handler(Item_URL)
                        Current_Item_Regex = Common.Regex_Handler(
                            Current_Response,
                            Custom_Regex=
                            r"\<button\sclass\=\"btn\sbtn\-primary\spassword\"\s+data\-data\=\"([\-\d\w\?\/]+)\"\s+data\-toggle\=\"modal\"\s+data\-target\=\"\#modal\"\s+\>show\sme\!\<\/button\>"
                        )

                        if Current_Item_Regex:

                            try:
                                Detailed_Item_URL = URL_Body + Current_Item_Regex.group(1)
                                Detailed_Responses = Common.Request_Handler(
                                    Detailed_Item_URL,
                                    Filter=True,
                                    Host=f"https://www.{self.Domain}")
                                Detailed_Response = Detailed_Responses["Regular"]
                                JSON_Response = Common.JSON_Handler(
                                    Detailed_Response).Is_JSON()

                                if JSON_Response:
                                    Output_Response = "<head><title>" + JSON_Response["title"] + "</title></head>\n"
                                    Output_Response = Output_Response + JSON_Response["data"]

                                else:
                                    Output_Response = Detailed_Responses[
                                        "Filtered"]

                                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                                        self.Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        Output_Response, Title,
                                        self.The_File_Extension)

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file], Item_URL,
                                            General.Get_Title(Item_URL),
                                            self.Concat_Plugin_Name)
                                        Data_to_Cache.append(Item_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

                            except:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to generate output, may have a blank detailed response."
                                )

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for current result."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for provided query."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #27
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        IX_Access_Token = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Data = {"term": Query, "buckets": [], "lookuplevel": 0, "maxresults": Limit, "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0, "terminate": []}
            IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search?k={IX_Access_Token}", Method="POST", JSON_Data=Data)
            JSON_Object = Common.JSON_Handler(IX_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File_1 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-1", The_File_Extensions["Main"])
            

            if "id" in JSON_Response:
                Search_ID = JSON_Response["id"]
                IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search/result?k={IX_Access_Token}&id={Search_ID}")
                JSON_Object = Common.JSON_Handler(IX_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File_2 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-2", The_File_Extensions["Main"])
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Data Leakage", Task_ID, Plugin_Name.lower())

                if "records" in JSON_Response:

                    for IX_Item in JSON_Response["records"]:

                        if "systemid" in IX_Item and "name" in IX_Item:
                            IX_URL = f"https://{Domain}/?did=" + IX_Item['systemid']

                            if IX_Item["name"] != "":
                                Title = f"IntelligenceX Data Leak | " + IX_Item["name"]

                            else:
                                TItle = "IntelligenceX Data Leak | Untitled Document"

                            if IX_URL not in Cached_Data and IX_URL not in Data_to_Cache:
                                IX_Item_Responses = Common.Request_Handler(IX_URL, Filter=True, Host=f"https://{Domain}")
                                IX_Item_Response = IX_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, IX_Item_Response, IX_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File_1, Main_File_2, Output_file], IX_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(IX_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #28
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)

            try:
                pyhibp.set_api_key(key=Load_Configuration())

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to set API key, make sure it is set in the configuration file.")

            if self.Type == "email":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_pastes(email_address=Query)
                    logging.info(Query_Response)

                    if Query_Response:
                        Current_Domain = Query_Response[0]["Source"]
                        ID = Query_Response[0]["Id"]
                        Link = f"https://www.{Current_Domain}.com/{ID}"
                        JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON()

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, JSON_Query_Response, "email", self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "breach":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_single_breach(breach_name=Query)

                    if Query_Response:
                        Current_Domain = Query_Response["Domain"]
                        Link = f"https://www.{Current_Domain}.com/"
                        JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON()

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "password":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pw.is_password_breached(password=Query)
                    logging.info(Query_Response)

                    if Query_Response:
                        Link = f"https://{self.Domain}/Passwords?{Query}"

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, str(Query_Response), "password", ".txt")

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "account":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True)

                    if Query_Response:
                        Current_Step = 0

                        for Response in Query_Response:
                            Current_Response = pyhibp.get_single_breach(breach_name=Response['Name'])
                            JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON()
                            Link = "https://" + Current_Response['self.Domain']

                            if Current_Response['self.Domain'] not in Cached_Data and Current_Response['self.Domain'] not in Data_to_Cache and Current_Step < int(self.Limit):
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", self.The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Response['self.Domain'], self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower())
                                    Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Current_Response['self.Domain'])

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided.")

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #29
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Object.Dump_JSON(),
                                                 Query,
                                                 The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for Store_Item in JSON_Response['results']:
                        Store_Item_Responses = Common.Request_Handler(
                            Store_Item['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        Store_Item_Response = Store_Item_Responses["Filtered"]

                        if Store_Item['artistViewUrl'] not in Cached_Data and Store_Item['artistViewUrl'] not in Data_to_Cache:
                            Apple_Store_Regex = Common.Regex_Handler(
                                Store_Item['artistViewUrl'],
                                Custom_Regex=r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Store_Item_Response,
                                    Apple_Store_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        Store_Item['artistViewUrl'],
                                        General.Get_Title(
                                            Store_Item['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        Store_Item['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #30
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Ebay_API_Key = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:
                    API_Request = Connection(appid=Ebay_API_Key,
                                             config_file=None)
                    API_Response = API_Request.execute('findItemsAdvanced',
                                                       {'keywords': Query})
                    JSON_Output_Response = Common.JSON_Handler(
                        API_Response.dict()).Dump_JSON()
                    JSON_Object = Common.JSON_Handler(API_Response.dict())
                    JSON_Response = JSON_Object.Dump_JSON(Indentation=0,
                                                          Sort=False)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Main_File = General.Main_File_Create(
                        Directory, self.Plugin_Name, JSON_Output_Response,
                        Query, self.The_File_Extensions["Main"])

                    if JSON_Response["ack"] == "Success":
                        Output_Connections = General.Connections(
                            Query, self.Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())
                        Current_Step = 0

                        for JSON_Line in JSON_Response['searchResult']['item']:
                            Ebay_Item_URL = JSON_Line['viewItemURL']
                            Title = "Ebay | " + General.Get_Title(
                                Ebay_Item_URL)

                            if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(
                                    self.Limit):
                                Ebay_Item_Regex = Common.Regex_Handler(
                                    Ebay_Item_URL,
                                    Custom_Regex=
                                    r"https\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+"
                                )
                                Ebay_Item_Responses = Common.Request_Handler(
                                    Ebay_Item_URL,
                                    Application_JSON_CT=True,
                                    Accept_XML=True,
                                    Accept_Language_EN_US=True,
                                    Filter=True,
                                    Host=f"https://www.{self.Domain}")
                                Ebay_Item_Response = Ebay_Item_Responses[
                                    "Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, self.Plugin_Name,
                                    Ebay_Item_Response,
                                    Ebay_Item_Regex.group(1).rstrip("-"),
                                    self.The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        Ebay_Item_URL, Title,
                                        self.Plugin_Name.lower())
                                    Data_to_Cache.append(Ebay_Item_URL)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                        )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make API call."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")