Example #1
    def Search(self):
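        # For each query, fetch https://www.<Domain>/<Query>.html, skip pages whose body
        # matches the site's "Correct the following errors" validation message, and cache
        # any result URLs that produced a new output file.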

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                BSB_Search_URL = f"https://www.{self.Domain}/{Query}.html"
                Responses = Common.Request_Handler(
                    BSB_Search_URL,
                    Filter=True,
                    Host=f"https://www.{self.Domain}")
                Response = Responses["Filtered"]
                Error_Regex = Common.Regex_Handler(
                    Response, Custom_Regex=r"Correct\sthe\sfollowing\serrors")
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                if not Error_Regex:

                    if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, self.Plugin_Name, Response,
                            Query, self.The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                [Output_file], BSB_Search_URL,
                                General.Get_Title(BSB_Search_URL),
                                self.Plugin_Name.lower())
                            Data_to_Cache.append(BSB_Search_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Query returned error, probably does not exist."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #2
def Search(Query_List, Task_ID):
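    # Older module-level variant of the BSB search: fetches each query page with
    # requests and checks for the error message via re.search rather than the
    # Common.Request_Handler / Regex_Handler helpers used in the class-based version.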

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            BSB_Search_URL = f"https://www.bsbnumbers.com/{Query}.html"
            Response = requests.get(BSB_Search_URL).text
            Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors",
                                    Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "bsbnumbers.com",
                                                     "BSB Details", Task_ID,
                                                     Plugin_Name.lower())

            if not Error_Regex:

                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Response, Query,
                        The_File_Extension)

                    if Output_file:
                        Output_Connections.Output(
                            [Output_file], BSB_Search_URL,
                            General.Get_Title(BSB_Search_URL),
                            Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Query returned error, probably does not exist."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #3
def Search(Query_List, Task_ID, **kwargs):
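    # Searches Craigslist through its RSS feed for each query, then downloads each
    # listing page (up to Limit items) and caches the listing URLs.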

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
            Craigslist_Response = feedparser.parse(Main_URL)
            Craigslist_Items = Craigslist_Response["items"]
            Current_Step = 0

            for Item in Craigslist_Items:
                Item_URL = Item["link"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Craigslist_Response = requests.get(Item_URL).text
                    Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                    Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org/"
                    Filename = Item_URL.replace(Local_URL, "")
                    Filename = Filename.replace(".html/", "")
                    Filename = Filename.replace(".html", "")
                    Filename = Filename.replace("/", "-")
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Craigslist_Response, Filename, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Local_Domain, "Search Result", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Item_URL, General.Get_Title(Item_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    Current_Step += 1

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #4
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                # Query can be Title or ISBN
                Main_URL = f"http://{self.Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
                Lib_Gen_Response = Common.Request_Handler(Main_URL)
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Lib_Gen_Response, Query, self.The_File_Extension)
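                # Library Genesis result links have the form book/index.php?md5=<32-hex-char
                # hash>, so collect every match to enumerate the book detail pages.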
                Lib_Gen_Regex = Common.Regex_Handler(Lib_Gen_Response, Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Findall=True)

                if Lib_Gen_Regex:
                    Current_Step = 0

                    for Regex in Lib_Gen_Regex:
                        Item_URL = f"http://{self.Domain}/{Regex}"
                        Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                        Lib_Item_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"http://{self.Domain}")
                        Lib_Item_Response = Lib_Item_Responses["Filtered"]

                        if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Lib_Item_Response, Regex, self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)
                                Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Item_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #5
    def Search(self):
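        # Searches the Microsoft Store for the locale loaded from configuration and
        # walks each matched product page, saving up to Limit results per query.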

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Location = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                Main_URL = f"https://www.{self.Domain}/en-{Location}/search?q={Query}"
                Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Win_Store_Response, Query, self.The_File_Extension)
                # Product links embed the same en-<Location> locale used to build Main_URL.
                Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=rf"\/en\-{Location}\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                if Win_Store_Regex:
                    Current_Step = 0

                    for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                        Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                        Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                        Win_Store_Response = Win_Store_Responses["Filtered"]
                        Title = "Windows Store | " + General.Get_Title(Item_URL)

                        if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Win_Store_Response, Regex_Group_1, self.The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Item_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #6
def Search(Query_List, Task_ID):
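    # Early BSB variant: no try/except wrapper, and the cache falls back to an empty
    # list when no cache file exists yet.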
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        BSB_Search_URL = "https://www.bsbnumbers.com/" + Query + ".html"
        Response = requests.get(BSB_Search_URL).text
        Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors", Response)

        if not Error_Regex:

            if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(
                    Directory, Query, Plugin_Name, Response, Query,
                    The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name,
                                        BSB_Search_URL, "bsbnumbers.com",
                                        "Data Leakage", Task_ID,
                                        General.Get_Title(BSB_Search_URL),
                                        Plugin_Name.lower())
                    # Only cache the URL once its output file has actually been written.
                    Data_to_Cache.append(BSB_Search_URL)

        else:
            logging.warning(General.Date() +
                            " Query returned error, probably does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #7
def Search(Query_List, Task_ID, Limit=10):
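    # Queries a torrent search API (tpbc.herokuapp.com) sorted by descending seeders,
    # saves the raw JSON response, then one output file per magnet-link result up to Limit.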

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Response = Common.Request_Handler('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc')
            JSON_Object = Common.JSON_Handler(Response)
            Response = JSON_Object.To_JSON_Loads()
            JSON_Response = JSON_Object.Dump_JSON()
            Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Response, Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #8
def Search(Query_List, Task_ID, **kwargs):
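    # Authenticates to Reddit with praw using credentials from the configuration file,
    # searches the configured subreddit(s) for each query, and saves matching threads.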

    try:
        Data_to_Cache = []
        Results = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Reddit_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Limit = General.Get_Limit(kwargs)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:
                Reddit_Connection = praw.Reddit(
                    client_id=Reddit_Details[0],
                    client_secret=Reddit_Details[1],
                    user_agent=Reddit_Details[2],
                    username=Reddit_Details[3],
                    password=Reddit_Details[4])
                All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

                for Subreddit in All_Subreddits.search(
                        Query, limit=Limit
                ):  # Limit, subreddit and search to be controlled by the web app.
                    Results.append(Subreddit.url)

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?"
                )

            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Forum", Task_ID,
                                                     Plugin_Name.lower())

            for Result in Results:

                if Result not in Cached_Data and Result not in Data_to_Cache:

                    try:
                        Reddit_Regex = re.search(
                            r"https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/",
                            Result)

                        if Reddit_Regex:
                            Reddit_Responses = General.Request_Handler(
                                Result,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Reddit_Response = Reddit_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Reddit_Response,
                                Reddit_Regex.group(3), The_File_Extension)

                            if Output_file:
                                Output_Connections.Output(
                                    [Output_file], Result,
                                    General.Get_Title(Result),
                                    Plugin_Name.lower())
                                Data_to_Cache.append(Result)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    except:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file."
                        )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #9
def Search(Query_List, Task_ID, **kwargs):
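    # Older Reddit variant: Results holds [URL, selftext] pairs, and the post body
    # itself is written to the output file instead of re-fetching the thread page.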
    Data_to_Cache = []
    Cached_Data = []
    Results = []

    if int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Reddit_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Reddit_Connection = praw.Reddit(client_id=Reddit_Details[0],
                                            client_secret=Reddit_Details[1],
                                            user_agent=Reddit_Details[2],
                                            username=Reddit_Details[3],
                                            password=Reddit_Details[4])

            All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

            for Subreddit in All_Subreddits.search(Query, limit=Limit): # Limit, subreddit and search to be controlled by the web app.
                Current_Result = []
                Current_Result.append(Subreddit.url)
                Current_Result.append(Subreddit.selftext)
                Results.append(Current_Result)

        except:
            logging.warning(General.Date() + " Failed to get results. Are you connected to the internet?")

        for Result in Results:

            if Result[0] not in Cached_Data and Result[0] not in Data_to_Cache:

                try:
                    Reddit_Regex = re.search("https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/", Result[0])

                    if Reddit_Regex:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Result[1], Reddit_Regex.group(3), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Result[0], "reddit.com", "Data Leakage", Task_ID, General.Get_Title(Result[0]), Plugin_Name.lower())

                except:
                    logging.warning(General.Date() + " Failed to create file.")

                Data_to_Cache.append(Result[0])

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #10
def Transaction_Search(Query_List, Task_ID, Type, **kwargs):
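    # Looks up btc/bch/eth transaction hashes on the configured block-explorer domain
    # and saves each address page the transaction references; monero queries are
    # checked against localmonero.co's block search instead.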

    try:
        Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Local_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            if Type != "monero":

                if Type == "btc" or Type == "bch":
                    Query_Regex = re.search(r"[\d\w]{64}", Query)

                elif Type == "eth":
                    Query_Regex = re.search(r"(0x[\d\w]{64})", Query)

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                    )

                if Query_Regex:
                    Main_URL = f"https://www.{Domain}/{Type}/tx/{Query}"
                    Main_Response = General.Request_Handler(Main_URL)

                    if Type == "btc":
                        Address_Regex = re.findall(
                            r"\/btc\/address\/([\d\w]{26,34})", Main_Response)

                    elif Type == "bch":
                        Address_Regex = re.findall(r"([\d\w]{42})",
                                                   Main_Response)

                    elif Type == "eth":
                        Address_Regex = re.findall(r"(0x[\w\d]{40})",
                                                   Main_Response)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                        )

                    if Address_Regex:
                        Current_Step = 0
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, Domain,
                            "Blockchain Address", Task_ID, Plugin_Name.lower())

                        for Transaction in Address_Regex:
                            Query_URL = f"https://www.{Domain}/{Type}/address/{Transaction}"

                            if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Transaction_Responses = General.Request_Handler(
                                    Query_URL,
                                    Filter=True,
                                    Host=f"https://www.{Domain}")
                                Transaction_Response = Transaction_Responses[
                                    "Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Local_Plugin_Name,
                                    Transaction_Response, Transaction,
                                    The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output(
                                        [Output_file], Query_URL,
                                        General.Get_Title(Query_URL),
                                        Plugin_Name.lower())
                                    Data_to_Cache.append(Query_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                        )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                    )

            else:
                Alt_Domain = "localmonero.co"
                Query_URL = f"https://{Alt_Domain}/blocks/search/{Query}"
                Transaction_Response = General.Request_Handler(Query_URL)

                if "Whoops, looks like something went wrong." not in Transaction_Response and Query_URL not in Cached_Data and Query_URL not in Data_to_Cache:
                    Transaction_Responses = General.Request_Handler(
                        Query_URL, Filter=True, Host=f"https://{Alt_Domain}")
                    Transaction_Response = Transaction_Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Local_Plugin_Name, Transaction_Response,
                        Query, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, Alt_Domain,
                            "Blockchain Transaction", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output(
                            [Output_file], Query_URL,
                            General.Get_Title_Requests_Module(Query_URL),
                            Plugin_Name.lower())
                        Data_to_Cache.append(Query_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name,
                                "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name,
                                "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #11
def Search(Query_List, Task_ID, Type_of_Query, **kwargs):
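    # Queries Have I Been Pwned through pyhibp; Type_of_Query selects between paste
    # ("email"), breach, password and account lookups, each cached under its own
    # plugin name.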
    Data_to_Cache = []
    Cached_Data = []

    try:

        if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

        Directory = General.Make_Directory(Concat_Plugin_Name)

        logger = logging.getLogger()
        logger.setLevel(logging.INFO)

        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            pyhibp.set_api_key(key=Load_Configuration())

        except:
            logging.warning(General.Date() + " Failed to set API key, make sure it is set in the configuration file.")

        Query_List = General.Convert_to_List(Query_List)

        if Type_of_Query == "email":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_pastes(email_address=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Domain = Query_Response[0]["Source"]
                    ID = Query_Response[0]["Id"]
                    Link = "https://www." + Domain + ".com/" + ID
                    JSON_Query_Response = json.dumps(Query_Response, indent=4, sort_keys=True)

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Query_Response, "email", The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "breach":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_single_breach(breach_name=Query)

                if Query_Response:
                    Domain = Query_Response["Domain"]
                    Link = "https://www." + Domain + ".com/"
                    JSON_Query_Response = json.dumps(Query_Response, indent=4, sort_keys=True)

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "password":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pw.is_password_breached(password=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Link = "https://haveibeenpwned.com/Passwords?" + Query

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Query_Response), "password", ".txt")

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "account":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True)

                if Query_Response:
                    Current_Step = 0

                    for Response in Query_Response:
                        Current_Response = pyhibp.get_single_breach(breach_name=Response['Name'])
                        JSON_Query_Response = json.dumps(Current_Response, indent=4, sort_keys=True)
                        Link = "https://" + Current_Response['Domain']

                        if Current_Response['Domain'] not in Cached_Data and Current_Response['Domain'] not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Local_Plugin_Name, Link, Current_Response['Domain'], "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                            Data_to_Cache.append(Current_Response['Domain'])
                            Current_Step += 1

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        else:
            logging.warning(General.Date() + " Invalid type provided.")

    except Exception as e:
        logging.warning(General.Date() + " Execution error: " + str(e))
Example #12
def Search(Query_List, Task_ID, **kwargs):
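    # Older module-level Craigslist variant using string concatenation for URLs and
    # the append/write cache pattern instead of a Cache object.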
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Craigslist_Location = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Main_URL = "https://" + Craigslist_Location.lower(
        ) + ".craigslist.org/search/sss?format=rss&query=" + Query
        Craigslist_Response = feedparser.parse(Main_URL)
        Craigslist_Items = Craigslist_Response["items"]
        Current_Step = 0

        for Item in Craigslist_Items:
            Item_URL = Item["link"]

            if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                    Limit):
                Craigslist_Response = requests.get(Item_URL).text
                Local_URL = "https://" + Craigslist_Location.lower(
                ) + ".craigslist.org/"
                Local_Domain = Craigslist_Location.lower() + ".craigslist.org/"
                Filename = Item_URL.replace(Local_URL, "")
                Filename = Filename.replace(".html/", "")
                Filename = Filename.replace(".html", "")
                Filename = Filename.replace("/", "-")
                Output_file = General.Create_Query_Results_Output_File(
                    Directory, Query, Plugin_Name, Craigslist_Response,
                    Filename, The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name,
                                        Item_URL, Local_Domain, "Data Leakage",
                                        Task_ID, General.Get_Title(Item_URL),
                                        Plugin_Name.lower())

                Data_to_Cache.append(Item_URL)
                Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #13
def Search(Query_List, Task_ID, **kwargs):
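    # Calls the eBay Finding API's findItemsAdvanced operation through the SDK
    # Connection object, then fetches each returned item page with browser-like
    # headers, up to Limit results per query.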
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Ebay_API_Key = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            API_Request = Connection(appid=Ebay_API_Key, config_file=None)
            API_Response = API_Request.execute('findItemsAdvanced',
                                               {'keywords': Query})
            JSON_Output_Response = json.dumps(API_Response.dict(),
                                              indent=4,
                                              sort_keys=True)
            JSON_Response = json.dumps(API_Response.dict())
            JSON_Response = json.loads(JSON_Response)
            General.Main_File_Create(Directory, Plugin_Name,
                                     JSON_Output_Response, Query, ".json")

            if JSON_Response["ack"] == "Success":
                Current_Step = 0

                for JSON_Line in JSON_Response['searchResult']['item']:
                    Ebay_Item_URL = JSON_Line['viewItemURL']

                    if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Ebay_Item_Regex = re.search(
                            r"http\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+",
                            Ebay_Item_URL)
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent':
                            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept':
                            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Ebay_Item_Response = requests.get(Ebay_Item_URL,
                                                          headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Ebay_Item_Response,
                            Ebay_Item_Regex.group(1), The_File_Extension)

                        if Output_file:
                            General.Connections(
                                Output_file, Query, Plugin_Name, Ebay_Item_URL,
                                "ebay.com", "Data Leakage", Task_ID,
                                General.Get_Title(Ebay_Item_URL),
                                Plugin_Name.lower())

                        Data_to_Cache.append(Ebay_Item_URL)
                        Current_Step += 1

            else:
                logging.warning(General.Date() + " No results found.")

        except:
            logging.info(General.Date() + " Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #14
def Search(Query_List, Task_ID, **kwargs):
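    # Resolves a Flickr user either by email address (when the query matches an email
    # regex) or by username, then downloads each of the user's photo pages up to Limit.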
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    try:
        Flickr_Details = Load_Configuration()
        flickr_api.set_keys(api_key=Flickr_Details[0],
                            api_secret=Flickr_Details[1])

    except:
        logging.info(General.Date() + " - " + __name__.strip('plugins.') +
                     " - Failed to establish API identity.")

    for Query in Query_List:
        Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

        if Email_Regex:

            try:
                User = flickr_api.Person.findByEmail(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query,
                                         ".txt")
                Output_Connections = General.Connections(
                    Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID,
                    Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo[
                        "id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent':
                            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept':
                            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Photo_Response = requests.get(Photo_URL,
                                                      headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Photo_Response,
                            str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                Output_file, Photo_URL,
                                General.Get_Title(Photo_URL))

                        Data_to_Cache.append(Photo_URL)
                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " +
                             __name__.strip('plugins.') +
                             " - Failed to make API call.")

        else:

            try:
                User = flickr_api.Person.findByUserName(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query,
                                         ".txt")
                Output_Connections = General.Connections(
                    Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID,
                    Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo[
                        "id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent':
                            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept':
                            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Photo_Response = requests.get(Photo_URL,
                                                      headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Photo_Response,
                            str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                Output_file, Photo_URL,
                                General.Get_Title(Photo_URL))

                        Data_to_Cache.append(Photo_URL)
                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " +
                             __name__.strip('plugins.') +
                             " - Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #15
def Search(Query_List, Task_ID, Type, Limit=10):
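    # Instagram lookups via the instagramy package; Type selects between a user's
    # posts ("User"), a hashtag's recent media ("Tag"), or a single post ("Post").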

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "User":
                from instagramy import InstagramUser
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramUser(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])

                if not CSE_Response.is_private:
                    Posts = CSE_Response.posts
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain,
                        "Social Media - Person", Task_ID,
                        Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Post in Posts:
                        URL = Post['post_url']
                        Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                                "").replace("/", "")
                        Title = "IG | " + General.Get_Title(URL, Requests=True)

                        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Response = Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name, Response,
                                Shortcode, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], URL, Title,
                                    Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                else:
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - The provided user's profile is private and cannot be scraped."
                    )

            elif Type == "Tag":
                from instagramy import InstagramHashTag
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramHashTag(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = vars(
                    CSE_Response)['tag_data']['edge_hashtag_to_media']['edges']
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post['node']['shortcode']
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL, Requests=True)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = Common.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Post":
                from instagramy import InstagramPost
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramPost(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Place",
                    Task_ID, Local_Plugin_Name.lower())
                URL = CSE_Response.url
                Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                        "").replace("/", "")
                Title = "IG | " + General.Get_Title(URL, Requests=True)

                if URL not in Cached_Data and URL not in Data_to_Cache:
                    Responses = Common.Request_Handler(
                        URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Response = Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Local_Plugin_Name, Response,
                        Shortcode, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  URL, Title,
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Ejemplo n.º 16
0
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Configuration_Details = Load_Configuration()
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Configuration_Details[1].lower() == "true":
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request,
                                    auth=(Configuration_Details[0], '')).text

        else:
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request,
                                    auth=(Configuration_Details[0], '')).text

        JSON_Response = json.loads(Response)

        if 'exists' not in JSON_Response:

            if JSON_Response:

                if Request not in Cached_Data and Request not in Data_to_Cache:

                    try:
                        SSLMate_Regex = re.search(
                            r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?",
                            Query)

                        if SSLMate_Regex:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                json.dumps(JSON_Response,
                                           indent=4,
                                           sort_keys=True),
                                SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name,
                                    "sslmate.com", "Domain Spoof", Task_ID,
                                    Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_file, Request,
                                    General.Get_Title(Request))

                        else:
                            logging.warning(
                                General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Failed to match regular expression.")

                    except:
                        logging.warning(General.Date() + " - " +
                                        __name__.strip('plugins.') +
                                        " - Failed to create file.")

                    Data_to_Cache.append(Request)

            else:
                logging.warning(General.Date() + " - " +
                                __name__.strip('plugins.') + " - No response.")

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Query does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 17
0
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        try:
            Flickr_Details = Load_Configuration()
            flickr_api.set_keys(api_key=Flickr_Details[0],
                                api_secret=Flickr_Details[1])

        except:
            logging.info(
                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to establish API identity."
            )

        for Query in Query_List:
            Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

            if Email_Regex:

                try:
                    User = flickr_api.Person.findByEmail(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(
                            Directory, Plugin_Name, Convert_to_JSON(Photos),
                            Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Plugin_Name,
                            "flickr.com", "Social Media - Media", Task_ID,
                            Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.flickr.com/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Photo_Response = requests.get(
                                    Photo_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Photo_Response, str(Photo['id']),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Photo_URL,
                                        General.Get_Title(Photo_URL),
                                        Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - No photos found."
                        )

                except:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call."
                    )

            else:

                try:
                    User = flickr_api.Person.findByUserName(Query)
                    Photos = User.getPhotos()

                    if Photos:
                        Main_File = General.Main_File_Create(
                            Directory, Plugin_Name, Convert_to_JSON(Photos),
                            Query, The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Plugin_Name, "flickr.com",
                            "Social Media - Media", Task_ID,
                            Plugin_Name.lower())
                        Current_Step = 0

                        for Photo in Photos:
                            Photo_URL = f"https://www.flickr.com/photos/{Query}/{Photo['id']}"

                            if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Photo_Response = requests.get(
                                    Photo_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Photo_Response, str(Photo['id']),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Photo_URL,
                                        General.Get_Title(Photo_URL),
                                        Plugin_Name.lower())
                                    Data_to_Cache.append(Photo_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - No photos found."
                        )

                except:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make API call."
                    )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Ejemplo n.º 18
0
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "ABN":
                Main_URL = 'https://abr.business.gov.au/ABN/View?id=' + Query
                Response = requests.get(Main_URL).text

                try:

                    if 'Error searching ABN Lookup' not in Response:
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Response,
                                General.Get_Title(Main_URL),
                                The_File_Extension)

                            if Output_file:
                                General.Connections(
                                    Output_file, Query, Plugin_Name, Main_URL,
                                    "abr.business.gov.au",
                                    "Data Leakage", Task_ID,
                                    General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() +
                                    " Invalid query provided for ABN Search.")

            elif Type == "ACN":
                Main_URL = 'https://abr.business.gov.au/Search/Run'
                Data = {
                    'SearchParameters.SearchText':
                    Query,
                    'SearchParameters.AllNames':
                    'true',
                    'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton':
                    'Search'
                }
                Response = requests.post(Main_URL, data=Data).text

                if kwargs.get('Limit'):

                    if int(kwargs["Limit"]) > 0:
                        Limit = kwargs["Limit"]

                    else:
                        Limit = 10

                else:
                    Limit = 10

                try:
                    ACN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if ACN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name,
                                                 Response, Query,
                                                 The_File_Extension)
                        Current_Step = 0
                        ABNs_Regex = re.findall(
                            r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,",
                            Response)

                        if ABNs_Regex:

                            for ABN_URL, ACN in ABNs_Regex:
                                Full_ABN_URL = 'https://abr.business.gov.au/ABN/View?abn=' + ABN_URL

                                if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    ACN = ACN.rstrip()
                                    Current_Response = requests.get(
                                        Full_ABN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Plugin_Name,
                                        str(Current_Response),
                                        ACN.replace(' ', '-'),
                                        The_File_Extension)

                                    if Output_file:
                                        General.Connections(
                                            Output_file, Query, Plugin_Name,
                                            Full_ABN_URL,
                                            "abr.business.gov.au",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Full_ABN_URL),
                                            Plugin_Name)
                                        Data_to_Cache.append(Full_ABN_URL)
                                        Current_Step += 1

                        else:
                            logging.warning(
                                General.Date() +
                                " Response did not match regular expression.")

                    else:
                        logging.warning(
                            General.Date() +
                            " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() +
                                    " Invalid query provided for ACN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 19
0
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "NZBN":
                Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + Query + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                Response = requests.get(Main_URL).text

                try:

                    if 'An error has occurred and the requested action cannot be performed.' not in Response:
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Main_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZBN Search.")

            elif Type == "NZCN":

                if kwargs.get('Limit'):

                    if int(kwargs["Limit"]) > 0:
                        Limit = kwargs["Limit"]

                    else:
                        Limit = 10

                else:
                    Limit = 10

                try:
                    Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + urllib.parse.quote(
                        Query) + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=' + str(
                        Limit) + '&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Response = requests.get(Main_URL).text
                    NZCN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if NZCN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extension)
                        NZBNs_Regex = re.findall(r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d{6})\)\s\(NZBN\:\s(\d{13})\)", Response)

                        if NZBNs_Regex:

                            for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                Full_NZBN_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/' + NZ_ID + '?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                    Current_Response = requests.get(Full_NZBN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), NZCN.replace(' ', '-'), The_File_Extension)

                                    if Output_file:
                                        General.Connections(Output_file, Query, Plugin_Name, Full_NZBN_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Full_NZBN_URL), Plugin_Name)
                                        Data_to_Cache.append(Full_NZBN_URL)

                        else:
                            logging.warning(General.Date() + " Response did not match regular expression.")

                    else:
                        logging.warning(General.Date() + " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZCN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 20
0
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "CIK":
            Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + Query + '&owner=exclude&count=40&hidefilings=0'
            Response = requests.get(Main_URL).text

            try:

                if 'No matching CIK.' not in Response:
                    Query = str(int(Query))

                    if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Response,
                            General.Get_Title(Main_URL), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query,
                                                Plugin_Name, Main_URL,
                                                "sec.gov", "Data Leakage",
                                                Task_ID,
                                                General.Get_Title(Main_URL),
                                                Plugin_Name)
                            Data_to_Cache.append(Main_URL)

            except:
                logging.info(
                    str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                    + " Invalid query provided for CIK Search.")

        elif Type == "ACN":
            Main_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?company=' + Query + '&owner=exclude&action=getcompany'
            Response = requests.get(Main_URL).text

            if kwargs.get('Limit'):

                if int(kwargs["Limit"]) > 0:
                    Limit = kwargs["Limit"]

                else:
                    Limit = 10

            else:
                Limit = 10

            try:
                ACN = re.search(r".*[a-zA-Z].*", Query)

                if ACN:
                    General.Main_File_Create(Directory, Plugin_Name, Response,
                                             Query, The_File_Extension)
                    Current_Step = 0
                    CIKs_Regex = re.findall(
                        r"(\d{10})\<\/a\>\<\/td\>\s+\<td\sscope\=\"row\"\>(.*\S.*)\<\/td\>",
                        Response)

                    if CIKs_Regex:

                        for CIK_URL, ACN in CIKs_Regex:
                            Full_CIK_URL = 'https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&CIK=' + CIK_URL + '&owner=exclude&count=40&hidefilings=0'

                            if Full_CIK_URL not in Cached_Data and Full_CIK_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                Current_Response = requests.get(
                                    Full_CIK_URL).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    str(Current_Response),
                                    ACN.replace(' ', '-'), The_File_Extension)

                                if Output_file:
                                    General.Connections(
                                        Output_file, Query, Plugin_Name,
                                        Full_CIK_URL, "sec.gov",
                                        "Data Leakage", Task_ID,
                                        General.Get_Title(Full_CIK_URL),
                                        Plugin_Name)
                                    Data_to_Cache.append(Full_CIK_URL)
                                    Current_Step += 1

            except:
                logging.info(
                    str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                    + " Invalid query provided for ACN Search.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 21
0
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # Query can be Title or ISBN
        Main_URL = "http://gen.lib.rus.ec/search.php?req=" + Query + "&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
        Lib_Gen_Response = requests.get(Main_URL).text
        General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response,
                                 Query, The_File_Extension)
        Lib_Gen_Regex = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}",
                                   Lib_Gen_Response)

        if Lib_Gen_Regex:
            Current_Step = 0

            for Regex in Lib_Gen_Regex:
                Item_URL = "http://gen.lib.rus.ec/" + Regex
                Lib_Item_Response = requests.get(Item_URL).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                        Limit):
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Lib_Item_Response,
                        Regex, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Item_URL, "gen.lib.rus.ec",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Item_URL),
                                            Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)

                    Current_Step += 1

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Failed to match regular expression.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 22
0
def Character_Switch(Query_List, Task_ID):
    Local_Plugin_Name = Plugin_Name + "-Character-Switch"
    Data_to_Cache = []
    Cached_Data = []
    Valid_Results = ["Domain,IP Address"]
    Valid_Hosts = []
    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Local_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    logging.info(
        str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
        " Character Switching Selected.")
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        URL_Regex = re.search(
            r"(https?:\/\/(www\.)?)?([-a-zA-Z0-9@:%_\+~#=]{2,256})(\.[a-z]{2,3})(\.[a-z]{2,3})?(\.[a-z]{2,3})?",
            Query)

        if URL_Regex:
            URL_Prefix = URL_Regex.group(1)
            URL_Body = URL_Regex.group(3)

            if URL_Regex.group(5) and URL_Regex.group(6):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(
                    5) + URL_Regex.group(6)

            elif URL_Regex.group(5):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(5)

            else:
                URL_Extension = URL_Regex.group(4)

        else:
            logging.warning(
                str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
                " Please provide valid URLs.")
            continue

        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            URL_Body)
        URL_List = list(URL_Body)
        Altered_URLs = Rotor.Search(URL_List, True, False, False, False, True,
                                    True, True)
        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            ", ".join(Altered_URLs))

        for Altered_URL in Altered_URLs:

            if not Altered_URL == URL_Body:

                try:
                    Query = Altered_URL + URL_Extension
                    Web_Host = URL_Prefix.replace("s", "") + Query
                    Response = socket.gethostbyname(Query)

                    if Response:
                        Cache = Query + ":" + Response

                        if Cache not in Cached_Data and Cache not in Data_to_Cache:
                            Valid_Results.append(Query + "," + Response)
                            Data_to_Cache.append(Cache)
                            Valid_Hosts.append(Web_Host)

                except Exception as e:
                    logging.info(
                        str(datetime.datetime.now().strftime(
                            '%Y-%m-%d %H:%M:%S')) + str(e))

        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            Directory)
        URL_Domain = URL_Body + URL_Extension
        Output_File = General.Main_File_Create(Directory, Local_Plugin_Name,
                                               "\n".join(Valid_Results),
                                               URL_Body, The_File_Extension)

        if Output_File:

            for Host in Valid_Hosts:
                General.Connections(Output_File, Query, Local_Plugin_Name,
                                    Host, URL_Domain, "Domain Spoof", Task_ID,
                                    General.Get_Title(Host),
                                    Local_Plugin_Name.lower())

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")
Ejemplo n.º 23
0
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Tor_Pull_URL = Tor_General_URL + Query
        Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL,
                                                  Tor_Scrape_Regex_URL)

        if Tor_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   Tor_Plugin_Name.lower(),
                                                   "\n".join(Tor_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in Tor_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            Tor_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No Tor links scraped.")

        I2P_Pull_URL = I2P_General_URL + Query
        I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL,
                                                  I2P_Scrape_Regex_URL)

        if I2P_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   I2P_Plugin_Name.lower(),
                                                   "\n".join(I2P_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in I2P_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            I2P_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No I2P links scraped.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Ejemplo n.º 24
0
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Object.Dump_JSON(),
                                                 Query,
                                                 The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for JSON_Result in JSON_Response['results']:
                        JSON_Result_Responses = Common.Request_Handler(
                            JSON_Result['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Result_Response = JSON_Result_Responses[
                            "Filtered"]

                        if JSON_Result['artistViewUrl'] not in Cached_Data and JSON_Result['artistViewUrl'] not in Data_to_Cache:
                            Apple_Store_Regex = Common.Regex_Handler(
                                JSON_Result['artistViewUrl'],
                                Custom_Regex=r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    JSON_Result_Response,
                                    Apple_Store_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Result['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Result['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Result['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Ejemplo n.º 25
0
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = Common.Request_Handler(Main_API_URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Indented_JSON_Response = JSON_Object.Dump_JSON()
                    Main_Output_File = General.Main_File_Create(
                        Directory, Plugin_Name, Indented_JSON_Response, Query,
                        The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = Common.Request_Handler(
                                Main_URL,
                                Filter=True,
                                Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    General.Get_Title(Main_URL),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name,
                                        Domain.strip("beta."),
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Main_Output_File, Output_file],
                                        Main_URL,
                                        f"Canadian Business Number {Query}",
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search."
                        )

                elif Type == "CCN":
                    Total_Results = 0
                    Iterator = "page=0"

                    while Limit > Total_Results and Iterator is not None:
                        Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(
                            Query
                        ) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                        Response = Common.Request_Handler(Main_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Total_Results += len(JSON_Response["docs"])

                        if "paging" in JSON_Response and "next" in JSON_Response.get(
                                "paging"):
                            Iterator = JSON_Response["paging"]["next"]

                        else:
                            Iterator = None

                        Indented_JSON_Response = JSON_Object.Dump_JSON()
                        Limit = General.Get_Limit(Limit)

                        try:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Indented_JSON_Response,
                                Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain.strip("beta."),
                                "Company Details", Task_ID, Plugin_Name)

                            for JSON_Item in JSON_Response['docs']:

                                if JSON_Item.get('BN'):
                                    CCN = JSON_Item['Company_Name']
                                    CBN = str(int(JSON_Item['BN']))

                                    Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                    if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(
                                            Limit):
                                        Current_Responses = Common.Request_Handler(
                                            Full_CCN_URL,
                                            Filter=True,
                                            Host=f"https://{Domain}")
                                        Current_Response = Current_Responses[
                                            "Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            CCN.replace(' ', '-'),
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_CCN_URL,
                                                f"Canadian Business Number {CBN} for Query {Query}",
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_CCN_URL)

                                        else:
                                            logging.warning(
                                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response."
                                    )

                        except:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid request type."
                    )

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Ejemplo n.º 26
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)

            try:
                pyhibp.set_api_key(key=Load_Configuration())

            except:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to set API key, make sure it is set in the configuration file.")

            if self.Type == "email":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_pastes(email_address=Query)
                    logging.info(Query_Response)

                    if Query_Response:
                        Current_Domain = Query_Response[0]["Source"]
                        ID = Query_Response[0]["Id"]
                        Link = f"https://www.{Current_Domain}.com/{ID}"
                        JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON()

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, JSON_Query_Response, "email", self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "breach":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_single_breach(breach_name=Query)

                    if Query_Response:
                        Current_Domain = Query_Response["Domain"]
                        Link = f"https://www.{Current_Domain}.com/"
                        JSON_Query_Response = Common.JSON_Handler(Query_Response).Dump_JSON()

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "password":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pw.is_password_breached(password=Query)
                    logging.info(Query_Response)

                    if Query_Response:
                        Link = f"https://{self.Domain}/Passwords?{Query}"

                        if Link not in Cached_Data and Link not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, str(Query_Response), "password", ".txt")

                            if Output_file:
                                Output_Connections = General.Connections(Query, Local_Plugin_Name, self.Domain, self.Result_Type_2, self.Task_ID, Local_Plugin_Name.lower())
                                Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            elif self.Type == "account":
                Local_Plugin_Name = self.Plugin_Name + " " + self.Type
                Cached_Data_Object = General.Cache(Directory, Local_Plugin_Name)
                Cached_Data = Cached_Data_Object.Get_Cache()

                for Query in self.Query_List:
                    Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True)

                    if Query_Response:
                        Current_Step = 0

                        for Response in Query_Response:
                            Current_Response = pyhibp.get_single_breach(breach_name=Response['Name'])
                            JSON_Query_Response = Common.JSON_Handler(Current_Response).Dump_JSON()
                            Link = "https://" + Current_Response['self.Domain']

                            if Current_Response['Domain'] not in Cached_Data and Current_Response['Domain'] not in Data_to_Cache and Current_Step < int(self.Limit):
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", self.The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(Query, Local_Plugin_Name, Current_Response['Domain'], self.Result_Type_1, self.Task_ID, Local_Plugin_Name.lower())
                                    Output_Connections.Output([Output_file], Link, General.Get_Title(Link), self.Concat_Plugin_Name)
                                    Data_to_Cache.append(Current_Response['Domain'])

                                else:
                                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                Current_Step += 1

                Cached_Data_Object.Write_Cache(Data_to_Cache)

            else:
                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid Type provided.")

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example No. 27
0
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            URL_Query = urllib.parse.quote(Query)
            URL = f"https://api.duckduckgo.com/?q={URL_Query}&format=json"
            DDG_Response = requests.get(URL).text
            JSON_Response = json.loads(DDG_Response)
            JSON_Output_Response = json.dumps(JSON_Response,
                                              indent=4,
                                              sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Output_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "duckduckgo.com",
                                                     "Search Result", Task_ID,
                                                     Plugin_Name.lower())

            if JSON_Response.get('RelatedTopics'):
                Current_Step = 0

                for DDG_Item_Link in JSON_Response['RelatedTopics']:

                    try:

                        if 'FirstURL' in DDG_Item_Link:
                            DDG_URL = DDG_Item_Link['FirstURL']
                            Title = General.Get_Title(DDG_URL)
                            Title = f"DuckDuckGo | {Title}"

                            if DDG_URL not in Cached_Data and DDG_URL not in Data_to_Cache and Current_Step < int(
                                    Limit):
                                headers = {
                                    'Content-Type': 'application/json',
                                    'User-Agent':
                                    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
                                    'Accept':
                                    'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                                    'Accept-Language': 'en-US,en;q=0.5'
                                }
                                DDG_Item_Response = requests.get(
                                    DDG_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    DDG_Item_Response, DDG_URL,
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], DDG_URL,
                                        Title, Plugin_Name.lower())
                                    Data_to_Cache.append(DDG_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                                Current_Step += 1

                            else:
                                break

                        elif 'Topics' in DDG_Item_Link:

                            if isinstance(DDG_Item_Link['Topics'], list):
                                JSON_Response['RelatedTopics'].extend(
                                    DDG_Item_Link['Topics'])

                    except Exception as e:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example No. 28
0
def General_Pull(Handle, Limit, Directory, API, Task_ID):
    Data_to_Cache = []
    JSON_Response = []
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Latest_Tweets = API.user_timeline(screen_name=Handle, count=Limit)

    for Tweet in Latest_Tweets:
        Link = ""

        try:
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name,
                'url': Tweet.entities['urls'][0]["expanded_url"]
            })
            Link = Tweet.entities['urls'][0]["expanded_url"]

        except (IndexError, KeyError):
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name
            })

    JSON_Output = json.dumps(JSON_Response, indent=4, sort_keys=True)

    for JSON_Item in JSON_Response:

        if 'text' in JSON_Item and 'url' in JSON_Item:
            Link = JSON_Item['url']

            if Link not in Cached_Data and Link not in Data_to_Cache:
                logging.info(General.Date() + " " + Link)
                Item_Response = requests.get(Link).text
                Output_file = General.Create_Query_Results_Output_File(Directory, Handle, Plugin_Name, Item_Response, str(JSON_Item['id']), ".html")

                if Output_file:
                    General.Connections(Output_file, Handle, Plugin_Name, Link, "twitter.com", "Data Leakage", Task_ID, General.Get_Title(Link), Plugin_Name.lower())

                else:
                    logging.warning(General.Date() + " Output file not returned.")

                Data_to_Cache.append(Link)

        else:
            logging.warning(General.Date() + " Insufficient parameters provided.")

    General.Main_File_Create(Directory, Plugin_Name, JSON_Output, Handle, ".json")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example No. 29
0
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            File_Dir = os.path.dirname(os.path.realpath('__file__'))
            Configuration_File = os.path.join(
                File_Dir, 'plugins/common/config/RSS_Feeds.txt')

            # Read the list of RSS feed URLs to search.
            with open(Configuration_File, "r") as Current_File:
                URLs = Current_File.read().splitlines()

        except Exception:
            URLs = []
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to open the RSS feed configuration file; please provide a valid file."
            )

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            for URL in URLs:  # URLs to be controlled by the web app.
                RSS = feedparser.parse(URL)
                Current_Step = 0

                for Feed in RSS.entries:

                    if Query in Feed.description:
                        Dump_Types = General.Data_Type_Discovery(
                            Feed.description)
                        File_Link = Feed.link.replace("https://", "")
                        File_Link = File_Link.replace("http://", "")
                        File_Link = File_Link.replace("www.", "")
                        File_Link = File_Link.replace("/", "-")
                        Domain = URL.replace("https://", "")
                        Domain = Domain.replace("http://", "")
                        Domain = Domain.replace("www.", "")

                        if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                Feed.description, File_Link,
                                The_File_Extension)
                            Title = "RSS Feed | " + General.Get_Title(
                                Feed.link)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain, "News Report",
                                    Task_ID, Plugin_Name.lower())
                                Output_Connections.Output(
                                    [Output_file],
                                    Feed.link,
                                    Title,
                                    Plugin_Name.lower(),
                                    Dump_Types=Dump_Types)
                                Data_to_Cache.append(Feed.link)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                    else:
                        logging.info(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Query not found."
                        )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example No. 30
0
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                URL_Body = f'https://{self.Domain}'
                Main_URL = URL_Body + '/' + Query.lower().replace(' ', '-')
                Responses = Common.Request_Handler(
                    Main_URL, Filter=True, Host=f"https://www.{self.Domain}")
                Response = Responses["Regular"]
                Filtered_Response = Responses["Filtered"]
                Main_File = General.Main_File_Create(Directory,
                                                     self.Plugin_Name,
                                                     Filtered_Response, Query,
                                                     self.The_File_Extension)
                Regex = Common.Regex_Handler(
                    Response,
                    Custom_Regex=
                    r"\<tr\>\s+\<td\sclass\=\"name\"\>\s+\<a\shref\=\"([\/\d\w\-\+\?\.]+)\"\>([\/\d\w\-\+\?\.\(\)\s\,\;\:\~\`\!\@\#\$\%\^\&\*\[\]\{\}]+)\<\/a\>\s+\<\/td\>",
                    Findall=True)

                if Regex:
                    Current_Step = 0
                    Output_Connections = General.Connections(
                        Query, self.Plugin_Name, self.Domain, self.Result_Type,
                        self.Task_ID, self.Concat_Plugin_Name)

                    for URL, Title in Regex:
                        Item_URL = URL_Body + URL
                        Current_Response = Common.Request_Handler(Item_URL)
                        Current_Item_Regex = Common.Regex_Handler(
                            Current_Response,
                            Custom_Regex=
                            r"\<button\sclass\=\"btn\sbtn\-primary\spassword\"\s+data\-data\=\"([\-\d\w\?\/]+)\"\s+data\-toggle\=\"modal\"\s+data\-target\=\"\#modal\"\s+\>show\sme\!\<\/button\>"
                        )

                        if Current_Item_Regex:

                            try:
                                Detailed_Item_URL = URL_Body + Current_Item_Regex.group(1)
                                # Fetch the detailed item rather than re-requesting the listing entry.
                                Detailed_Responses = Common.Request_Handler(
                                    Detailed_Item_URL,
                                    Filter=True,
                                    Host=f"https://www.{self.Domain}")
                                Detailed_Response = Detailed_Responses["Regular"]
                                Output_Dict = Common.JSON_Handler(
                                    Detailed_Response).Is_JSON()

                                # Assumes Is_JSON returns the parsed object,
                                # or a falsy value when the body is not JSON.
                                if Output_Dict:
                                    Output_Response = "<head><title>" + Output_Dict["title"] + "</title></head>\n"
                                    Output_Response = Output_Response + Output_Dict["data"]

                                else:
                                    Output_Response = Detailed_Responses["Filtered"]

                                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                                        self.Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        Output_Response, Title,
                                        self.The_File_Extension)

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file], Item_URL,
                                            General.Get_Title(Item_URL),
                                            self.Concat_Plugin_Name)
                                        Data_to_Cache.append(Item_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

                            except:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to generate output, may have a blank detailed response."
                                )

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for current result."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression for provided query."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")