Example #1
def Search(Query_List, Task_ID, Limit=10):

    try:
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Twitter_Credentials = Load_Configuration()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Authentication = tweepy.OAuthHandler(Twitter_Credentials[0],
                                                     Twitter_Credentials[1])
                Authentication.set_access_token(Twitter_Credentials[2],
                                                Twitter_Credentials[3])
                API = tweepy.API(Authentication)
                General_Pull(Query, Limit, Directory, API, Task_ID)

            except:
                logging.info(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?"
                )

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #2
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Configuration_Details = Load_Configuration()
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Configuration_Details[1].lower() == "true":
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request, auth=(Configuration_Details[0], '')).text

        else:
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request, auth=(Configuration_Details[0], '')).text

        JSON_Response = json.loads(Response)

        if 'exists' not in JSON_Response:

            if JSON_Response:

                if Request not in Cached_Data and Request not in Data_to_Cache:

                    try:
                        SSLMate_Regex = re.search(r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?", Query)

                        if SSLMate_Regex:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(JSON_Response, indent=4, sort_keys=True), SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Request, "sslmate.com", "Domain Spoof", Task_ID, General.Get_Title(Request), Plugin_Name.lower())

                    except:
                        logging.info(str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + " [-] Failed to create file.")

                    Data_to_Cache.append(Request)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
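The directory and logger setup at the top of every example is identical boilerplate. As an illustrative sketch only (not part of the project), it could be collapsed into one standard-library helper; the Directory and Log_File values would still come from General.Make_Directory and General.Logging as above.

# Illustrative helper (hypothetical name); mirrors the logging setup repeated in the examples.
import logging
import os

def Setup_Plugin_Logging(Directory, Log_File):
    # The root logger filters at INFO; records that pass are written to the plugin's log file.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
    logger.addHandler(handler)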
Example #3
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    YouTube_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0])
        Search_Response = YouTube_Handler.search().list(
            q=Query,
            type='video',
            location=YouTube_Details[3],
            locationRadius=YouTube_Details[4],
            part='id,snippet',
            maxResults=Limit,
        ).execute()
        General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, ".json")

        for Search_Result in Search_Response.get('items', []):
            Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId']
            Search_Video_Response = requests.get(Full_Video_URL).text

            if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, Full_Video_URL, "youtube.com", "Data Leakage", Task_ID, General.Get_Title(Full_Video_URL), Plugin_Name.lower())

                Data_to_Cache.append(Full_Video_URL)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #4
def Search(Query_List, Task_ID, **kwargs):

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Twitter_Credentials = Load_Configuration()
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
            Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
            API = tweepy.API(Authentication)
            General_Pull(Query, Limit, Directory, API, Task_ID)

        except:
            logging.info(General.Date() + " Failed to get results. Are you connected to the internet?")
Example #5
    def Search(self):

        try:
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Twitter_Credentials = self.Load_Configuration()

            for Query in self.Query_List:

                try:
                    Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
                    Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
                    API = tweepy.API(Authentication)
                    self.General_Pull(Query, Directory, API)

                except:
                    logging.info(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to get results. Are you connected to the internet?")

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #6
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="IP"):
                API_Key = Load_Configuration()
                Search_Response = Common.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Object = Common.JSON_Handler(Search_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extensions["Main"])
                    HTML_Output_File_Data = General.JSONDict_to_HTML(
                        JSON_Response, JSON_Output_Response,
                        f"IPStack Query {Query}")
                    HTML_Output_File = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, HTML_Output_File_Data,
                        Title.replace(" ", "-"),
                        The_File_Extensions["Main_Converted"])

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL,
                                                  Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #7
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0'}
        Response = requests.get('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc', headers=headers).text
        Response = json.loads(Response)
        JSON_Response = json.dumps(Response, indent=4, sort_keys=True)
        Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        if Output_file:
            Current_Step = 0

            for Search_Result in Response:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["magnet"]

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                    # Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Result_Response, Result_Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Result_URL, "thepiratebay.org", "Data Leakage", Task_ID, Result_Title, Plugin_Name.lower())

                    Data_to_Cache.append(Result_URL)
                    Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #8
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # Query can be Title or ISBN
        Main_URL = "http://gen.lib.rus.ec/search.php?req=" + Query + "&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
        Lib_Gen_Response = requests.get(Main_URL).text
        General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response, Query, The_File_Extension)
        Lib_Gen_Regex = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Lib_Gen_Response)

        if Lib_Gen_Regex:
            Current_Step = 0

            for Regex in Lib_Gen_Regex:
                Item_URL = "http://gen.lib.rus.ec/" + Regex
                Lib_Item_Response = requests.get(Item_URL).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Lib_Item_Response, Regex, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Item_URL, "gen.lib.rus.ec", "Data Leakage", Task_ID, General.Get_Title(Item_URL), Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
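The keyword-argument Limit handling above recurs across several plugins. A minimal consolidated sketch (the helper name is hypothetical and not part of the project):

# Hypothetical helper mirroring the Limit handling used in the examples above.
def Parse_Limit(kwargs, Default=10):
    # Return a positive integer result limit, falling back to Default on bad input.

    try:
        Limit = int(kwargs.get("Limit", Default))
        return Limit if Limit > 0 else Default

    except (TypeError, ValueError):
        return Default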
Example #9
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            BSB_Search_URL = f"https://www.bsbnumbers.com/{Query}.html"
            Response = requests.get(BSB_Search_URL).text
            Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors",
                                    Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "bsbnumbers.com",
                                                     "BSB Details", Task_ID,
                                                     Plugin_Name.lower())

            if not Error_Regex:

                if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Response, Query,
                        The_File_Extension)

                    if Output_file:
                        Output_Connections.Output(
                            [Output_file], BSB_Search_URL,
                            General.Get_Title(BSB_Search_URL),
                            Plugin_Name.lower())
                        Data_to_Cache.append(BSB_Search_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Query returned error, probably does not exist."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #10
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                BSB_Search_URL = f"https://www.{self.Domain}/{Query}.html"
                Responses = Common.Request_Handler(
                    BSB_Search_URL,
                    Filter=True,
                    Host=f"https://www.{self.Domain}")
                Response = Responses["Filtered"]
                Error_Regex = Common.Regex_Handler(
                    Response, Custom_Regex=r"Correct\sthe\sfollowing\serrors")
                Output_Connections = General.Connections(
                    Query, self.Plugin_Name, self.Domain, self.Result_Type,
                    self.Task_ID, self.Plugin_Name.lower())

                if not Error_Regex:

                    if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, self.Plugin_Name, Response,
                            Query, self.The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                [Output_file], BSB_Search_URL,
                                General.Get_Title(BSB_Search_URL),
                                self.Plugin_Name.lower())
                            Data_to_Cache.append(BSB_Search_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Query returned error, probably does not exist."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #11
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                if Common.Regex_Handler(Query, Type="Email"):
                    Link = f"https://{self.Domain}/home/verify-as-guest/{Query}"
                    JSON_Response = Common.Request_Handler(Link)
                    JSON_Object = Common.JSON_Handler(JSON_Response)

                    if JSON_Object.Is_JSON():
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        JSON_Output_Response = JSON_Object.Dump_JSON()
                        Table_JSON = {}

                        for Key, Value in JSON_Response.items():

                            if Key != "response":
                                Table_JSON[Key] = Value

                            else:

                                for Det_Key, Det_Val in JSON_Response["response"].items():
                                    Table_JSON[Det_Key] = Det_Val

                        Filter_JSON = [Table_JSON]
                        Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                        if Query not in Cached_Data and Query not in Data_to_Cache:
                            Title = f"Email Verification | {Query}"
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, JSON_Output_Response, Title, self.The_File_Extensions["Main"])
                            HTML_Output_File_Data = General.JSONDict_to_HTML(Filter_JSON, JSON_Output_Response, f"Email Verification Query {Query}")
                            HTML_Output_File = General.Create_Query_Results_Output_File(Directory, Query, self.Concat_Plugin_Name, HTML_Output_File_Data, Title, self.The_File_Extensions["Main_Converted"])

                            if Output_file and HTML_Output_File:
                                Output_Connections.Output([Output_file, HTML_Output_File], Link, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Link)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #12
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        vulners_api = vulners.Vulners(api_key=Load_Configuration())
        Search_Response = vulners_api.search(Query, limit=int(Limit))
        JSON_Response = json.dumps(Search_Response, indent=4, sort_keys=True)
        General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, ".json")

        for Search_Result in Search_Response:

            if Search_Result["bulletinFamily"] not in Unacceptable_Bulletins:
                Result_Title = Search_Result["title"]
                Result_URL = Search_Result["vhref"]
                Search_Result_Response = requests.get(Result_URL).text

                if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Result_Response, Result_Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name, Result_URL, "vulners.com", "Exploit", Task_ID, Result_Title, Plugin_Name.lower())

                    Data_to_Cache.append(Result_URL)

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #13
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Output_Connections = General.Connections(Query, Plugin_Name, "general-insurance.coles.com.au", "Vehicle Details", Task_ID, Concat_Plugin_Name)

            for State in States:
                Post_URL = 'https://general-insurance.coles.com.au/bin/wesfarmers/search/vehiclerego'
                data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car 
Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
                headers = {'Content-Type': 'text/plain;charset=UTF-8',
                           'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36',
                           'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate, br',
                           'Referer': 'https://general-insurance.coles.com.au/motor/get-quote',
                           'Origin': 'https://general-insurance.coles.com.au', 'Host': 'general-insurance.coles.com.au'}
                Registration_Response = requests.post(Post_URL, data=data, headers=headers).text
                Registration_Response = json.loads(Registration_Response)

                try:
                    Title = "Vehicle Registration | " + Registration_Response['vehicles'][0]['make'] + " " + Registration_Response['vehicles'][0]['model']
                    Item_URL = Post_URL + "?" + Query

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, json.dumps(Registration_Response, indent=4, sort_keys=True), Title.replace(" ", "-"), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Item_URL, Title, Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                except:
                    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - No result found for given query {Query} for state {State}.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #14
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
            Craigslist_Response = feedparser.parse(Main_URL)
            Craigslist_Items = Craigslist_Response["items"]
            Current_Step = 0

            for Item in Craigslist_Items:
                Item_URL = Item["link"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(Limit):
                    Craigslist_Response = requests.get(Item_URL).text
                    Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                    Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org/"
                    Filename = Item_URL.replace(Local_URL, "")
                    Filename = Filename.replace(".html/", "")
                    Filename = Filename.replace(".html", "")
                    Filename = Filename.replace("/", "-")
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Craigslist_Response, Filename, The_File_Extension)

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Local_Domain, "Search Result", Task_ID, Plugin_Name.lower())
                        Output_Connections.Output([Output_file], Item_URL, General.Get_Title(Item_URL), Plugin_Name.lower())
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                    Current_Step += 1

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #15
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        YouTube_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            YouTube_Handler = discovery.build(YouTube_Details[1], YouTube_Details[2], developerKey=YouTube_Details[0], cache_discovery=False)

            if YouTube_Details[3] and YouTube_Details[4]:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', location=YouTube_Details[3], locationRadius=YouTube_Details[4], part='id,snippet', maxResults=Limit,).execute()

            else:
                Search_Response = YouTube_Handler.search().list(q=Query, type='video', part='id,snippet', maxResults=Limit,).execute()
            
            Main_File = General.Main_File_Create(Directory, Plugin_Name, json.dumps(Search_Response.get('items', []), indent=4, sort_keys=True), Query, The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name, "youtube.com", "Social Media - Media", Task_ID, Plugin_Name.lower())

            for Search_Result in Search_Response.get('items', []):
                Full_Video_URL = "https://www.youtube.com/watch?v=" + Search_Result['id']['videoId']
                Search_Video_Response = requests.get(Full_Video_URL).text
                Title = "YouTube | " + Search_Result['snippet']['title']

                if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Search_Video_Response, Search_Result['id']['videoId'], The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file], Full_Video_URL, Title, Plugin_Name.lower())
                        Data_to_Cache.append(Full_Video_URL)

                    else:
                        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #16
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if General.Regex_Checker(Query, "IP"):
                API_Key = Load_Configuration()
                Search_Response = General.Request_Handler(
                    f"http://api.{Domain}/{Query}?access_key={API_Key}")
                JSON_Response = json.loads(Search_Response)
                JSON_Output_Response = json.dumps(JSON_Response,
                                                  indent=4,
                                                  sort_keys=True)
                Output_Connections = General.Connections(
                    Query, Plugin_Name, Domain, "IP Address Information",
                    Task_ID, Plugin_Name.lower())

                if Query not in Cached_Data and Query not in Data_to_Cache:
                    Result_URL = f"https://{Domain}/?{Query}"
                    Title = f"IP Stack | {Query}"
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, JSON_Output_Response,
                        Title, The_File_Extension)

                    if Output_file:
                        Output_Connections.Output([Output_file], Result_URL,
                                                  Title, Plugin_Name.lower())
                        Data_to_Cache.append(Result_URL)

                    else:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #17
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        BSB_Search_URL = "https://www.bsbnumbers.com/" + Query + ".html"
        Response = requests.get(BSB_Search_URL).text
        Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors", Response)

        if not Error_Regex:

            if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(
                    Directory, Query, Plugin_Name, Response, Query,
                    The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name,
                                        BSB_Search_URL, "bsbnumbers.com",
                                        "Data Leakage", Task_ID,
                                        General.Get_Title(BSB_Search_URL),
                                        Plugin_Name.lower())

                Data_to_Cache.append(BSB_Search_URL)

        else:
            logging.warning(General.Date() +
                            " Query returned error, probably does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
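Several of the older examples end by appending to the cache file when it already holds entries and creating it otherwise. A minimal sketch of that pattern as a helper, assuming General.Write_Cache accepts the mode argument exactly as used above (the helper name is hypothetical):

# Hypothetical wrapper around the append-or-create cache write used in the examples above.
def Flush_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name):
    Mode = "a" if Cached_Data else "w"
    General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, Mode)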
Example #18
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Location = self.Load_Configuration()
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                Main_URL = f"https://www.{self.Domain}/en-{Location}/search?q={Query}"
                Win_Store_Response = Common.Request_Handler(Main_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True)
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Win_Store_Response, Query, self.The_File_Extension)
                Win_Store_Regex = Common.Regex_Handler(Win_Store_Response, Custom_Regex=r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)", Findall=True)
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)

                if Win_Store_Regex:
                    Current_Step = 0

                    for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                        Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                        Win_Store_Responses = Common.Request_Handler(Item_URL, Application_JSON_CT=True, Accept_XML=True, Accept_Language_EN_US=True, Filter=True, Host=f"https://www.{self.Domain}")
                        Win_Store_Response = Win_Store_Responses["Filtered"]
                        Title = "Windows Store | " + General.Get_Title(Item_URL)

                        if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Win_Store_Response, Regex_Group_1, self.The_File_Extension)

                            if Output_file:
                                Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Item_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #19
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                # Query can be Title or ISBN
                Main_URL = f"http://{self.Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
                Lib_Gen_Response = Common.Request_Handler(Main_URL)
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, Lib_Gen_Response, Query, self.The_File_Extension)
                Lib_Gen_Regex = Common.Regex_Handler(Lib_Gen_Response, Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Findall=True)

                if Lib_Gen_Regex:
                    Current_Step = 0

                    for Regex in Lib_Gen_Regex:
                        Item_URL = f"http://{self.Domain}/{Regex}"
                        Title = General.Get_Title(Item_URL).replace("Genesis:", "Genesis |")
                        Lib_Item_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"http://{self.Domain}")
                        Lib_Item_Response = Lib_Item_Responses["Filtered"]

                        if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, Lib_Item_Response, Regex, self.The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Concat_Plugin_Name)
                                Output_Connections.Output([Main_File, Output_file], Item_URL, Title, self.Concat_Plugin_Name)
                                Data_to_Cache.append(Item_URL)

                            else:
                                logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                            Current_Step += 1

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to match regular expression.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #20
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Headers_Custom = {"Referer": f"https://www.doingbusiness.org/en/data/exploreeconomies/{Query}"}
            Main_URL = f"https://wbgindicatorsqa.azure-api.net/DoingBusiness/api/GetEconomyByURL/{Query}"
            Doing_Business_Response = Common.Request_Handler(Main_URL, Optional_Headers=Headers_Custom)
            JSON_Object = Common.JSON_Handler(Doing_Business_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()

            if 'message' not in JSON_Response:
                Main_File = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query, The_File_Extensions["Main"])
                Item_URL = f"https://www.{Domain}/en/data/exploreeconomies/{Query}"
                Title = f"Doing Business | {Query}"
                Current_Doing_Business_Responses = Common.Request_Handler(Item_URL, Filter=True, Host=f"https://www.{Domain}")
                Current_Doing_Business_Response = Current_Doing_Business_Responses["Filtered"]

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Current_Doing_Business_Response, Query, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Economic Details", Task_ID, Concat_Plugin_Name)
                        Output_Connections.Output([Main_File, Output_file], Item_URL, Title, Concat_Plugin_Name)
                        Data_to_Cache.append(Item_URL)

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #21
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Response = Common.Request_Handler('https://tpbc.herokuapp.com/search/' + Query.replace(" ", "+") + '/?sort=seeds_desc')
            JSON_Object = Common.JSON_Handler(Response)
            Response = JSON_Object.To_JSON_Loads()
            JSON_Response = JSON_Object.Dump_JSON()
            Output_file = General.Main_File_Create(Directory, Plugin_Name, JSON_Response, Query, The_File_Extension)

            if Output_file:
                Current_Step = 0
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Torrent", Task_ID, Plugin_Name.lower())

                for Search_Result in Response:
                    Result_Title = Search_Result["title"]
                    Result_URL = Search_Result["magnet"]

                    if Result_URL not in Cached_Data and Result_URL not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Response, Result_Title, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file], Result_URL, General.Get_Title(Result_URL), Plugin_Name.lower())
                            Data_to_Cache.append(Result_URL)

                        else:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
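
A standalone sketch of the same torrent search call, assuming the tpbc.herokuapp.com service returns a JSON list of objects with "title" and "magnet" keys, as the plugin above expects; the query string is hypothetical.

import requests

query = "ubuntu"  # hypothetical query
response = requests.get(
    "https://tpbc.herokuapp.com/search/" + query.replace(" ", "+") + "/?sort=seeds_desc",
    timeout=10)

for search_result in response.json()[:10]:  # honour a limit of 10, as in the plugin
    print(search_result["title"], search_result["magnet"])
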
Example #22
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            File_Dir = os.path.dirname(os.path.realpath('__file__'))
            Configuration_File = os.path.join(
                File_Dir, 'plugins/common/config/RSS_Feeds.txt')
            Current_File = open(
                Configuration_File, "r"
            )  # Open the configuration file and read in each RSS feed URL to search.
            URLs = Current_File.read().splitlines()
            Current_File.close()

        except:
            URLs = []  # Prevent a NameError below if the feed list could not be read.
            logging.warning(
                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to open the RSS feed configuration file, please provide a valid file."
            )

        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            for URL in URLs:  # URLs to be controlled by the web app.
                RSS = feedparser.parse(URL)
                Current_Step = 0

                for Feed in RSS.entries:

                    if Query in Feed.description:
                        Dump_Types = General.Data_Type_Discovery(
                            Feed.description)
                        File_Link = Feed.link.replace("https://", "")
                        File_Link = File_Link.replace("http://", "")
                        File_Link = File_Link.replace("www.", "")
                        File_Link = File_Link.replace("/", "-")
                        Domain = URL.replace("https://", "")
                        Domain = Domain.replace("http://", "")
                        Domain = Domain.replace("www.", "")

                        if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                Feed.description, File_Link,
                                The_File_Extension)
                            Title = "RSS Feed | " + General.Get_Title(
                                Feed.link)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name, Domain, "News Report",
                                    Task_ID, Plugin_Name.lower())
                                Output_Connections.Output(
                                    [Output_file],
                                    Feed.link,
                                    Title,
                                    Plugin_Name.lower(),
                                    Dump_Types=Dump_Types)
                                Data_to_Cache.append(Feed.link)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                    else:
                        logging.info(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Query not found."
                        )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
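
A reduced sketch of the RSS matching logic above, assuming feedparser normalises each entry to expose description and link attributes; the feed URL and search term are hypothetical.

import feedparser

query = "data breach"                 # hypothetical search term
url = "https://example.com/feed.rss"  # hypothetical RSS feed URL
rss = feedparser.parse(url)

for feed_entry in rss.entries:
    # Mirror the plugin's substring match against the entry description.
    if query in feed_entry.get("description", ""):
        print(feed_entry.link)
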
Example #23
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Object.Dump_JSON(),
                                                 Query,
                                                 The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for Store_Item in JSON_Response['results']:
                        Store_Item_Responses = Common.Request_Handler(
                            Store_Item['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        Store_Item_Response = Store_Item_Responses["Filtered"]

                        if Store_Item['artistViewUrl'] not in Cached_Data and Store_Item['artistViewUrl'] not in Data_to_Cache:
                            Apple_Store_Regex = Common.Regex_Handler(
                                Store_Item['artistViewUrl'],
                                Custom_Regex=r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Store_Item_Response,
                                    Apple_Store_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        Store_Item['artistViewUrl'],
                                        General.Get_Title(Store_Item['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Store_Item['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - No results found for the provided query."
                    )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid response, the resultCount field is missing."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
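
A standalone sketch of the store lookup above, assuming the Domain variable points at itunes.apple.com, whose public search endpoint returns a resultCount field and a results list with artistViewUrl entries; the query and country are hypothetical.

import urllib.parse
import requests

request_query = urllib.parse.quote("password manager")  # hypothetical query
url = (f"https://itunes.apple.com/search?term={request_query}"
       "&country=us&entity=software&limit=10")
json_response = requests.get(url, timeout=10).json()

if json_response.get("resultCount", 0) > 0:
    for result in json_response["results"]:
        print(result.get("artistViewUrl"))
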
Example #24
def Character_Switch(Query_List, Task_ID):
    Local_Plugin_Name = Plugin_Name + "-Character-Switch"
    Data_to_Cache = []
    Cached_Data = []
    Valid_Results = ["Domain,IP Address"]
    Valid_Hosts = []
    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Local_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    logging.info(
        str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
        " Character Switching Selected.")
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        URL_Regex = re.search(
            r"(https?:\/\/(www\.)?)?([-a-zA-Z0-9@:%_\+~#=]{2,256})(\.[a-z]{2,3})(\.[a-z]{2,3})?(\.[a-z]{2,3})?",
            Query)

        if URL_Regex:
            URL_Prefix = URL_Regex.group(1)
            URL_Body = URL_Regex.group(3)

            if URL_Regex.group(5) and URL_Regex.group(6):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(
                    5) + URL_Regex.group(6)

            elif URL_Regex.group(5):
                URL_Extension = URL_Regex.group(4) + URL_Regex.group(5)

            else:
                URL_Extension = URL_Regex.group(4)

        else:
            logging.warning(
                str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
                " Please provide valid URLs.")
            continue

        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            URL_Body)
        URL_List = list(URL_Body)
        Altered_URLs = Rotor.Search(URL_List, True, False, False, False, True,
                                    True, True)
        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            ", ".join(Altered_URLs))

        for Altered_URL in Altered_URLs:

            if not Altered_URL == URL_Body:

                try:
                    Query = Altered_URL + URL_Extension
                    Web_Host = (URL_Prefix or "http://").replace("https", "http") + Query
                    Response = socket.gethostbyname(Query)

                    if Response:
                        Cache = Query + ":" + Response

                        if Cache not in Cached_Data and Cache not in Data_to_Cache:
                            Valid_Results.append(Query + "," + Response)
                            Data_to_Cache.append(Cache)
                            Valid_Hosts.append(Web_Host)

                except Exception as e:
                    logging.info(
                        str(datetime.datetime.now().strftime(
                            '%Y-%m-%d %H:%M:%S')) + str(e))

        logging.info(
            str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
            Directory)
        URL_Domain = URL_Body + URL_Extension
        Output_File = General.Main_File_Create(Directory, Local_Plugin_Name,
                                               "\n".join(Valid_Results),
                                               URL_Body, The_File_Extension)

        if Output_File:

            for Host in Valid_Hosts:
                General.Connections(Output_File, Query, Local_Plugin_Name,
                                    Host, URL_Domain, "Domain Spoof", Task_ID,
                                    General.Get_Title(Host),
                                    Local_Plugin_Name.lower())

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")
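
A simplified sketch of the character-switching idea, with a small substitution table standing in for the project's internal Rotor module, and socket.gethostbyname used to test whether a spoofed variant actually resolves; the substitutions and domain are hypothetical.

import socket

SWAPS = {"o": "0", "i": "1", "l": "1", "e": "3"}  # hypothetical substitutions

def variants(body):
    # Yield the domain body with each swappable character replaced once.
    for index, character in enumerate(body):
        if character in SWAPS:
            yield body[:index] + SWAPS[character] + body[index + 1:]

for altered_url in variants("example"):
    candidate = altered_url + ".com"
    try:
        print(candidate + "," + socket.gethostbyname(candidate))
    except socket.gaierror:
        pass  # this variant does not resolve
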
Example #25
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "CBN":
                    Main_API_URL = f'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B{Query}%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc'
                    Response = Common.Request_Handler(Main_API_URL)
                    JSON_Object = Common.JSON_Handler(Response)
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    Indented_JSON_Response = JSON_Object.Dump_JSON()
                    Main_Output_File = General.Main_File_Create(
                        Directory, Plugin_Name, Indented_JSON_Response, Query,
                        The_File_Extensions["Main"])

                    try:

                        if JSON_Response['count'] != 0:
                            Query = str(int(Query))
                            Main_URL = f'https://{Domain}/search/results?search=%7B{Query}%7D&status=Active'
                            Responses = Common.Request_Handler(
                                Main_URL,
                                Filter=True,
                                Host=f"https://{Domain}")
                            Response = Responses["Filtered"]

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name, Response,
                                    General.Get_Title(Main_URL),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name,
                                        Domain.strip("beta."),
                                        "Company Details", Task_ID,
                                        Plugin_Name)
                                    Output_Connections.Output(
                                        [Main_Output_File, Output_file],
                                        Main_URL,
                                        f"Canadian Business Number {Query}",
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CBN Search."
                        )

                elif Type == "CCN":
                    Total_Results = 0
                    Iterator = "page=0"

                    while Limit > Total_Results and Iterator is not None:
                        Main_URL = 'https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B' + urllib.parse.quote(
                            Query
                        ) + f'%7D+Status_State:Active&lang=en&queryaction=fieldquery&sortfield=Company_Name&sortorder=asc&{Iterator}'
                        Response = Common.Request_Handler(Main_URL)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Total_Results += len(JSON_Response["docs"])

                        if "paging" in JSON_Response and "next" in JSON_Response.get(
                                "paging"):
                            Iterator = JSON_Response["paging"]["next"]

                        else:
                            Iterator = None

                        Indented_JSON_Response = JSON_Object.Dump_JSON()
                        Limit = General.Get_Limit(Limit)

                        try:
                            Main_File = General.Main_File_Create(
                                Directory, Plugin_Name, Indented_JSON_Response,
                                Query, The_File_Extensions["Main"])
                            Current_Step = 0
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain.strip("beta."),
                                "Company Details", Task_ID, Plugin_Name)

                            for JSON_Item in JSON_Response['docs']:

                                if JSON_Item.get('BN'):
                                    CCN = JSON_Item['Company_Name']
                                    CBN = str(int(JSON_Item['BN']))

                                    Full_CCN_URL = f'https://{Domain}/search/results?search=%7B{CBN}%7D&status=Active'

                                    if Full_CCN_URL not in Cached_Data and Full_CCN_URL not in Data_to_Cache and Current_Step < int(
                                            Limit):
                                        Current_Responses = Common.Request_Handler(
                                            Full_CCN_URL,
                                            Filter=True,
                                            Host=f"https://{Domain}")
                                        Current_Response = Current_Responses[
                                            "Filtered"]
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query, Plugin_Name,
                                            str(Current_Response),
                                            CCN.replace(' ', '-'),
                                            The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Full_CCN_URL,
                                                f"Canadian Business Number {CBN} for Query {Query}",
                                                Concat_Plugin_Name)
                                            Data_to_Cache.append(Full_CCN_URL)

                                        else:
                                            logging.warning(
                                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Unable to retrieve business numbers from the JSON response."
                                    )

                        except:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for CCN Search."
                            )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid request type."
                    )

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
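
A reduced sketch of the CCN pagination loop above, assuming the searchapi.mrasservice.ca response carries a docs list and a paging.next query fragment, which is how the plugin reads it; the company name and limit are hypothetical.

import urllib.parse
import requests

query = urllib.parse.quote("example")  # hypothetical company name
iterator, total_results, limit = "page=0", 0, 50

while iterator is not None and total_results < limit:
    url = ("https://searchapi.mrasservice.ca/Search/api/v1/search?fq=keyword:%7B"
           f"{query}%7D+Status_State:Active&lang=en&queryaction=fieldquery"
           f"&sortfield=Company_Name&sortorder=asc&{iterator}")
    json_response = requests.get(url, timeout=10).json()
    docs = json_response.get("docs", [])
    total_results += len(docs)

    for doc in docs:
        print(doc.get("Company_Name"), doc.get("BN"))

    iterator = json_response.get("paging", {}).get("next")
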
Example #26
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        IX_Access_Token = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Data = {"term": Query, "buckets": [], "lookuplevel": 0, "maxresults": Limit, "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0, "terminate": []}
            IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search?k={IX_Access_Token}", Method="POST", JSON_Data=Data)
            JSON_Object = Common.JSON_Handler(IX_Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File_1 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-1", The_File_Extensions["Main"])

            if "id" in JSON_Response:
                Search_ID = JSON_Response["id"]
                IX_Response = Common.Request_Handler(f"https://2.{Domain}/intelligent/search/result?k={IX_Access_Token}&id={Search_ID}")
                JSON_Object = Common.JSON_Handler(IX_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File_2 = General.Main_File_Create(Directory, Plugin_Name, JSON_Output_Response, Query + "-Request-2", The_File_Extensions["Main"])
                Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Data Leakage", Task_ID, Plugin_Name.lower())

                if "records" in JSON_Response:

                    for IX_Item in JSON_Response["records"]:

                        if "systemid" in IX_Item and "name" in IX_Item:
                            IX_URL = f"https://{Domain}/?did=" + IX_Item['systemid']

                            if IX_Item["name"] != "":
                                Title = "IntelligenceX Data Leak | " + IX_Item["name"]

                            else:
                                Title = "IntelligenceX Data Leak | Untitled Document"

                            if IX_URL not in Cached_Data and IX_URL not in Data_to_Cache:
                                IX_Item_Responses = Common.Request_Handler(IX_URL, Filter=True, Host=f"https://{Domain}")
                                IX_Item_Response = IX_Item_Responses["Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, IX_Item_Response, IX_URL, The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output([Main_File_1, Main_File_2, Output_file], IX_URL, Title, Plugin_Name.lower())
                                    Data_to_Cache.append(IX_URL)

                                else:
                                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

            else:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - No results found.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
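
A standalone sketch of the two-step IntelligenceX flow above: POST the search term, then fetch results with the returned id. The 2.intelx.io host is an assumption inferred from the plugin's 2.{Domain} pattern, and the API key is a placeholder.

import requests

api_key = "YOUR-API-KEY"  # placeholder key
data = {"term": "example.com", "buckets": [], "lookuplevel": 0, "maxresults": 10,
        "timeout": 0, "datefrom": "", "dateto": "", "sort": 2, "media": 0,
        "terminate": []}
search = requests.post(f"https://2.intelx.io/intelligent/search?k={api_key}",
                       json=data, timeout=10).json()

if "id" in search:
    results = requests.get(
        f"https://2.intelx.io/intelligent/search/result?k={api_key}&id={search['id']}",
        timeout=10).json()

    for record in results.get("records", []):
        print(record.get("systemid"), record.get("name") or "Untitled Document")
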
Example #27
def Search(Query_List, Task_ID, Type, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Type == "User":
                from instagramy import InstagramUser
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramUser(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])

                if not CSE_Response.is_private:
                    Posts = CSE_Response.posts
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain,
                        "Social Media - Person", Task_ID,
                        Local_Plugin_Name.lower())
                    Current_Step = 0

                    for Post in Posts:
                        URL = Post['post_url']
                        Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                                "").replace("/", "")
                        Title = "IG | " + General.Get_Title(URL, Requests=True)

                        if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                                Limit):
                            Responses = Common.Request_Handler(
                                URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{Domain}")
                            Response = Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Local_Plugin_Name, Response,
                                Shortcode, The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], URL, Title,
                                    Plugin_Name.lower())
                                Data_to_Cache.append(URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                            Current_Step += 1

                else:
                    logging.info(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - The provided user's profile is private and cannot be scraped."
                    )

            elif Type == "Tag":
                from instagramy import InstagramHashTag
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramHashTag(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Posts = vars(
                    CSE_Response)['tag_data']['edge_hashtag_to_media']['edges']
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Person",
                    Task_ID, Local_Plugin_Name.lower())
                Current_Step = 0

                for Post in Posts:
                    Shortcode = Post['node']['shortcode']
                    URL = f"https://www.{Domain}/p/{Shortcode}/"
                    Title = "IG | " + General.Get_Title(URL, Requests=True)

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Responses = Common.Request_Handler(
                            URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Response = Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name, Response,
                            Shortcode, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            elif Type == "Post":
                from instagramy import InstagramPost
                Local_Plugin_Name = Plugin_Name + "-" + Type
                CSE_Response = InstagramPost(Query)
                JSON_Object = Common.JSON_Handler(vars(CSE_Response))
                CSE_JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(
                    Directory, Local_Plugin_Name, CSE_JSON_Output_Response,
                    Query, The_File_Extensions["Main"])
                Output_Connections = General.Connections(
                    Query, Local_Plugin_Name, Domain, "Social Media - Place",
                    Task_ID, Local_Plugin_Name.lower())
                URL = CSE_Response.url
                Shortcode = URL.replace(f"https://www.{Domain}/p/",
                                        "").replace("/", "")
                Title = "IG | " + General.Get_Title(URL, Requests=True)

                if URL not in Cached_Data and URL not in Data_to_Cache:
                    Responses = Common.Request_Handler(
                        URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Response = Responses["Filtered"]
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Local_Plugin_Name, Response,
                        Shortcode, The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  URL, Title,
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid type provided."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
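
A minimal sketch of the instagramy calls above, assuming the library version in use exposes is_private and a posts list of dicts with a post_url key, as the plugin reads them; the username is hypothetical.

from instagramy import InstagramUser

user = InstagramUser("github")  # hypothetical public account

if not user.is_private:
    for post in user.posts[:10]:  # respect a limit of 10, as in the plugin
        print(post["post_url"])
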
Example #28
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Tor_Pull_URL = Tor_General_URL + Query
        Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL,
                                                  Tor_Scrape_Regex_URL)

        if Tor_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   Tor_Plugin_Name.lower(),
                                                   "\n".join(Tor_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in Tor_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            Tor_Plugin_Name, URL, "ahmia.fi",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No Tor links scraped.")

        I2P_Pull_URL = I2P_General_URL + Query
        I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL,
                                                  I2P_Scrape_Regex_URL)

        if I2P_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   I2P_Plugin_Name.lower(),
                                                   "\n".join(I2P_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in I2P_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            I2P_Plugin_Name, URL, "ahmia.fi",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No I2P links scraped.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
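
A small sketch of the hardened Limit handling used above, falling back to a default when the keyword argument is missing, non-numeric, or not positive; get_limit is a hypothetical helper, not part of the framework.

def get_limit(default=10, **kwargs):
    # Coerce the Limit keyword to a positive integer, or use the default.
    try:
        limit = int(kwargs.get("Limit", default))
    except (TypeError, ValueError):
        return default
    return limit if limit > 0 else default

print(get_limit(Limit="25"))  # 25
print(get_limit(Limit="-5"))  # 10
print(get_limit())            # 10
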
Example #29
    def Search(self):

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Shodan_API_Key = self.Load_Configuration()
            API_Session = Shodan(Shodan_API_Key)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:

                    if self.Type == "Search":
                        Local_Plugin_Name = self.Plugin_Name + "-Search"

                        try:
                            API_Response = API_Session.search(Query)

                        except Exception as e:
                            logging.error(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}."
                            )
                            break

                        JSON_Output_Response = Common.JSON_Handler(
                            API_Response).Dump_JSON()
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())
                        Current_Step = 0

                        for Shodan_Item in API_Response["matches"]:
                            Shodan_Item_Module = Shodan_Item['_shodan'][
                                'module']
                            Shodan_Item_Module = Shodan_Item_Module.replace(
                                '-simple-new', '')

                            if Shodan_Item_Module.startswith("http"):
                                Shodan_Item_Host = ""
                                Shodan_Item_Port = 0

                                if 'http' in Shodan_Item:
                                    Shodan_Item_Host = Shodan_Item['http'][
                                        'host']
                                    Shodan_Item_Response = Shodan_Item['http'][
                                        'html']

                                elif 'ip_str' in Shodan_Item and 'domains' in Shodan_Item and len(
                                        Shodan_Item['domains']) > 0:
                                    Shodan_Item_Host = Shodan_Item['domains'][
                                        0]
                                    Shodan_Item_Response = Shodan_Item['data']

                                elif 'ip_str' in Shodan_Item and 'domains' not in Shodan_Item:
                                    Shodan_Item_Host = Shodan_Item['ip_str']
                                    Shodan_Item_Response = Shodan_Item['data']

                                if Shodan_Item_Host:

                                    if 'port' in Shodan_Item:  # check the result dict, not the host string

                                        if int(Shodan_Item['port']) not in [
                                                80, 443
                                        ]:
                                            Shodan_Item_Port = Shodan_Item[
                                                'port']

                                    if Shodan_Item_Port != 0:
                                        Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}:{str(Shodan_Item_Port)}"

                                    else:
                                        Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}"

                                    Title = f"Shodan | {str(Shodan_Item_Host)}"

                                    if Shodan_Item_URL not in Cached_Data and Shodan_Item_URL not in Data_to_Cache and Current_Step < int(
                                            self.Limit):
                                        Output_file = General.Create_Query_Results_Output_File(
                                            Directory, Query,
                                            Local_Plugin_Name,
                                            Shodan_Item_Response,
                                            Shodan_Item_Host,
                                            self.The_File_Extensions["Query"])

                                        if Output_file:
                                            Output_Connections.Output(
                                                [Main_File, Output_file],
                                                Shodan_Item_URL, Title,
                                                self.Plugin_Name.lower())
                                            Data_to_Cache.append(
                                                Shodan_Item_URL)

                                        else:
                                            logging.warning(
                                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                            )

                                        Current_Step += 1

                    elif self.Type == "Host":
                        Local_Plugin_Name = self.Plugin_Name + "-Host"

                        try:
                            API_Response = API_Session.host(Query)

                        except Exception as e:
                            logging.error(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}."
                            )
                            break

                        JSON_Output_Response = Common.JSON_Handler(
                            API_Response).Dump_JSON()
                        Main_File = General.Main_File_Create(
                            Directory, Local_Plugin_Name, JSON_Output_Response,
                            Query, self.The_File_Extensions["Main"])
                        Output_Connections = General.Connections(
                            Query, Local_Plugin_Name, self.Domain,
                            self.Result_Type, self.Task_ID,
                            self.Plugin_Name.lower())
                        Shodan_URL = f"https://www.{self.Domain}/host/{Query}"
                        Title = f"Shodan | {Query}"

                        if Shodan_URL not in Cached_Data and Shodan_URL not in Data_to_Cache:
                            Shodan_Responses = Common.Request_Handler(
                                Shodan_URL,
                                Application_JSON_CT=True,
                                Accept_XML=True,
                                Accept_Language_EN_US=True,
                                Filter=True,
                                Host=f"https://www.{self.Domain}")
                            Shodan_Response = Shodan_Responses["Filtered"]
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, self.Plugin_Name,
                                Shodan_Response, Query,
                                self.The_File_Extensions["Query"])

                            if Output_file:
                                Output_Connections.Output(
                                    [Main_File, Output_file], Shodan_URL,
                                    Title, self.Plugin_Name.lower())
                                Data_to_Cache.append(Shodan_URL)

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                )

                        else:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found."
                            )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to complete task."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
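
A condensed sketch of the Shodan calls above using the official shodan package, which the plugin's Shodan(Shodan_API_Key) session also comes from: search() returns a dict with a matches list, and host() returns the services for a single IP. The API key and queries are placeholders.

from shodan import Shodan

api_session = Shodan("YOUR-API-KEY")  # placeholder key

results = api_session.search("apache")  # hypothetical search query
for match in results["matches"][:10]:
    print(match.get("ip_str"), match["_shodan"].get("module"))

host = api_session.host("8.8.8.8")  # hypothetical host query
print(host.get("org"), host.get("ports"))
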
Example #30
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Subdomains = Load_Configuration()
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Subdomains:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'
                Response = requests.get(Request).text

            else:
                Request = f'https://api.certspotter.com/v1/issuances?domain={Query}&expand=dns_names&expand=issuer&expand=cert'
                Response = requests.get(Request).text

            JSON_Response = json.loads(Response)

            if 'exists' not in JSON_Response:

                if JSON_Response:

                    if Request not in Cached_Data and Request not in Data_to_Cache:

                        try:
                            SSLMate_Regex = re.search(
                                r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?",
                                Query)

                            if SSLMate_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    json.dumps(JSON_Response,
                                               indent=4,
                                               sort_keys=True),
                                    SSLMate_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections = General.Connections(
                                        Query, Plugin_Name, "sslmate.com",
                                        "Certificate", Task_ID,
                                        Plugin_Name.lower())
                                    Data_to_Cache.append(Request)

                                    if Subdomains:
                                        Output_Connections.Output(
                                            [Output_file], Request,
                                            f"Subdomain Certificate Search for {Query}",
                                            Plugin_Name.lower())

                                    else:
                                        Output_Connections.Output(
                                            [Output_file], Request,
                                            f"Domain Certificate Search for {Query}",
                                            Plugin_Name.lower())

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                                )

                        except:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file."
                            )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - No response."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Query does not exist."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
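
A standalone sketch of the Cert Spotter issuances call above, assuming unauthenticated access (the API also accepts a key over HTTP basic auth, as in the earlier authenticated variant); the domain is hypothetical.

import requests

domain = "example.com"  # hypothetical query
url = ("https://api.certspotter.com/v1/issuances"
       f"?domain={domain}&include_subdomains=true"
       "&expand=dns_names&expand=issuer&expand=cert")
issuances = requests.get(url, timeout=10).json()

for issuance in issuances:
    print(issuance.get("dns_names"), issuance.get("issuer", {}).get("name"))
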