Example 1
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Location = General.Load_Location_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Response = requests.get("http://itunes.apple.com/search?term=" +
                                    Query + "&country=" + Location +
                                    "&entity=software&limit=" +
                                    str(Limit)).text

        except requests.exceptions.RequestException:
            logging.warning(
                General.Date() + " - " + __name__.strip('plugins.') +
                " - Failed to make request, are you connected to the internet?"
            )
            continue

        JSON_Response = json.loads(Response)
        General.Main_File_Create(
            Directory, "iTunes",
            json.dumps(JSON_Response, indent=4, sort_keys=True), Query, ".json")

        if 'resultCount' in JSON_Response:

            if not JSON_Response['resultCount'] == 0:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, "itunes.apple.com", "Data Leakage",
                        Task_ID, Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Response = requests.get(
                            JSON_Object['artistViewUrl']).text

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
                            iTunes_Regex = re.search(
                                r"https\:\/\/itunes\.apple\.com\/" + Location +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?mt\=\d\&uo\=\d",
                                JSON_Object['artistViewUrl'])

                            if iTunes_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query,
                                    Plugin_Name, JSON_Object_Response,
                                    iTunes_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output(
                                        Output_file,
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']))

                            Data_to_Cache.append(JSON_Object['artistViewUrl'])

                else:
                    logging.warning(
                        General.Date() + " - " + __name__.strip('plugins.') +
                        " - Invalid value provided, value less than 0.")

            else:
                logging.warning(General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Invalid value provided, value equal to 0.")

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') + " - Invalid value.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
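
Example 1 resolves the optional Limit keyword argument inline; the later examples delegate that to General.Get_Limit(kwargs). As a point of comparison, a minimal helper with the same behaviour as the inline logic above might look like the following sketch. The name matches the call used in the later examples, but the body and the default value are assumptions, not the project's actual implementation.

def Get_Limit(kwargs, default=10):
    # Hypothetical helper mirroring the inline handling in Example 1:
    # use the caller-supplied Limit if it is a positive integer, else fall back.
    try:
        Limit = int(kwargs.get("Limit", 0))
    except (TypeError, ValueError):
        return default
    return Limit if Limit > 0 else default
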
Example 2
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://www.{Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = General.Request_Handler(
                Main_URL,
                Application_JSON_CT=True,
                Accept_XML=True,
                Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Win_Store_Response, Query,
                                                 The_File_Extension)
            Win_Store_Regex = re.findall(r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)",
                                         Win_Store_Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Application",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = General.Request_Handler(
                        Item_URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Win_Store_Response,
                            Regex_Group_1, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example 3
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = General.Request_Handler(Main_URL)

            except Exception:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Response = json.loads(Response)
            Main_File = General.Main_File_Create(
                Directory, "iTunes",
                json.dumps(JSON_Response, indent=4, sort_keys=True), Query,
                The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Responses = General.Request_Handler(
                            JSON_Object['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Object_Response = JSON_Object_Responses[
                            "Filtered"]

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
                            iTunes_Regex = re.search(
                                r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+",
                                JSON_Object['artistViewUrl'])

                            if iTunes_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query,
                                    Plugin_Name, JSON_Object_Response,
                                    iTunes_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Object['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{General.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example 4
def Search(Query_List, Task_ID, **kwargs):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = General.Load_Location_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Main_URL = f"https://www.microsoft.com/en-{Location}/search?q={Query}"
            headers = {
                'Content-Type': 'application/json',
                'User-Agent':
                'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
                'Accept':
                'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                'Accept-Language': 'en-US,en;q=0.5'
            }
            Win_Store_Response = requests.get(Main_URL, headers=headers).text
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Win_Store_Response, Query,
                                                 The_File_Extension)
            Win_Store_Regex = re.findall(r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)",
                                         Win_Store_Response)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "microsoft.com",
                                                     "Application", Task_ID,
                                                     Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    headers = {
                        'Content-Type': 'application/json',
                        'User-Agent':
                        'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
                        'Accept':
                        'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                        'Accept-Language': 'en-US,en;q=0.5'
                    }
                    Win_Store_Response = requests.get(Item_URL,
                                                      headers=headers).text
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Win_Store_Response,
                            Regex_Group_1, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example 5
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Location = General.Load_Location_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Main_URL = "https://www.microsoft.com/en-" + Location + "/search?q=" + Query
        headers = {
            'Content-Type': 'application/json',
            'User-Agent':
            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
            'Accept':
            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.5'
        }
        Win_Store_Response = requests.get(Main_URL, headers=headers).text
        General.Main_File_Create(Directory, Plugin_Name, Win_Store_Response,
                                 Query, The_File_Extension)
        Win_Store_Regex = re.findall(r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)",
                                     Win_Store_Response)

        if Win_Store_Regex:
            Current_Step = 0

            for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                Item_URL = "https://www.microsoft.com/en-au/p/" + Regex_Group_1 + "/" + Regex_Group_2
                headers = {
                    'Content-Type': 'application/json',
                    'User-Agent':
                    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                    'Accept':
                    'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                    'Accept-Language': 'en-US,en;q=0.5'
                }
                Win_Store_Response = requests.get(Item_URL,
                                                  headers=headers).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                        Limit):
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Win_Store_Response,
                        Regex_Group_1, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Item_URL, "microsoft.com",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Item_URL),
                                            Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    logging.info('Windows Store Search Plugin Terminated.')
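
All five variants expose the same entry point, so a caller would invoke them along the following lines. The query values and Task_ID here are hypothetical; in practice the framework's plugin runner supplies them.

# Hypothetical invocation; Query_List may also be a comma-separated string,
# since each variant normalises it with General.Convert_to_List.
Search(["netflix", "spotify"], Task_ID="1", Limit=5)
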