Example #1
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir,
                                      'plugins/common/config/config.json')
    logging.info(General.Date() + " - " + __name__.strip('plugins.') +
                 " - Loading configuration data.")

    try:
        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)

            for Reddit_Details in Configuration_Data[Plugin_Name.lower()]:
                Reddit_Client_ID = Reddit_Details['client_id']
                Reddit_Client_Secret = Reddit_Details['client_secret']
                Reddit_User_Agent = Reddit_Details['user_agent']
                Reddit_Username = Reddit_Details['username']
                Reddit_Password = Reddit_Details['password']
                Subreddit_to_Search = Reddit_Details["subreddits"]

                if Reddit_Client_ID and Reddit_Client_Secret and Reddit_User_Agent and Reddit_Username and Reddit_Password and Subreddit_to_Search:
                    return [
                        Reddit_Client_ID, Reddit_Client_Secret,
                        Reddit_User_Agent, Reddit_Username, Reddit_Password,
                        Subreddit_to_Search
                    ]

                else:
                    return None
    except:
        logging.warning(General.Date() + " - " + __name__.strip('plugins.') +
                        " - Failed to load Reddit details.")
Example #2
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir,
                                      'plugins/common/config/config.json')
    logging.info(General.Date() + " - " + __name__.strip('plugins.') +
                 " - Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)

            for Flickr_Details in Configuration_Data[Plugin_Name.lower()]:

                if Flickr_Details['api_key'] and Flickr_Details['api_secret']:
                    return [
                        Flickr_Details['api_key'], Flickr_Details['api_secret']
                    ]

                else:
                    return None

    except:
        logging.warning(General.Date() + " - " + __name__.strip('plugins.') +
                        " - Failed to load API details.")
Example #3
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:  
            Configuration_Data = json.load(JSON_File)

            for YouTube_Details in Configuration_Data[Plugin_Name.lower()]:
                YouTube_Developer_Key = YouTube_Details['developer_key']
                YouTube_Application_Name = YouTube_Details['application_name']
                YouTube_Application_Version = YouTube_Details['application_version']
                YouTube_Location = YouTube_Details['location']
                YouTube_Location_Radius = YouTube_Details['location_radius']

                if YouTube_Developer_Key and YouTube_Application_Name and YouTube_Application_Version and YouTube_Location and YouTube_Location_Radius:
                    return [YouTube_Developer_Key, YouTube_Application_Name, YouTube_Application_Version, YouTube_Location, YouTube_Location_Radius]

                else:
                    return None

    except:
        logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to load location details.")
Example #4
def General_Pull(Handle, Limit, Directory, API, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    JSON_Response = []
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Latest_Tweets = API.user_timeline(screen_name=Handle, count=Limit)

    for Tweet in Latest_Tweets:
        Link = ""

        try:
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name,
                'url': Tweet.entities['urls'][0]["expanded_url"]
            })
            Link = Tweet.entities['urls'][0]["expanded_url"]

        except:
            JSON_Response.append({
                'id': Tweet.id,
                'text': Tweet.text,
                'author_name': Tweet.user.screen_name
            })

    JSON_Output = json.dumps(JSON_Response, indent=4, sort_keys=True)

    for JSON_Item in JSON_Response:

        if 'text' in JSON_Item and 'url' in JSON_Item:
            Link = JSON_Item['url']

            if Link not in Cached_Data and Link not in Data_to_Cache:
                logging.info(General.Date() + " " + Link)
                Item_Response = requests.get(Link).text
                Output_file = General.Create_Query_Results_Output_File(Directory, Handle, Plugin_Name, Item_Response, str(JSON_Item['id']), ".html")

                if Output_file:
                    General.Connections(Output_file, Handle, Plugin_Name, Link, "twitter.com", "Data Leakage", Task_ID, General.Get_Title(Link), Plugin_Name.lower())

                else:
                    logging.warning(General.Date() + " Output file not returned.")

        else:
            logging.warning(General.Date() + " Insufficient parameters provided.")

        if Link:
            Data_to_Cache.append(Link)

    General.Main_File_Create(Directory, Plugin_Name, JSON_Output, Handle, ".json")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
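
General.Get_Cache and General.Write_Cache are project helpers whose definitions are not shown in these examples. From their call sites, Get_Cache returns the previously recorded links (or something falsy on a first run) and Write_Cache persists new links, with "a" appending to an existing cache and "w" creating it. A plausible sketch, purely an assumption from that usage:

import os


def Get_Cache(Directory, Plugin_Name):
    # Assumed behaviour: return previously cached links, or [] on a first run.
    Cache_File = os.path.join(Directory, Plugin_Name.lower() + "-cache.txt")

    if not os.path.isfile(Cache_File):
        return []

    with open(Cache_File) as File:
        return File.read().splitlines()


def Write_Cache(Directory, Data_to_Cache, Plugin_Name, Open_Mode):
    # Open_Mode is "a" to append to an existing cache, "w" to create one.
    Cache_File = os.path.join(Directory, Plugin_Name.lower() + "-cache.txt")

    with open(Cache_File, Open_Mode) as File:
        for Line in Data_to_Cache:
            File.write(Line + "\n")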
Example #5
def Search(Query_List, Task_ID, **kwargs):

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Twitter_Credentials = Load_Configuration()
    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Authentication = tweepy.OAuthHandler(Twitter_Credentials[0], Twitter_Credentials[1])
            Authentication.set_access_token(Twitter_Credentials[2], Twitter_Credentials[3])
            API = tweepy.API(Authentication)
            General_Pull(Query, Limit, Directory, API, Task_ID)

        except:
            logging.info(General.Date() + " Failed to get results. Are you connected to the internet?")
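
The nested Limit conditionals above reappear in nearly every Search function in this collection. A small shared helper would collapse them and also tolerate non-numeric input; the name and placement are hypothetical:

def Get_Limit(kwargs, Default=10):
    # Fall back to Default when Limit is absent, non-numeric, or not positive.
    try:
        Limit = int(kwargs.get("Limit", Default))

    except (TypeError, ValueError):
        return Default

    return Limit if Limit > 0 else Default

Each Search function could then open with Limit = Get_Limit(kwargs).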
Example #6
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir,
                                      'plugins/common/config/config.json')
    logging.info(General.Date() + " Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)

            for Vulners_Details in Configuration_Data[Plugin_Name.lower()]:

                if Vulners_Details['api_key']:
                    return Vulners_Details

                else:
                    return None

    except:
        logging.warning(General.Date() + " Failed to load API details.")
Example #7
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + " Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)

            for HIBP_Details in Configuration_Data[Concat_Plugin_Name]:
                API_Key = HIBP_Details['api_key']

                if API_Key:
                    return API_Key

                else:
                    return None

    except:
        logging.warning(General.Date() + " Failed to load API details.")
Example #8
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        BSB_Search_URL = "https://www.bsbnumbers.com/" + Query + ".html"
        Response = requests.get(BSB_Search_URL).text
        Error_Regex = re.search(r"Correct\sthe\sfollowing\serrors", Response)

        if not Error_Regex:

            if BSB_Search_URL not in Cached_Data and BSB_Search_URL not in Data_to_Cache:
                Output_file = General.Create_Query_Results_Output_File(
                    Directory, Query, Plugin_Name, Response, Query,
                    The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name,
                                        BSB_Search_URL, "bsbnumbers.com",
                                        "Data Leakage", Task_ID,
                                        General.Get_Title(BSB_Search_URL),
                                        Plugin_Name.lower())

                Data_to_Cache.append(BSB_Search_URL)

        else:
            logging.warning(General.Date() +
                            " Query returned error, probably does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #9
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)

            for API_Details in Configuration_Data[Concat_Plugin_Name]:
                API_Key = API_Details['api_key']

                if API_Key:
                    API_Key = base64.b64encode(API_Key.encode('ascii'))
                    return API_Key

                else:
                    return None

    except:
        logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to load API details.")
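
This loader returns the key base64-encoded, which matches how the Companies House example further below builds its header ("Basic " + Authorization_Key.decode('ascii')). A standalone illustration of that round trip; note that HTTP Basic authentication formally encodes "username:password", so an API that takes the key as the username with a blank password expects base64 of the key followed by a colon:

import base64

API_Key = "example-api-key"  # placeholder, not a real credential

# As in the loader above: encode the bare key to base64 bytes.
Encoded = base64.b64encode(API_Key.encode('ascii'))

# As in the consumer below: decode back to text for the header value.
headers = {"Authorization": "Basic " + Encoded.decode('ascii')}

# For strict Basic auth with a blank password, the input would instead be
# (API_Key + ":").encode('ascii').
print(headers)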
Example #10
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + " Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:  
            Configuration_Data = json.load(JSON_File)

            for Twitter_Details in Configuration_Data[Plugin_Name.lower()]:
                Consumer_Key = Twitter_Details['CONSUMER_KEY']
                Consumer_Secret = Twitter_Details['CONSUMER_SECRET']
                Access_Key = Twitter_Details['ACCESS_KEY']
                Access_Secret = Twitter_Details['ACCESS_SECRET']

                if Consumer_Key and Consumer_Secret and Access_Key and Access_Secret:
                    return [Consumer_Key, Consumer_Secret, Access_Key, Access_Secret]

                else:
                    return None

    except:
        logging.warning(General.Date() + " Failed to load Twitter details.")
Example #11
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:  
            Configuration_Data = json.load(JSON_File)

            for Google_Details in Configuration_Data[Plugin_Name.lower()]:
                Google_CX = Google_Details['cx']
                Google_Developer_Key = Google_Details['developer_key']
                Google_Application_Name = Google_Details['application_name']
                Google_Application_Version = Google_Details['application_version']

                if Google_CX and Google_Developer_Key and Google_Application_Name and Google_Application_Version:
                    return [Google_CX, Google_Developer_Key, Google_Application_Name, Google_Application_Version]

                else:
                    return None

    except:
        logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to load API details.")
Example #12
def Load_Configuration():
    File_Dir = os.path.dirname(os.path.realpath('__file__'))
    Configuration_File = os.path.join(File_Dir, 'plugins/common/config/config.json')
    logging.info(General.Date() + "[+] Loading configuration data.")

    try:

        with open(Configuration_File) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Craigslist_Details = Configuration_Data[Plugin_Name.lower()]

            if Craigslist_Details['city']:
                return Craigslist_Details['city']

            else:
                return None

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load location details.")
Example #13
def Load_Configuration():
    logging.info(General.Date() + "[+] Loading configuration data.")

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Craigslist_Details = Configuration_Data[Plugin_Name.lower()]

            if Craigslist_Details['city']:
                return Craigslist_Details['city']

            else:
                return None

    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load location details."
        )
Example #14
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        # Query can be Title or ISBN
        Main_URL = "http://gen.lib.rus.ec/search.php?req=" + Query + "&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
        Lib_Gen_Response = requests.get(Main_URL).text
        General.Main_File_Create(Directory, Plugin_Name, Lib_Gen_Response,
                                 Query, The_File_Extension)
        Lib_Gen_Regex = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}",
                                   Lib_Gen_Response)

        if Lib_Gen_Regex:
            Current_Step = 0

            for Regex in Lib_Gen_Regex:
                Item_URL = "http://gen.lib.rus.ec/" + Regex
                Lib_Item_Response = requests.get(Item_URL).text

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                        Limit):
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Lib_Item_Response,
                        Regex, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Item_URL, "gen.lib.rus.ec",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Item_URL),
                                            Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)
                    Current_Step += 1

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Failed to match regular expression.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
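
The regular expression in this example targets Library Genesis item links of the form book/index.php?md5=<32 hexadecimal characters>. A self-contained check against a fabricated HTML fragment:

import re

# Fabricated sample; the md5 value is an arbitrary 32-character hex string.
Sample_HTML = '<a href="book/index.php?md5=0123456789abcdef0123456789abcdef">Title</a>'
Matches = re.findall(r"book\/index\.php\?md5=[A-Fa-f0-9]{32}", Sample_HTML)
print(Matches)  # ['book/index.php?md5=0123456789abcdef0123456789abcdef']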
Example #15
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []
    Results = []

    if int(kwargs["Limit"]) > 0:
        Limit = kwargs["Limit"]

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Reddit_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Reddit_Connection = praw.Reddit(client_id=Reddit_Details[0],
                                            client_secret=Reddit_Details[1],
                                            user_agent=Reddit_Details[2],
                                            username=Reddit_Details[3],
                                            password=Reddit_Details[4])

            All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

            for Subreddit in All_Subreddits.search(Query, limit=Limit): # Limit, subreddit and search to be controlled by the web app.
                Current_Result = []
                Current_Result.append(Subreddit.url)
                Current_Result.append(Subreddit.selftext)
                Results.append(Current_Result)

        except:
            logging.warning(General.Date() + " Failed to get results. Are you connected to the internet?")

        for Result in Results:

            if Result[0] not in Cached_Data and Result[0] not in Data_to_Cache:

                try:
                    Reddit_Regex = re.search(r"https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/", Result[0])

                    if Reddit_Regex:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Result[1], Reddit_Regex.group(3), The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Result[0], "reddit.com", "Data Leakage", Task_ID, General.Get_Title(Result[0]), Plugin_Name.lower())

                except:
                    logging.warning(General.Date() + " Failed to create file.")

                Data_to_Cache.append(Result[0])

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #16
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Google_Details = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Service = build("customsearch", Google_Details[3], developerKey=Google_Details[1])
        CSE_Response = Service.cse().list(q=Query, cx=Google_Details[0], num=Limit).execute()
        CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
        CSE_JSON_Response = json.loads(CSE_JSON_Output_Response)

        General.Main_File_Create(Directory, Plugin_Name, CSE_JSON_Output_Response, Query, ".json")
        Output_Connections = General.Connections(Query, Plugin_Name, "google.com", "Domain Spoof", Task_ID, Plugin_Name.lower())

        for JSON_Response_Items in CSE_JSON_Response['items']:

            try:
                Google_Item = JSON_Response_Items['pagemap']['metatags']

                for Google_Item_Line in Google_Item:
                    Google_Item_URL = Google_Item_Line['og:url']

                    if Google_Item_URL not in Cached_Data and Google_Item_URL not in Data_to_Cache:
                        Path_Regex = re.search(r"https?\:\/\/(www\.)?[\w\d\.]+\.\w{2,3}(\.\w{2,3})?(\.\w{2,3})?\/([\w\d\-\_\/]+)?", Google_Item_URL)

                        if Path_Regex:
                            headers = {'Content-Type': 'application/json', 'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5'}
                            Google_Item_Response = requests.get(Google_Item_URL, headers=headers).text
                            Output_Path = Path_Regex.group(4).replace("/", "-")
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Google_Item_Response, Output_Path, The_File_Extension)

                            if Output_file:
                                Output_Connections.Output(Output_file, Google_Item_URL, General.Get_Title(Google_Item_URL))

                        else:
                            logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to match regular expression.")

                        Data_to_Cache.append(Google_Item_URL)

            except Exception as e:
                logging.info(General.Date() + str(e))

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #17
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Output_Connections = General.Connections(
            Query, Plugin_Name, "general-insurance.coles.com.au",
            "Data Leakage", Task_ID, Concat_Plugin_Name)

        for State in States:
            Post_URL = 'https://general-insurance.coles.com.au/bin/wesfarmers/search/vehiclerego'
            data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
            headers = {
                'Content-Type': 'text/plain;charset=UTF-8',
                'User-Agent':
                'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36',
                'Accept': '*/*',
                'Accept-Encoding': 'gzip, deflate, br',
                'Referer':
                'https://general-insurance.coles.com.au/motor/get-quote',
                'Origin': 'https://general-insurance.coles.com.au',
                'Host': 'general-insurance.coles.com.au'
            }
            Registration_Response = requests.post(Post_URL,
                                                  data=data,
                                                  headers=headers).text
            Registration_Response = json.loads(Registration_Response)

            try:
                Title = Registration_Response['vehicles'][0][
                    'make'] + " " + Registration_Response['vehicles'][0][
                        'model']
                Item_URL = Post_URL + "?" + Query

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name,
                        json.dumps(Registration_Response,
                                   indent=4,
                                   sort_keys=True), Title.replace(" ", "-"),
                        The_File_Extension)

                    if Output_file:
                        Output_Connections.Output(Output_file, Item_URL, Title)

                    Data_to_Cache.append(Item_URL)

            except:
                logging.info(General.Date() + " - " +
                             __name__.strip('plugins.') +
                             " - No result found for given query " + Query +
                             " for state " + State + ".")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #18
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "UKBN":
                Authorization_Key = Load_Configuration()

                if Authorization_Key:
                    Authorization_Key = "Basic " + Authorization_Key.decode('ascii')
                    headers = {"Authorization": Authorization_Key}
                    Main_URL = 'https://api.companieshouse.gov.uk/company/' + Query
                    Response = requests.get(Main_URL, headers=headers).text
                    JSON_Response = json.loads(Response)
                    Indented_JSON_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)

                    try:
                        Dud_Query = str(int(Query))  # Raises ValueError for non-numeric queries, caught below.

                        if Response and '{"errors":[{"error":"company-profile-not-found","type":"ch:service"}]}' not in Response:

                            if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                Result_URL = 'https://beta.companieshouse.gov.uk/company/' + str(JSON_Response["company_number"])
                                Result_Response = requests.get(Result_URL).text
                                Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, '.json')
                                Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Result_Response, str(JSON_Response["company_name"]), The_File_Extension)

                                if Main_Output_File and Output_file:
                                    General.Connections(Output_file, Query, Plugin_Name, Result_URL, "companieshouse.gov.uk", "Data Leakage", Task_ID, str(JSON_Response["company_name"]), Plugin_Name)
                                    Data_to_Cache.append(Main_URL)

                    except:
                        logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid query provided for UKBN Search.")

                else:
                    logging.info(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to retrieve API key.")

            elif Type == "UKCN":
                Authorization_Key = Load_Configuration()

                if Authorization_Key:
                    Authorization_Key = "Basic " + Authorization_Key.decode('ascii')

                    if kwargs.get('Limit'):

                        if int(kwargs["Limit"]) > 0:
                            Limit = kwargs["Limit"]

                        else:
                            Limit = 10

                    else:
                        Limit = 10

                    try:
                        Main_URL = 'https://api.companieshouse.gov.uk/search/companies?q=' + Query + '&items_per_page=' + str(Limit)
                        headers = {"Authorization": "Basic SGI5M0V4STRkMDZ2d0NHSzBZTkI5QUxnQ3N3UDNhNEFNMDRHeWtVdzo="}
                        Response = requests.get(Main_URL, headers=headers).text
                        JSON_Response = json.loads(Response)
                        Indented_JSON_Response = json.dumps(JSON_Response, indent=4, sort_keys=True)

                        try:

                            if JSON_Response['total_results'] > 0:
                                General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, '.json')
                                Output_Connections = General.Connections(Query, Plugin_Name, "companieshouse.gov.uk", "Data Leakage", Task_ID, Plugin_Name)

                                for Item in JSON_Response['items']:
                                    UKBN_URL = Item['links']['self']
                                    Full_UKBN_URL = 'https://beta.companieshouse.gov.uk' + str(UKBN_URL)

                                    if Full_UKBN_URL not in Cached_Data and Full_UKBN_URL not in Data_to_Cache:
                                        UKCN = Item['title']
                                        Current_Response = requests.get(Full_UKBN_URL).text
                                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), UKCN, The_File_Extension)

                                        if Output_file:
                                            Output_Connections.Output(Output_file, Full_UKBN_URL, UKCN)
                                            Data_to_Cache.append(Full_UKBN_URL)

                        except:
                            logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Error during UKCN Search, perhaps the rate limit has been exceeded.")

                    except:
                        logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid query provided for UKCN Search.")

                else:
                    logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to retrieve API key.")

            else:
                logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid request type.")

        except:
            logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #19
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    try:
        DNS_Info = checkdmarc.check_domains(Query_List)

        if len(Query_List) > 1:

            for DNS_Item in DNS_Info:
                Query = DNS_Item['base_domain']
                Output_Dict = json.dumps(DNS_Item, indent=4, sort_keys=True)
                Link = "https://www." + Query
                Title = "DNS Information for " + DNS_Info['base_domain']

                if Link not in Data_to_Cache and Link not in Cached_Data:
                    Output_file = General.Main_File_Create(
                        Directory, Plugin_Name, Output_Dict, Query,
                        The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Link, Query, "Domain Spoof",
                                            Task_ID, Title, Concat_Plugin_Name)

                    Data_to_Cache.append(Link)

        else:
            Query = DNS_Info['base_domain']
            Output_Dict = json.dumps(DNS_Info, indent=4, sort_keys=True)
            Link = "https://www." + Query
            Title = "DNS Information for " + Query

            if Link not in Data_to_Cache and Link not in Cached_Data:
                Output_file = General.Main_File_Create(Directory, Plugin_Name,
                                                       Output_Dict, Query,
                                                       The_File_Extension)

                if Output_file:
                    General.Connections(Output_file, Query, Plugin_Name, Link,
                                        Query, "Domain Spoof", Task_ID, Title,
                                        Concat_Plugin_Name)

                Data_to_Cache.append(Link)

    except:
        logging.warning(General.Date() + " Error retrieving DNS details.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
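
The branching on len(Query_List) suggests that, in the checkdmarc version this was written against, check_domains returns a single mapping for one domain and a list of mappings for several. Under that assumption, a small normalisation step would let both cases share one loop:

import checkdmarc


def Normalised_DNS_Info(Query_List):
    # Assumption: check_domains returns a dict for one domain and a list
    # of dicts for several, as the len() branching above implies.
    DNS_Info = checkdmarc.check_domains(Query_List)

    if isinstance(DNS_Info, dict):
        return [DNS_Info]

    return DNS_Info


for DNS_Item in Normalised_DNS_Info(["example.com"]):
    print(DNS_Item["base_domain"])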
Example #20
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    try:
        File_Dir = os.path.dirname(os.path.realpath('__file__'))
        Configuration_File = os.path.join(
            File_Dir, 'plugins/common/config/RSS_Feeds.txt')
        # Read the provided file and retrieve the list of RSS feed URLs to query.
        with open(Configuration_File, "r") as Current_File:
            URLs = Current_File.read().splitlines()

    except:
        logging.warning(
            General.Date() + " - " + __name__.strip('plugins.') +
            " - Failed to open the file containing the RSS feeds to search. Please provide a valid file."
        )
        URLs = []  # Avoids a NameError in the loop below when the file cannot be read.

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        for URL in URLs:  # URLs to be controlled by the web app.
            RSS = feedparser.parse(URL)
            Current_Step = 0

            for Feed in RSS.entries:

                if Query in Feed.description:
                    Dump_Types = General.Data_Type_Discovery(Feed.description)
                    File_Link = Feed.link.replace("https://", "")
                    File_Link = File_Link.replace("http://", "")
                    File_Link = File_Link.replace("www.", "")
                    File_Link = File_Link.replace("/", "-")
                    Domain = URL.replace("https://", "")
                    Domain = Domain.replace("http://", "")
                    Domain = Domain.replace("www.", "")

                    if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Feed.description,
                            File_Link, The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain, "Data Leakage",
                                Task_ID, Plugin_Name.lower())
                            Output_Connections.Output(Output_file,
                                                      Feed.link,
                                                      General.Get_Title(
                                                          Feed.link),
                                                      Dump_Types=Dump_Types)

                        Data_to_Cache.append(Feed.link)
                        Current_Step += 1

                else:
                    logging.info(General.Date() + " - " +
                                 __name__.strip('plugins.') +
                                 " - Query not found.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
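
feedparser does the heavy lifting in this example: parse() accepts a URL and exposes each entry's link and description attributes. A minimal standalone version of the matching step; the feed URL is a placeholder, and getattr guards entries that lack a description:

import feedparser

Query = "example"
RSS = feedparser.parse("https://example.com/feed.xml")  # placeholder feed URL

for Feed in RSS.entries:
    # Same substring test as above, but tolerant of missing descriptions.
    if Query in getattr(Feed, "description", ""):
        print(Feed.link)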
Example #21
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    try:
        Flickr_Details = Load_Configuration()
        flickr_api.set_keys(api_key=Flickr_Details[0],
                            api_secret=Flickr_Details[1])

    except:
        logging.info(General.Date() + " - " + __name__.strip('plugins.') +
                     " - Failed to establish API identity.")

    for Query in Query_List:
        Email_Regex = re.search(r"[^@]+@[^\.]+\..+", Query)

        if Email_Regex:

            try:
                User = flickr_api.Person.findByEmail(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query,
                                         ".txt")
                Output_Connections = General.Connections(
                    Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID,
                    Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo[
                        "id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent':
                            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept':
                            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Photo_Response = requests.get(Photo_URL,
                                                      headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Photo_Response,
                            str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                Output_file, Photo_URL,
                                General.Get_Title(Photo_URL))

                        Data_to_Cache.append(Photo_URL)
                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " +
                             __name__.strip('plugins.') +
                             " - Failed to make API call.")

        else:

            try:
                User = flickr_api.Person.findByUserName(Query)
                Photos = User.getPhotos()
                General.Main_File_Create(Directory, Plugin_Name, Photos, Query,
                                         ".txt")
                Output_Connections = General.Connections(
                    Query, Plugin_Name, "flickr.com", "Data Leakage", Task_ID,
                    Plugin_Name.lower())
                Current_Step = 0

                for Photo in Photos:
                    Photo_URL = "https://www.flickr.com/photos/" + Query + "/" + Photo[
                        "id"]

                    if Photo_URL not in Cached_Data and Photo_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent':
                            'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept':
                            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Photo_Response = requests.get(Photo_URL,
                                                      headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Photo_Response,
                            str(Photo['id']), The_File_Extension)

                        if Output_file:
                            Output_Connections.Output(
                                Output_file, Photo_URL,
                                General.Get_Title(Photo_URL))

                        Data_to_Cache.append(Photo_URL)
                        Current_Step += 1

            except:
                logging.info(General.Date() + " - " +
                             __name__.strip('plugins.') +
                             " - Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
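
The email and username branches above are identical apart from the flickr_api lookup call, so the selection could be factored into one helper and the download loop written once. A sketch of just that step, reusing the calls already shown (the helper name is hypothetical):

import re

import flickr_api


def Find_Flickr_User(Query):
    # Route email-shaped queries to findByEmail and everything else to
    # findByUserName, mirroring the regex check used above.
    if re.search(r"[^@]+@[^\.]+\..+", Query):
        return flickr_api.Person.findByEmail(Query)

    return flickr_api.Person.findByUserName(Query)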
Example #22
def Search(Query_List, Task_ID):
    Data_to_Cache = []
    Cached_Data = []
    Configuration_Details = Load_Configuration()
    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Configuration_Details[1].lower() == "true":
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&include_subdomains=true&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request,
                                    auth=(Configuration_Details[0], '')).text

        else:
            Request = 'https://api.certspotter.com/v1/issuances?domain=' + Query + '&expand=dns_names&expand=issuer&expand=cert'
            Response = requests.get(Request,
                                    auth=(Configuration_Details[0], '')).text

        JSON_Response = json.loads(Response)

        if 'exists' not in JSON_Response:

            if JSON_Response:

                if Request not in Cached_Data and Request not in Data_to_Cache:

                    try:
                        SSLMate_Regex = re.search(
                            r"([\w\d]+)\.[\w]{2,3}(\.[\w]{2,3})?(\.[\w]{2,3})?",
                            Query)

                        if SSLMate_Regex:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name,
                                json.dumps(JSON_Response,
                                           indent=4,
                                           sort_keys=True),
                                SSLMate_Regex.group(1), The_File_Extension)

                            if Output_file:
                                Output_Connections = General.Connections(
                                    Query, Plugin_Name,
                                    "sslmate.com", "Domain Spoof", Task_ID,
                                    Plugin_Name.lower())
                                Output_Connections.Output(
                                    Output_file, Request,
                                    General.Get_Title(Request))

                        else:
                            logging.warning(
                                General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Failed to match regular expression.")

                    except:
                        logging.warning(General.Date() + " - " +
                                        __name__.strip('plugins.') +
                                        " - Failed to create file.")

                    Data_to_Cache.append(Request)

            else:
                logging.warning(General.Date() + " - " +
                                __name__.strip('plugins.') + " - No response.")

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') +
                            " - Query does not exist.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #23
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "ABN":
                Main_URL = 'https://abr.business.gov.au/ABN/View?id=' + Query
                Response = requests.get(Main_URL).text

                try:

                    if 'Error searching ABN Lookup' not in Response:
                        Query = str(int(Query))  # ABNs are numeric; raises ValueError otherwise, caught below.

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Response,
                                General.Get_Title(Main_URL),
                                The_File_Extension)

                            if Output_file:
                                General.Connections(
                                    Output_file, Query, Plugin_Name, Main_URL,
                                    "abr.business.gov.au",
                                    "Data Leakage", Task_ID,
                                    General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() +
                                    " Invalid query provided for ABN Search.")

            elif Type == "ACN":
                Main_URL = 'https://abr.business.gov.au/Search/Run'
                Data = {
                    'SearchParameters.SearchText':
                    Query,
                    'SearchParameters.AllNames':
                    'true',
                    'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton':
                    'Search'
                }
                Response = requests.post(Main_URL, data=Data).text

                if kwargs.get('Limit'):

                    if int(kwargs["Limit"]) > 0:
                        Limit = kwargs["Limit"]

                    else:
                        Limit = 10

                else:
                    Limit = 10

                try:
                    ACN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if ACN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name,
                                                 Response, Query,
                                                 The_File_Extension)
                        Current_Step = 0
                        ABNs_Regex = re.findall(
                            r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,",
                            Response)

                        if ABNs_Regex:

                            for ABN_URL, ACN in ABNs_Regex:
                                Full_ABN_URL = 'https://abr.business.gov.au/ABN/View?abn=' + ABN_URL

                                if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    ACN = ACN.rstrip()
                                    Current_Response = requests.get(
                                        Full_ABN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Plugin_Name,
                                        str(Current_Response),
                                        ACN.replace(' ', '-'),
                                        The_File_Extension)

                                    if Output_file:
                                        General.Connections(
                                            Output_file, Query, Plugin_Name,
                                            Full_ABN_URL,
                                            "abr.business.gov.au",
                                            "Data Leakage", Task_ID,
                                            General.Get_Title(Full_ABN_URL),
                                            Plugin_Name)
                                        Data_to_Cache.append(Full_ABN_URL)
                                        Current_Step += 1

                        else:
                            logging.warning(
                                General.Date() +
                                " Response did not match regular expression.")

                    else:
                        logging.warning(
                            General.Date() +
                            " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() +
                                    " Invalid query provided for ACN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #24
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:

            if Type == "NZBN":
                Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + Query + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=1&sf=&sd=&advancedPanel=true&mode=advanced#results'
                Response = requests.get(Main_URL).text

                try:

                    if 'An error has occurred and the requested action cannot be performed.' not in Response:
                        Query = str(int(Query))

                        if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Response, General.Get_Title(Main_URL), The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Plugin_Name, Main_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Main_URL), Plugin_Name)
                                Data_to_Cache.append(Main_URL)

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZBN Search.")

            elif Type == "NZCN":

                if kwargs.get('Limit'):

                    if int(kwargs["Limit"]) > 0:
                        Limit = kwargs["Limit"]

                    else:
                        Limit = 10

                else:
                    Limit = 10

                try:
                    Main_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/search?q=' + urllib.parse.quote(
                        Query) + '&entityTypes=ALL&entityStatusGroups=ALL&incorpFrom=&incorpTo=&addressTypes=ALL&addressKeyword=&start=0&limit=' + str(
                        Limit) + '&sf=&sd=&advancedPanel=true&mode=advanced#results'
                    Response = requests.get(Main_URL).text
                    NZCN_Regex = re.search(r".*[a-zA-Z].*", Query)

                    if NZCN_Regex:
                        General.Main_File_Create(Directory, Plugin_Name, Response, Query, The_File_Extension)
                        NZBNs_Regex = re.findall(r"\<span\sclass\=\"entityName\"\>([\w\d\s\-\_\&\|\!\@\#\$\%\^\*\(\)\.\,]+)\<\/span\>\s<span\sclass\=\"entityInfo\"\>\((\d{6})\)\s\(NZBN\:\s(\d{13})\)", Response)

                        if NZBNs_Regex:

                            for NZCN, NZ_ID, NZBN_URL in NZBNs_Regex:
                                Full_NZBN_URL = 'https://app.companiesoffice.govt.nz/companies/app/ui/pages/companies/' + NZ_ID + '?backurl=H4sIAAAAAAAAAEXLuwrCQBCF4bfZNtHESIpBbLQwhWBeYNgddSF7cWai5O2NGLH7zwenyHgjKWwKGaOfSwjZ3ncPaOt1W9bbsmqaamMoqtepnzIJ7Ltu2RdFHeXIacxf9tEmzgdOAZbuExh0jknk%2F17gRNMrsQMjiqxQmsEHr7Aycp3NfY5PjJbcGSMNoDySCckR%2FPwNLgXMiL4AAAA%3D'

                                if Full_NZBN_URL not in Cached_Data and Full_NZBN_URL not in Data_to_Cache:
                                    Current_Response = requests.get(Full_NZBN_URL).text
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), NZCN.replace(' ', '-'), The_File_Extension)

                                    if Output_file:
                                        General.Connections(Output_file, Query, Plugin_Name, Full_NZBN_URL, "app.companiesoffice.govt.nz", "Data Leakage", Task_ID, General.Get_Title(Full_NZBN_URL), Plugin_Name)
                                        Data_to_Cache.append(Full_NZBN_URL)

                        else:
                            logging.warning(General.Date() + " Response did not match regular expression.")

                    else:
                        logging.warning(General.Date() + " Query did not match regular expression.")

                except:
                    logging.warning(General.Date() + " Invalid query provided for NZCN Search.")

            else:
                logging.warning(General.Date() + " Invalid request type.")

        except:
            logging.warning(General.Date() + " Failed to make request.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Search(Query_List, Task_ID, Type, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "User":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.user(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
            Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json")
            Posts = CSE_Response[0]["edge_owner_to_timeline_media"]["edges"]
            Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower())
            Current_Step = 0

            for Post in Posts:
                Shortcode = Post["node"]["shortcode"]
                URL = "https://www.instagram.com/p/" + Shortcode + "/"

                if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):

                    if Output_file:
                        Output_Connections.Output(Output_file, URL, General.Get_Title(URL))

                    Data_to_Cache.append(URL)
                    Current_Step += 1

        elif Type == "Tag":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.tag(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
            Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json")
            Posts = CSE_Response[0]["edge_hashtag_to_media"]["edges"]
            Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower())
            Current_Step = 0

            for Post in Posts:
                Shortcode = Post["node"]["shortcode"]
                URL = "https://www.instagram.com/p/" + Shortcode + "/"

                if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):

                    if Output_file:
                        Output_Connections.Output(Output_file, URL, General.Get_Title(URL))

                    Data_to_Cache.append(URL)
                    Current_Step += 1

        elif Type == "Location":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.location(Query)
            CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
            Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json")
            Posts = CSE_Response[0]["edge_location_to_media"]["edges"]
            Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower())
            Current_Step = 0

            for Post in Posts:
                Shortcode = Post["node"]["shortcode"]
                URL = "https://www.instagram.com/p/" + Shortcode + "/"

                if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(Limit):

                    if Output_file:
                        Output_Connections.Output(Output_file, URL, General.Get_Title(URL))

                    Data_to_Cache.append(URL)
                    Current_Step += 1

        elif Type == "Media":
            Local_Plugin_Name = Plugin_Name + "-" + Type
            CSE_Response = instagram_explore.media(Query)

            if CSE_Response:
                CSE_JSON_Output_Response = json.dumps(CSE_Response, indent=4, sort_keys=True)
                Output_file = General.Main_File_Create(Directory, Local_Plugin_Name, CSE_JSON_Output_Response, Query, ".json")
                URL = "https://www.instagram.com/p/" + Query + "/"

                if URL not in Cached_Data and URL not in Data_to_Cache:

                    if Output_file:
                        Output_Connections = General.Connections(Query, Local_Plugin_Name, "instagram.com", "Data Leakage", Task_ID, Local_Plugin_Name.lower())
                        Output_Connections.Output(Output_file, URL, General.Get_Title(URL))

                    Data_to_Cache.append(URL)

            else:
                logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid response.")

        else:
            logging.warning(General.Date() + " - " + __name__.strip('plugins.') + " - Invalid type provided.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #26
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Ebay_API_Key = Load_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            API_Request = Connection(appid=Ebay_API_Key, config_file=None)
            API_Response = API_Request.execute('findItemsAdvanced',
                                               {'keywords': Query})
            JSON_Output_Response = json.dumps(API_Response.dict(),
                                              indent=4,
                                              sort_keys=True)
            JSON_Response = json.dumps(API_Response.dict())
            JSON_Response = json.loads(JSON_Response)
            General.Main_File_Create(Directory, Plugin_Name,
                                     JSON_Output_Response, Query, ".json")

            if JSON_Response["ack"] == "Success":
                Current_Step = 0

                for JSON_Line in JSON_Response['searchResult']['item']:
                    Ebay_Item_URL = JSON_Line['viewItemURL']

                    if Ebay_Item_URL not in Cached_Data and Ebay_Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Ebay_Item_Regex = re.search(
                            r"http\:\/\/www\.ebay\.com\/itm\/([\w\d\-]+)\-\/\d+",
                            Ebay_Item_URL)
                        headers = {
                            'Content-Type': 'application/json',
                            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0',
                            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                            'Accept-Language': 'en-US,en;q=0.5'
                        }
                        Ebay_Item_Response = requests.get(Ebay_Item_URL,
                                                          headers=headers).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Ebay_Item_Response,
                            Ebay_Item_Regex.group(1), The_File_Extension)

                        if Output_file:
                            General.Connections(
                                Output_file, Query, Plugin_Name, Ebay_Item_URL,
                                "ebay.com", "Data Leakage", Task_ID,
                                General.Get_Title(Ebay_Item_URL),
                                Plugin_Name.lower())

                        Data_to_Cache.append(Ebay_Item_URL)
                        Current_Step += 1

            else:
                logging.warning(General.Date() + " No results found.")

        except:
            logging.warning(General.Date() + " Failed to make API call.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #27
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Tor_Pull_URL = Tor_General_URL + Query
        Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL,
                                                  Tor_Scrape_Regex_URL)

        if Tor_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   Tor_Plugin_Name.lower(),
                                                   "\n".join(Tor_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in Tor_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            Tor_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No Tor links scraped.")

        I2P_Pull_URL = I2P_General_URL + Query
        I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL,
                                                  I2P_Scrape_Regex_URL)

        if I2P_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   I2P_Plugin_Name.lower(),
                                                   "\n".join(I2P_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in I2P_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            I2P_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        else:
            logging.info(General.Date() + " No I2P links scraped.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
def Transaction_Search(Query_List, Task_ID, Type, **kwargs):
    Local_Plugin_Name = Plugin_Name + "-Transaction-Search"
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Local_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        if Type == "btc" or Type == "bch":
            Query_Regex = re.search(r"[\d\w]{64}", Query)

        elif Type == "eth":
            Query_Regex = re.search(r"(0x[\d\w]{64})", Query)

        else:
            logging.warning(General.Date() + " Invalid type provided.")

        if Query_Regex:
            Main_URL = "https://www.blockchain.com/" + Type + "/tx/" + Query
            Main_Response = requests.get(Main_URL).text

            if Type == "btc":
                Address_Regex = re.findall(r"\/btc\/address\/([\d\w]{26,34})",
                                           Main_Response)

            elif Type == "bch":
                Address_Regex = re.findall(r"([\d\w]{42})", Main_Response)

            elif Type == "eth":
                Address_Regex = re.findall(r"(0x[\w\d]{40})", Main_Response)

            else:
                logging.warning(General.Date() + " Invalid type provided.")

            if Address_Regex:
                Current_Step = 0

                for Transaction in Address_Regex:
                    Query_URL = "https://www.blockchain.com/" + Type + "/address/" + Transaction

                    if Query_URL not in Cached_Data and Query_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Transaction_Response = requests.get(Query_URL).text
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Local_Plugin_Name,
                            Transaction_Response, Transaction,
                            The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query,
                                                Local_Plugin_Name, Query_URL,
                                                "blockchain.com",
                                                "Blockchain Address", Task_ID,
                                                General.Get_Title(Query_URL),
                                                Plugin_Name.lower())

                        Data_to_Cache.append(Query_URL)
                        Current_Step += 1

            else:
                logging.warning(General.Date() +
                                " Failed to match regular expression.")

        else:
            logging.warning(General.Date() +
                            " Failed to match regular expression.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")
def Search(Query_List, Task_ID, Type_of_Query, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    try:

        if kwargs.get('Limit'):

            if int(kwargs["Limit"]) > 0:
                Limit = kwargs["Limit"]

            else:
                Limit = 10

        else:
            Limit = 10

        Directory = General.Make_Directory(Concat_Plugin_Name)

        logger = logging.getLogger()
        logger.setLevel(logging.INFO)

        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        try:
            pyhibp.set_api_key(key=Load_Configuration())

        except:
            logging.warning(General.Date() + " Failed to set API key, make sure it is set in the configuration file.")

        Query_List = General.Convert_to_List(Query_List)

        if Type_of_Query == "email":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_pastes(email_address=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Domain = Query_Response[0]["Source"]
                    ID = Query_Response[0]["Id"]
                    Link = "https://www." + Domain + ".com/" + ID
                    JSON_Query_Response = json.dumps(Query_Response, indent=4, sort_keys=True)

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, JSON_Query_Response, "email", The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "breach":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_single_breach(breach_name=Query)

                if Query_Response:
                    Domain = Query_Response["Domain"]
                    Link = "https://www." + Domain + ".com/"
                    JSON_Query_Response = json.dumps(Query_Response, indent=4, sort_keys=True)

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "breach", The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "password":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pw.is_password_breached(password=Query)
                logging.info(Query_Response)

                if Query_Response:
                    Link = "https://haveibeenpwned.com/Passwords?" + Query

                    if Link not in Cached_Data and Link not in Data_to_Cache:
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Query_Response), "password", ".txt")

                        if Output_file:
                            General.Connections(Output_file, Query, Local_Plugin_Name, Link, "haveibeenpwned.com", "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                        Data_to_Cache.append(Link)

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        elif Type_of_Query == "account":
            Local_Plugin_Name = Plugin_Name + "-" + Type_of_Query
            Cached_Data = General.Get_Cache(Directory, Local_Plugin_Name)

            if not Cached_Data:
                Cached_Data = []

            for Query in Query_List:
                Query_Response = pyhibp.get_account_breaches(account=Query, truncate_response=True)

                if Query_Response:
                    Current_Step = 0

                    for Response in Query_Response:
                        Current_Response = pyhibp.get_single_breach(breach_name=Response['Name'])
                        JSON_Query_Response = json.dumps(Current_Response, indent=4, sort_keys=True)
                        Link = "https://" + Current_Response['Domain']

                        if Current_Response['Domain'] not in Cached_Data and Current_Response['Domain'] not in Data_to_Cache and Current_Step < int(Limit):
                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Local_Plugin_Name, JSON_Query_Response, "account", The_File_Extension)

                            if Output_file:
                                General.Connections(Output_file, Query, Local_Plugin_Name, Link, Current_Response['Domain'], "Data Leakage", Task_ID, General.Get_Title(Link), Local_Plugin_Name.lower())

                            Data_to_Cache.append(Current_Response['Domain'])
                            Current_Step += 1

            if Cached_Data:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "a")

            else:
                General.Write_Cache(Directory, Data_to_Cache, Local_Plugin_Name, "w")

        else:
            logging.warning(General.Date() + " Invalid type provided.")

    except:
        logging.warning(General.Date() + " Execution error.")
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit'):

        if int(kwargs["Limit"]) > 0:
            Limit = kwargs["Limit"]

        else:
            Limit = 10

    else:
        Limit = 10

    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Location = General.Load_Location_Configuration()
    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        try:
            Response = requests.get("http://itunes.apple.com/search?term=" +
                                    Query + "&country=" + Location +
                                    "&entity=software&limit=" +
                                    str(Limit)).text

        except:
            logging.warning(
                General.Date() + " - " + __name__.strip('plugins.') +
                " - Failed to make request, are you connected to the internet?"
            )
            continue

        JSON_Response = json.loads(Response)
        General.Main_File_Create(Directory, "iTunes", json.dumps(JSON_Response, indent=4, sort_keys=True), Query, ".json")

        if 'resultCount' in JSON_Response:

            if not JSON_Response['resultCount'] == 0:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, "instagram.com", "Data Leakage",
                        Task_ID, Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Response = requests.get(
                            JSON_Object['artistViewUrl']).text

                        if JSON_Object['artistViewUrl'] not in Cached_Data and JSON_Object['artistViewUrl'] not in Data_to_Cache:
                            iTunes_Regex = re.search(
                                r"https\:\/\/itunes\.apple\.com\/" + Location +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?mt\=\d\&uo\=\d",
                                JSON_Object['artistViewUrl'])

                            if iTunes_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query,
                                    Plugin_Name, JSON_Object_Response,
                                    iTunes_Regex.group(1), The_File_Extension)

                                if Output_file:
                                    Output_Connections.Output(
                                        Output_file,
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']))

                            Data_to_Cache.append(JSON_Object['artistViewUrl'])

                else:
                    logging.warning(
                        General.Date() + " - " + __name__.strip('plugins.') +
                        " - Invalid value provided, value less than 0.")

            else:
                logging.warning(General.Date() + " - " +
                                __name__.strip('plugins.') +
                                " - Invalid value provided, value equal to 0.")

        else:
            logging.warning(General.Date() + " - " +
                            __name__.strip('plugins.') + " - Invalid value.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")