Example #1
def Search(Query_List, Task_ID):
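    # Email verification plugin: for each email-formatted query, requests the
    # verify-as-guest endpoint, flattens the nested "response" object into a
    # single table, and saves JSON plus an HTML conversion of the result.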

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            if Common.Regex_Handler(Query, Type="Email"):
                Link = f"https://{Domain}/home/verify-as-guest/{Query}"
                JSON_Response = Common.Request_Handler(Link)
                JSON_Object = Common.JSON_Handler(JSON_Response)

                if JSON_Object.Is_JSON():
                    JSON_Response = JSON_Object.To_JSON_Loads()
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Table_JSON = {}

                    for Key, Value in JSON_Response.items():

                        if Key != "response":
                            Table_JSON[Key] = Value

                        else:

                            for Det_Key, Det_Val in JSON_Response[
                                    "response"].items():
                                Table_JSON[Det_Key] = Det_Val

                    Filter_JSON = [Table_JSON]
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Email Information",
                        Task_ID, Concat_Plugin_Name)

                    if Query not in Cached_Data and Query not in Data_to_Cache:
                        Title = f"Email Verification | {Query}"
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Concat_Plugin_Name,
                            JSON_Output_Response, Title,
                            The_File_Extensions["Main"])
                        HTML_Output_File_Data = General.JSONDict_to_HTML(
                            Filter_JSON, JSON_Output_Response,
                            f"Email Verification Query {Query}")
                        HTML_Output_File = General.Create_Query_Results_Output_File(
                            Directory, Query, Concat_Plugin_Name,
                            HTML_Output_File_Data, Title,
                            The_File_Extensions["Main_Converted"])

                        if Output_file and HTML_Output_File:
                            Output_Connections.Output(
                                [Output_file, HTML_Output_File], Link, Title,
                                Concat_Plugin_Name)
                            Data_to_Cache.append(Link)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #2
def Search(Query_List, Task_ID):
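    # Vehicle registration lookup: posts a pre-built quote payload to the Coles
    # Insurance rego-search endpoint once per Australian state and records any
    # registration that returns a vehicle make and model.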
    Data_to_Cache = []
    Cached_Data = []
    Directory = General.Make_Directory(Concat_Plugin_Name)

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Concat_Plugin_Name)
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        for State in States:
            Post_URL = 'https://general-insurance.coles.com.au/bin/wesfarmers/search/vehiclerego'
            data = '''{"isRegoSearch":"YES","regoSearchCount":2,"regoMatchCount":1,"regoSearchFailureCount":0,"failPaymentAttempts":0,"pauseStep":"false","campaignBaseURL":"https://secure.colesinsurance.com.au/campaignimages/","sessionState":"OPEN","sessionStep":"0","policyHolders":[],"updateSessionURL":"http://dev.gtw.gp-mdl.auiag.corp:9000/sys/colessessionservice/motor/v1/update-session","insuranceType":"COMP","startDate":"03/07/2019","drivers":[{"driverRef":"MainDriver","yearsLicenced":{"vehRef":"veh1"}}],"priceBeatAttemptsRemaining":"2","currentInsurerOptions":[{"id":"AAMI","value":"AAMI","text":"AAMI"},{"id":"Allianz","value":"Allianz","text":"Allianz"},{"id":"Apia","value":"Apia","text":"Apia"},{"id":"Bingle","value":"Bingle","text":"Bingle"},{"id":"Broker","value":"Broker","text":"Broker"},{"id":"BudgDirect","value":"BudgDirect","text":"Budget Direct"},{"id":"Buzz","value":"Buzz","text":"Buzz"},{"id":"CGU","value":"CGU","text":"CGU"},{"id":"Coles","value":"Coles","text":"Coles"},{"id":"CommInsure","value":"CommInsure","text":"CommInsure"},{"id":"GIO","value":"GIO","text":"GIO"},{"id":"HBF","value":"HBF","text":"HBF"},{"id":"JustCar","value":"JustCar","text":"Just Car"},{"id":"NRMA","value":"NRMA","text":"NRMA"},{"id":"Progress","value":"Progress","text":"Progressive"},{"id":"QBE","value":"QBE","text":"QBE"},{"id":"RAA","value":"RAA","text":"RAA"},{"id":"RAC","value":"RAC","text":"RAC"},{"id":"RACQ","value":"RACQ","text":"RACQ"},{"id":"RACT","value":"RACT","text":"RACT"},{"id":"RACV","value":"RACV","text":"RACV"},{"id":"Real","value":"Real","text":"Real"},{"id":"SGIC","value":"SGIC","text":"SGIC"},{"id":"SGIO","value":"SGIO","text":"SGIO"},{"id":"Shannons","value":"Shannons","text":"Shannons"},{"id":"Suncorp","value":"Suncorp","text":"Suncorp"},{"id":"Youi","value":"Youi","text":"Youi"},{"id":"None","value":"None","text":"Car is not currently insured"},{"id":"Dontknow","value":"Dontknow","text":"Don't Know"},{"id":"Other","value":"Other","text":"Other"}],"coverLevelOptions":[{"id":"Gold","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"NRMA","code":"Gold","order":"1"},{"id":"Gold1","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"BudgDirect","code":"Gold","order":"1"},{"id":"Standard2","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"SGIC","code":"Standard","order":"2"},{"id":"Gold6","value":"Comprehensive Advantages Car Insurance","text":"Comprehensive Advantages Car Insurance","flagname":"Suncorp","code":"Gold","order":"1"},{"id":"Standard","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"GIO","code":"Standard","order":"2"},{"id":"Standard0","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"NRMA","code":"Standard","order":"2"},{"id":"Gold4","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIC","code":"Gold","order":"1"},{"id":"Standard5","value":"Full Comprehensive Car Insurance","text":"Full Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Standard","order":"2"},{"id":"Gold5","value":"Comprehensive Plus Car Insurance","text":"Comprehensive Plus Car Insurance","flagname":"SGIO","code":"Gold","order":"1"},{"id":"Gold2","value":"Platinum Car Insurance","text":"Platinum Car Insurance","flagname":"GIO","code":"Gold","order":"1"},{"id":"Standard3","value":"Comprehensive Car Insurance","text":"Comprehensive Car 
Insurance","flagname":"SGIO","code":"Standard","order":"2"},{"id":"Gold3","value":"Complete Care Motor Insurance","text":"Complete Care Motor Insurance","flagname":"RACV","code":"Gold","order":"1"},{"id":"Standard4","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"Suncorp","code":"Standard","order":"2"},{"id":"Gold0","value":"Gold Comprehensive Car Insurance","text":"Gold Comprehensive Car Insurance","flagname":"1300 Insurance","code":"Gold","order":"1"},{"id":"Standard1","value":"Comprehensive Car Insurance","text":"Comprehensive Car Insurance","flagname":"RACV","code":"Standard","order":"2"}],"riskAddress":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"postcode":{"latitude":"-33.86579240","locality":"PYRMONT","postcode":"2009","extraAddressInfo":{"houseNumber":"1","lotNumber":"1","streetName":"HARRIS","streetSuffix":"STREET","unitNumber":"1"},"state":"''' + State + '''","line3":null,"isVerificationRequired":null,"gnaf":"GANSW709981139","line2":null,"line1":"1 Harris Street","longitude":"151.19109690","displayString":"1 HARRIS STREET, PYRMONT, NSW, 2009"},"carRegistration":"''' + Query + '''","chooseValue":"","whatValueInsure":"Marketvalue","whatValueInsure_value":{"key":"Marketvalue","value":"Market Value"}}'''
            headers = {
                'Content-Type': 'text/plain;charset=UTF-8',
                'User-Agent':
                'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.90 Safari/537.36',
                'Accept': '*/*',
                'Accept-Encoding': 'gzip, deflate, br',
                'Referer':
                'https://general-insurance.coles.com.au/motor/get-quote',
                'Origin': 'https://general-insurance.coles.com.au',
                'Host': 'general-insurance.coles.com.au'
            }
            Registration_Response = requests.post(Post_URL,
                                                  data=data,
                                                  headers=headers).text
            Registration_Response = json.loads(Registration_Response)

            try:
                Title = Registration_Response['vehicles'][0][
                    'make'] + " " + Registration_Response['vehicles'][0][
                        'model']
                Item_URL = Post_URL + "?" + Query

                if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name,
                        json.dumps(Registration_Response,
                                   indent=4,
                                   sort_keys=True), Title, The_File_Extension)

                    if Output_file:
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Item_URL,
                                            "general-insurance.coles.com.au",
                                            "Data Leakage", Task_ID, Title,
                                            Concat_Plugin_Name)

                    Data_to_Cache.append(Item_URL)

            except:
                logging.info(
                    str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                    + " No result found for given query " + Query +
                    " for state " + State + ".")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #3
def Search(Query_List, Task_ID, **kwargs):
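    # Dark-web search: pulls Tor (.onion) and I2P result URLs for each query
    # from a clearnet search gateway (ahmia.fi, per the connection metadata)
    # and logs up to Limit new links per network.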
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Tor_Pull_URL = Tor_General_URL + Query
        Tor_Scrape_URLs = General.Get_Latest_URLs(Tor_Pull_URL,
                                                  Tor_Scrape_Regex_URL, Is_Tor)

        if Tor_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   Tor_Plugin_Name.lower(),
                                                   "\n".join(Tor_Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in Tor_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            Tor_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

        I2P_Pull_URL = I2P_General_URL + Query
        I2P_Scrape_URLs = General.Get_Latest_URLs(I2P_Pull_URL,
                                                  I2P_Scrape_Regex_URL, Is_Tor)

        if I2P_Scrape_URLs:
            Output_file = General.Main_File_Create(Directory,
                                                   I2P_Plugin_Name.lower(),
                                                   "\n".join(Scrape_URLs),
                                                   Query, The_File_Extension)

            if Output_file:
                Current_Step = 0

                for URL in I2P_Scrape_URLs:

                    if URL not in Cached_Data and URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query,
                                            I2P_Plugin_Name, URL, "ahmia.fl",
                                            "Domain Spoof", Task_ID,
                                            General.Get_Title(URL),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(URL)
                        Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #4
def Search(Query_List, Task_ID, **kwargs):
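    # Yandex Search XML API plugin: fetches results as XML, converts them to
    # JSON with xmltodict, and archives each result page that is not already cached.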

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Yandex_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Yandex_Response = requests.get(
                f"https://yandex.com/search/xml?user={Yandex_Details[0]}&key={Yandex_Details[1]}&query={Query}&l10n=en&sortby=rlv&filter=none&maxpassages=five&groupby=attr% 3D% 22% 22.mode% 3Dflat.groups-on-page% 3D{str(Limit)}.docs-in-group% 3D1"
            ).text
            JSON_Response = xmltodict.parse(Yandex_Response)
            JSON_Output_Response = json.dumps(JSON_Response,
                                              indent=4,
                                              sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Output_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "yandex.ru",
                                                     "Search Result", Task_ID,
                                                     Plugin_Name.lower())
            New_JSON_Response = Recursive_Dict_Check(
                ["yandexsearch", "response", "results", "grouping", "group"],
                JSON_Response)

            if New_JSON_Response:

                for Yandex_Item_Line in New_JSON_Response:

                    try:

                        if Recursive_Dict_Check(["doc", "url"],
                                                Yandex_Item_Line):
                            Yandex_Item_Line = Yandex_Item_Line['doc']
                            Yandex_URL = Yandex_Item_Line['url']
                            Title = Recursive_Dict_Check(["title", "#text"],
                                                         Yandex_Item_Line)

                            if Title:
                                Title = f"Yandex | {Title}"

                            else:
                                Title = General.Get_Title(Yandex_URL)
                                Title = f"Yandex | {Title}"

                            if Yandex_URL not in Cached_Data and Yandex_URL not in Data_to_Cache:
                                headers = {
                                    'Content-Type': 'application/json',
                                    'User-Agent':
                                    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0',
                                    'Accept':
                                    'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                                    'Accept-Language': 'en-US,en;q=0.5'
                                }
                                Yandex_Item_Response = requests.get(
                                    Yandex_URL, headers=headers).text
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Yandex_Item_Response, Yandex_URL,
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Yandex_URL,
                                        Title, Plugin_Name.lower())
                                    Data_to_Cache.append(Yandex_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except Exception as e:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #5
def Search(Query_List, Task_ID, **kwargs):
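    # PhishTank plugin: scrapes the target-search listing for phish_detail
    # links, resolves each detail page to the live phishing URL it references,
    # and archives that page, up to Limit results per query.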
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:
        Pull_URL = "https://www.phishtank.com/target_search.php?target_id=" + Query + "&valid=y&active=All&Search=Search"
        Content = requests.get(Pull_URL).text
        soup = BeautifulSoup(Content, features="lxml")
        tds = soup.findAll('td')
        Links = []

        for td in tds:
            link = td.find('a')

            if link and 'phish_detail.php?phish_id=' in link.attrs['href']:
                Full_Link = "https://www.phishtank.com/" + link.attrs['href']
                Links.append(Full_Link)

        Current_Step = 0

        for Link in Links:
            Current_Content = requests.get(Link).text
            Current_Soup = BeautifulSoup(Current_Content, features="lxml")
            Spans = Current_Soup.find('span',
                                      {"style": "word-wrap:break-word;"})
            Current_Link = Spans.string

            if Current_Link:
                Phish_Site_Response = requests.get(Current_Link).text
                Output_file_query = Query.replace(" ", "-")
                Output_file = General.Create_Query_Results_Output_File(
                    Directory, Output_file_query, Plugin_Name,
                    Phish_Site_Response,
                    Link.replace(
                        "https://www.phishtank.com/phish_detail.php?phish_id=",
                        ""), The_File_Extension)

                if Output_file:

                    if Current_Link not in Cached_Data and Current_Link not in Data_to_Cache and Current_Step < int(
                            Limit):
                        General.Connections(Output_file, Query, Plugin_Name,
                                            Current_Link, "phishtank.com",
                                            "Phishing", Task_ID,
                                            General.Get_Title(Current_Link),
                                            Plugin_Name.lower())
                        Data_to_Cache.append(Current_Link)
                        Current_Step += 1

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
Example #6
def Search(Query_List, Task_ID, Type, Limit=10):
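    # Shodan plugin: "Search" mode iterates search-API matches for HTTP-style
    # modules and saves each service response; "Host" mode archives the Shodan
    # host page for the queried IP address.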

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Shodan_API_Key = Load_Configuration()
        API_Session = Shodan(Shodan_API_Key)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:

                if Type == "Search":
                    Local_Plugin_Name = Plugin_Name + "-Search"

                    try:
                        API_Response = API_Session.search(Query)

                    except Exception as e:
                        logging.error(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}."
                        )
                        break

                    JSON_Object = Common.JSON_Handler(API_Response)
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, Local_Plugin_Name, JSON_Output_Response,
                        Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain, "Domain Information",
                        Task_ID, Plugin_Name.lower())
                    Current_Step = 0

                    for Shodan_Item in API_Response["matches"]:
                        Shodan_Item_Module = Shodan_Item['_shodan']['module']
                        Shodan_Item_Module = Shodan_Item_Module.replace(
                            '-simple-new', '')

                        if Shodan_Item_Module.startswith("http"):
                            Shodan_Item_Host = ""
                            Shodan_Item_Port = 0

                            if 'http' in Shodan_Item:
                                Shodan_Item_Host = Shodan_Item['http']['host']
                                Shodan_Item_Response = Shodan_Item['http'][
                                    'html']

                            elif 'ip_str' in Shodan_Item and 'domains' in Shodan_Item and len(
                                    Shodan_Item['domains']) > 0:
                                Shodan_Item_Host = Shodan_Item['domains'][0]
                                Shodan_Item_Response = Shodan_Item['data']

                            elif 'ip_str' in Shodan_Item and 'domains' not in Shodan_Item:
                                Shodan_Item_Host = Shodan_Item['ip_str']
                                Shodan_Item_Response = Shodan_Item['data']

                            if Shodan_Item_Host:

                                if 'port' in Shodan_Item:

                                    if int(Shodan_Item['port']) not in [
                                            80, 443
                                    ]:
                                        Shodan_Item_Port = Shodan_Item['port']

                                if Shodan_Item_Port != 0:
                                    Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}:{str(Shodan_Item_Port)}"

                                else:
                                    Shodan_Item_URL = f"{Shodan_Item_Module}://{Shodan_Item_Host}"

                                Title = "Shodan | " + str(Shodan_Item_Host)

                                if Shodan_Item_URL not in Cached_Data and Shodan_Item_URL not in Data_to_Cache and Current_Step < int(
                                        Limit):
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, Local_Plugin_Name,
                                        Shodan_Item_Response, Shodan_Item_Host,
                                        The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output(
                                            [Main_File, Output_file],
                                            Shodan_Item_URL, Title,
                                            Plugin_Name.lower())
                                        Data_to_Cache.append(Shodan_Item_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                        )

                                    Current_Step += 1

                elif Type == "Host":
                    Local_Plugin_Name = Plugin_Name + "-Host"

                    try:
                        API_Response = API_Session.host(Query)

                    except Exception as e:
                        logging.error(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}."
                        )
                        break

                    JSON_Object = Common.JSON_Handler(API_Response)
                    JSON_Output_Response = JSON_Object.Dump_JSON()
                    Main_File = General.Main_File_Create(
                        Directory, Local_Plugin_Name, JSON_Output_Response,
                        Query, The_File_Extensions["Main"])
                    Output_Connections = General.Connections(
                        Query, Local_Plugin_Name, Domain, "Domain Information",
                        Task_ID, Plugin_Name.lower())
                    Shodan_URL = f"https://www.{Domain}/host/{Query}"
                    Title = "Shodan | " + Query

                    if Shodan_URL not in Cached_Data and Shodan_URL not in Data_to_Cache:
                        Shodan_Responses = Common.Request_Handler(
                            Shodan_URL,
                            Application_JSON_CT=True,
                            Accept_XML=True,
                            Accept_Language_EN_US=True,
                            Filter=True,
                            Host=f"https://www.{Domain}")
                        Shodan_Response = Shodan_Responses["Filtered"]
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Shodan_Response,
                            Query, The_File_Extensions["Query"])

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Shodan_URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Shodan_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - No results found."
                        )

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to complete task."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #7
    def Search(self):
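        # Australian Business Register lookup: "ABN" mode fetches a single ABN
        # view page; "ACN" mode posts a company-name search and extracts the
        # matching ABNs from the results via regex.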

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Concat_Plugin_Name)
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Concat_Plugin_Name)
            handler = logging.FileHandler(os.path.join(Directory, Log_File),
                                          "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:

                try:

                    if self.Type == "ABN":
                        Main_URL = f'https://{self.Domain}/ABN/View?id=' + Query
                        Responses = Common.Request_Handler(
                            Main_URL,
                            Filter=True,
                            Host=f"https://www.{self.Domain}")
                        Response = Responses["Regular"]

                        try:

                            if 'Error searching ABN Lookup' not in Response:
                                Query = str(int(Query))
                                Response = Responses["Filtered"]

                                if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                    Output_file = General.Create_Query_Results_Output_File(
                                        Directory, Query, self.Plugin_Name,
                                        Response, General.Get_Title(Main_URL),
                                        self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections = General.Connections(
                                            Query, self.Plugin_Name,
                                            self.Domain, self.Result_Type,
                                            self.Task_ID, self.Plugin_Name)
                                        Output_Connections.Output(
                                            [Output_file], Main_URL,
                                            General.Get_Title(Main_URL).replace(
                                                " | ABN Lookup", ""),
                                            self.Concat_Plugin_Name)
                                        Data_to_Cache.append(Main_URL)

                                    else:
                                        logging.warning(
                                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                        )

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - ABN Lookup returned error."
                                )

                        except:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for ABN Search."
                            )

                    elif self.Type == "ACN":
                        Main_URL = f'https://{self.Domain}/Search/Run'
                        Data = {
                            'SearchParameters.SearchText':
                            Query,
                            'SearchParameters.AllNames':
                            'true',
                            'ctl00%24ContentPagePlaceholder%24SearchBox%24MainSearchButton':
                            'Search'
                        }
                        Responses = Common.Request_Handler(
                            Main_URL,
                            Method="POST",
                            Filter=True,
                            Host=f"https://www.{self.Domain}",
                            Data=Data)
                        Response = Responses["Regular"]
                        Filtered_Response = Responses["Filtered"]

                        try:
                            ACN_Regex = Common.Regex_Handler(
                                Query, Type="Company_Name")

                            if ACN_Regex:
                                Main_File = General.Main_File_Create(
                                    Directory, self.Plugin_Name,
                                    Filtered_Response, Query,
                                    self.The_File_Extensions["Main"])
                                Current_Step = 0
                                ABNs_Regex = Common.Regex_Handler(
                                    Response,
                                    Custom_Regex=
                                    r"\<input\sid\=\"Results\_NameItems\_\d+\_\_Compressed\"\sname\=\"Results\.NameItems\[\d+\]\.Compressed\"\stype\=\"hidden\"\svalue\=\"(\d{11})\,\d{2}\s\d{3}\s\d{3}\s\d{3}\,0000000001\,Active\,active\,([\d\w\s\&\-\_\.]+)\,Current\,",
                                    Findall=True)

                                if ABNs_Regex:
                                    Output_Connections = General.Connections(
                                        Query, self.Plugin_Name, self.Domain,
                                        self.Result_Type, self.Task_ID,
                                        self.Plugin_Name)

                                    for ABN_URL, ACN in ABNs_Regex:
                                        Full_ABN_URL = f'https://{self.Domain}/ABN/View?abn={ABN_URL}'

                                        if Full_ABN_URL not in Cached_Data and Full_ABN_URL not in Data_to_Cache and Current_Step < int(
                                                self.Limit):
                                            ACN = ACN.rstrip()
                                            Current_Responses = Common.Request_Handler(
                                                Full_ABN_URL,
                                                Filter=True,
                                                Host=
                                                f"https://www.{self.Domain}")
                                            Current_Response = Current_Responses[
                                                "Filtered"]
                                            Output_file = General.Create_Query_Results_Output_File(
                                                Directory, Query,
                                                self.Plugin_Name,
                                                str(Current_Response),
                                                ACN.replace(' ', '-'),
                                                self.The_File_Extensions["Query"])

                                            if Output_file:
                                                Output_Connections.Output(
                                                    [Main_File, Output_file],
                                                    Full_ABN_URL,
                                                    General.Get_Title(
                                                        Full_ABN_URL).replace(
                                                            " | ABN Lookup", ""),
                                                    self.Concat_Plugin_Name)
                                                Data_to_Cache.append(
                                                    Full_ABN_URL)

                                            else:
                                                logging.warning(
                                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist."
                                                )

                                            Current_Step += 1

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Response did not match regular expression."
                                    )

                            else:
                                logging.warning(
                                    f"{Common.Date()} - {self.Logging_Plugin_Name} - Query did not match regular expression."
                                )

                        except:
                            logging.warning(
                                f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid query provided for ACN Search."
                            )

                    else:
                        logging.warning(
                            f"{Common.Date()} - {self.Logging_Plugin_Name} - Invalid request type."
                        )

                except:
                    logging.warning(
                        f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to make request."
                    )

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(
                f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #8
    def Search(self):
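        # DuckDuckGo Instant Answer API plugin: walks RelatedTopics (flattening
        # nested Topics lists into the iteration) and archives each FirstURL,
        # up to the configured limit.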

        try:
            Data_to_Cache = []
            Directory = General.Make_Directory(self.Plugin_Name.lower())
            logger = logging.getLogger()
            logger.setLevel(logging.INFO)
            Log_File = General.Logging(Directory, self.Plugin_Name.lower())
            handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            logger.addHandler(handler)
            Cached_Data_Object = General.Cache(Directory, self.Plugin_Name)
            Cached_Data = Cached_Data_Object.Get_Cache()

            for Query in self.Query_List:
                URL_Query = urllib.parse.quote(Query)
                URL = f"https://api.duckduckgo.com/?q={URL_Query}&format=json"
                DDG_Response = Common.Request_Handler(URL)
                JSON_Object = Common.JSON_Handler(DDG_Response)
                JSON_Response = JSON_Object.To_JSON_Loads()
                JSON_Output_Response = JSON_Object.Dump_JSON()
                Main_File = General.Main_File_Create(Directory, self.Plugin_Name, JSON_Output_Response, Query, self.The_File_Extensions["Main"])
                Output_Connections = General.Connections(Query, self.Plugin_Name, self.Domain, self.Result_Type, self.Task_ID, self.Plugin_Name.lower())

                if JSON_Response.get('RelatedTopics'):
                    Current_Step = 0

                    for DDG_Item_Link in JSON_Response['RelatedTopics']:

                        try:

                            if 'FirstURL' in DDG_Item_Link:
                                DDG_URL = DDG_Item_Link['FirstURL']
                                Title = General.Get_Title(DDG_URL)
                                Title = f"DuckDuckGo | {Title}"

                                if DDG_URL not in Cached_Data and DDG_URL not in Data_to_Cache and Current_Step < int(self.Limit):
                                    DDG_Item_Responses = Common.Request_Handler(DDG_URL, Filter=True, Host=f"https://www.{self.Domain}")
                                    DDG_Item_Response = DDG_Item_Responses["Filtered"]
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, self.Plugin_Name, DDG_Item_Response, DDG_URL, self.The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections.Output([Main_File, Output_file], DDG_URL, Title, self.Plugin_Name.lower())
                                        Data_to_Cache.append(DDG_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - Failed to create output file. File may already exist.")

                                    Current_Step += 1

                                else:
                                    break

                            elif 'Topics' in DDG_Item_Link:

                                if isinstance(DDG_Item_Link['Topics'], list):
                                    JSON_Response['RelatedTopics'].extend(DDG_Item_Link['Topics'])

                        except Exception as e:
                            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")

                else:
                    logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - No results found.")

            Cached_Data_Object.Write_Cache(Data_to_Cache)

        except Exception as e:
            logging.warning(f"{Common.Date()} - {self.Logging_Plugin_Name} - {str(e)}")
Example #9
def Search(Query_List, Task_ID, **kwargs):
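    # Reddit plugin: searches the configured subreddits via PRAW, then archives
    # each result URL that matches the reddit.com comment-thread pattern.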

    try:
        Data_to_Cache = []
        Results = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Reddit_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Limit = General.Get_Limit(kwargs)
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:
                Reddit_Connection = praw.Reddit(
                    client_id=Reddit_Details[0],
                    client_secret=Reddit_Details[1],
                    user_agent=Reddit_Details[2],
                    username=Reddit_Details[3],
                    password=Reddit_Details[4])
                All_Subreddits = Reddit_Connection.subreddit(Reddit_Details[5])

                for Subreddit in All_Subreddits.search(
                        Query, limit=Limit
                ):  # Limit, subreddit and search to be controlled by the web app.
                    Results.append(Subreddit.url)

            except:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to get results. Are you connected to the internet?"
                )

            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     "reddit.com", "Forum",
                                                     Task_ID,
                                                     Plugin_Name.lower())

            for Result in Results:

                if Result not in Cached_Data and Result not in Data_to_Cache:

                    try:
                        Reddit_Regex = re.search(
                            r"https\:\/\/www\.reddit\.com\/r\/(\w+)\/comments\/(\w+)\/([\w\d]+)\/",
                            Result)

                        if Reddit_Regex:
                            Reddit_Response = requests.get(
                                Result, headers=headers).text
                            Output_file = General.Create_Query_Results_Output_File(
                                Directory, Query, Plugin_Name, Reddit_Response,
                                Reddit_Regex.group(3), The_File_Extension)

                            if Output_file:
                                Output_Connections.Output(
                                    [Output_file], Result,
                                    General.Get_Title(Result),
                                    Plugin_Name.lower())
                                Data_to_Cache.append(Result)

                            else:
                                logging.warning(
                                    f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                )

                    except:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create file."
                        )

        if Cached_Data:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

        else:
            General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #10
def Search(Query_List, Task_ID, Limit=10):
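    # Craigslist plugin: reads the RSS search feed for the configured location
    # and archives each listing page, deriving the output filename from the URL path.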

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Craigslist_Location = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            if Craigslist_Location:
                Main_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/search/sss?format=rss&query={Query}"
                Craigslist_Response = feedparser.parse(Main_URL)
                Craigslist_Items = Craigslist_Response["items"]
                Current_Step = 0

                for Item in Craigslist_Items:
                    Item_URL = Item["link"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Craigslist_Responses = Common.Request_Handler(
                            Item_URL,
                            Filter=True,
                            Host=
                            f"https://{Craigslist_Location.lower()}.craigslist.org"
                        )
                        Craigslist_Response = Craigslist_Responses["Filtered"]
                        Local_URL = f"https://{Craigslist_Location.lower()}.craigslist.org/"
                        Local_Domain = f"{Craigslist_Location.lower()}.craigslist.org"
                        Filename = Item_URL.replace(Local_URL, "")
                        Filename = Filename.replace(".html/", "")
                        Filename = Filename.replace(".html", "")
                        Filename = Filename.replace("/", "-")
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Craigslist_Response,
                            Filename, The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Local_Domain,
                                "Search Result", Task_ID, Plugin_Name.lower())
                            Output_Connections.Output(
                                [Output_file], Item_URL,
                                General.Get_Title(Item_URL),
                                Plugin_Name.lower())
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #11
def Search(Query_List, Task_ID, **kwargs):
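    # Yandex Search XML API plugin (Request_Handler variant): same XML-to-JSON
    # flow as the requests-based version above, but saves the filtered response
    # for each result URL.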

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Yandex_Details = Load_Configuration()
        Cached_Data = General.Get_Cache(Directory, Plugin_Name)
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(kwargs)

        for Query in Query_List:
            Yandex_Response = General.Request_Handler(
                f"https://{Domain}/search/xml?user={Yandex_Details[0]}&key={Yandex_Details[1]}&query={Query}&l10n=en&sortby=rlv&filter=none&maxpassages=five&groupby=attr% 3D% 22% 22.mode% 3Dflat.groups-on-page% 3D{str(Limit)}.docs-in-group% 3D1"
            )
            JSON_Response = xmltodict.parse(Yandex_Response)
            JSON_Output_Response = json.dumps(JSON_Response,
                                              indent=4,
                                              sort_keys=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Output_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Search Result",
                                                     Task_ID,
                                                     Plugin_Name.lower())
            New_JSON_Response = Recursive_Dict_Check(
                ["yandexsearch", "response", "results", "grouping", "group"],
                JSON_Response)

            if New_JSON_Response:

                for Yandex_Item_Line in New_JSON_Response:

                    try:

                        if Recursive_Dict_Check(["doc", "url"],
                                                Yandex_Item_Line):
                            Yandex_Item_Line = Yandex_Item_Line['doc']
                            Yandex_URL = Yandex_Item_Line['url']
                            Title = Recursive_Dict_Check(["title", "#text"],
                                                         Yandex_Item_Line)

                            if Title:
                                Title = f"Yandex | {Title}"

                            else:
                                Title = General.Get_Title(Yandex_URL)
                                Title = f"Yandex | {Title}"

                            if Yandex_URL not in Cached_Data and Yandex_URL not in Data_to_Cache:
                                Yandex_Item_Responses = General.Request_Handler(
                                    Yandex_URL,
                                    Application_JSON_CT=True,
                                    Accept_XML=True,
                                    Accept_Language_EN_US=True,
                                    Filter=True,
                                    Host=f"https://{Domain}")
                                Yandex_Item_Response = Yandex_Item_Responses[
                                    "Filtered"]
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    Yandex_Item_Response, Yandex_URL,
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file], Yandex_URL,
                                        Title, Plugin_Name.lower())
                                    Data_to_Cache.append(Yandex_URL)

                                else:
                                    logging.warning(
                                        f"{General.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                    except Exception as e:
                        logging.warning(
                            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}"
                        )

            else:
                logging.warning(
                    f"{General.Date()} - {__name__.strip('plugins.')} - No results found."
                )

        General.Write_Cache(Directory, Cached_Data, Data_to_Cache, Plugin_Name)

    except Exception as e:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #12
def Search(Query_List, Task_ID, Type, Limit=10):
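    # UK Companies House lookup: "UKBN" mode queries the company API with a
    # basic-auth key and archives the matching company profile page from the
    # beta site.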

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:

            try:

                if Type == "UKBN":
                    Authorization_Key = Load_Configuration()

                    if Authorization_Key:
                        Authorization_Key = "Basic " + Authorization_Key.decode('ascii')
                        headers_auth = {"Authorization": Authorization_Key}
                        Main_URL = f'https://api.{Domain}/company/{Query}'
                        Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth)
                        JSON_Object = Common.JSON_Handler(Response)
                        JSON_Response = JSON_Object.To_JSON_Loads()
                        Indented_JSON_Response = JSON_Object.Dump_JSON()

                        try:
                            Query = str(int(Query))

                            if Response and '{"errors":[{"error":"company-profile-not-found","type":"ch:service"}]}' not in Response:

                                if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                                    Current_Company_Number = str(JSON_Response["company_number"])
                                    Result_URL = f'https://beta.{Domain}/company/{Current_Company_Number}'
                                    Result_Responses = Common.Request_Handler(Result_URL, Filter=True, Host=f"https://beta.{Domain}")
                                    Result_Response = Result_Responses["Filtered"]
                                    UKCN = str(JSON_Response["company_name"])
                                    Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                                    Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Result_Response, UKCN, The_File_Extensions["Query"])

                                    if Output_file:
                                        Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name)
                                        Output_Connections.Output([Main_Output_File, Output_file], Result_URL, f"UK Business Number {Query}", Concat_Plugin_Name)
                                        Data_to_Cache.append(Main_URL)

                                    else:
                                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                        except:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for UKBN Search.")

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to retrieve API key.")

                elif Type == "UKCN":
                    Authorization_Key = Load_Configuration()

                    if Authorization_Key:
                        Authorization_Key = "Basic " + Authorization_Key.decode('ascii')
                        Limit = General.Get_Limit(Limit)

                        try:
                            Main_URL = f'https://api.{Domain}/search/companies?q={Query}&items_per_page={Limit}'
                            headers_auth = {"Authorization": Authorization_Key}
                            Response = Common.Request_Handler(Main_URL, Optional_Headers=headers_auth)
                            JSON_Object = Common.JSON_Handler(Response)
                            JSON_Response = JSON_Object.To_JSON_Loads()
                            Indented_JSON_Response = JSON_Object.Dump_JSON()

                            try:

                                if JSON_Response['total_results'] > 0:
                                    Main_Output_File = General.Main_File_Create(Directory, Plugin_Name, Indented_JSON_Response, Query, The_File_Extensions["Main"])
                                    Output_Connections = General.Connections(Query, Plugin_Name, Domain, "Company Details", Task_ID, Plugin_Name)

                                    for Item in JSON_Response['items']:
                                        UKBN_URL = Item['links']['self']
                                        Full_UKBN_URL = f'https://beta.{Domain}{str(UKBN_URL)}'
                                        # Extract the company number from the "/company/<number>" path.
                                        UKBN = UKBN_URL.replace("/company/", "")

                                        if Full_UKBN_URL not in Cached_Data and Full_UKBN_URL not in Data_to_Cache:
                                            UKCN = Item['title']
                                            Current_Responses = Common.Request_Handler(Full_UKBN_URL, Filter=True, Host=f"https://beta.{Domain}")
                                            Current_Response = Current_Responses["Filtered"]
                                            Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, str(Current_Response), UKCN, The_File_Extensions["Query"])

                                            if Output_file:
                                                Output_Connections.Output([Main_Output_File, Output_file], Full_UKBN_URL, f"UK Business Number {UKBN} for Query {Query}", Concat_Plugin_Name)
                                                Data_to_Cache.append(Full_UKBN_URL)

                                            else:
                                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist.")

                            except:
                                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Error during UKCN Search, perhaps the rate limit has been exceeded.")

                        except:
                            logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid query provided for UKCN Search.")

                    else:
                        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to retrieve API key.")

                else:
                    logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid request type.")

            except:
                logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request.")

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
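Example #12's Load_Configuration helper is not shown. Since the caller runs Authorization_Key.decode('ascii') and prefixes "Basic ", the helper presumably returns base64-encoded bytes. A minimal sketch under that assumption, with a hypothetical config.json path (the Companies House API takes the key as the Basic-auth username with an empty password):

import base64
import json
import os

def Load_Configuration():
    # Hypothetical configuration location; the real plugin defines its own.
    Configuration_File = os.path.join(os.path.dirname(__file__), "config.json")

    with open(Configuration_File, "r") as File:
        API_Key = json.load(File).get("companieshouse_api_key", "")

    if API_Key:
        # Encode "key:" so the caller can build a Basic authorization header.
        return base64.b64encode(f"{API_Key}:".encode("ascii"))

    return None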
Example #13
def Search(Query_List, Task_ID):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)

        for Query in Query_List:
            Main_URL = f"http://{Domain}/{Query}"
            Responses = Common.Request_Handler(Main_URL,
                                               Filter=True,
                                               Host=f"https://www.{Domain}")
            Response = Responses["Regular"]
            Filtered_Response = Responses["Filtered"]
            Kik_Item_Regex = Common.Regex_Handler(
                Response,
                Custom_Regex=
                rf"\<h1\sclass\=\"display\-name\"\>(.+)\<\/h1>\s+\<h2\sclass\=\"username\"\>{Query}\<\/h2\>"
            )

            if Kik_Item_Regex:

                if Kik_Item_Regex.group(1) != " ":
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Social Media - Person",
                        Task_ID, Plugin_Name.lower())
                    Title = f"Kik | {Kik_Item_Regex.group(1)}"

                    if Main_URL not in Cached_Data and Main_URL not in Data_to_Cache:
                        Output_file = General.Main_File_Create(
                            Directory, Plugin_Name, Filtered_Response, Query,
                            The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Output_file],
                                                      Main_URL, Title,
                                                      Plugin_Name.lower())
                            Data_to_Cache.append(Main_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

            else:
                logging.info(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Query didn't match regex pattern."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #14
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            # Query can be Title or ISBN
            Main_URL = f"http://{Domain}/search.php?req={Query}&lg_topic=libgen&open=0&view=simple&res=100&phrase=1&column=def"
            Lib_Gen_Response = Common.Request_Handler(Main_URL)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Lib_Gen_Response, Query,
                                                 The_File_Extension)
            Lib_Gen_Regex = Common.Regex_Handler(
                Lib_Gen_Response,
                Custom_Regex=r"book\/index\.php\?md5=[A-Fa-f0-9]{32}",
                Findall=True)

            if Lib_Gen_Regex:
                Current_Step = 0

                for Regex in Lib_Gen_Regex:
                    Item_URL = f"http://{Domain}/{Regex}"
                    Title = General.Get_Title(Item_URL).replace(
                        "Genesis:", "Genesis |")
                    Lib_Item_Responses = Common.Request_Handler(
                        Item_URL, Filter=True, Host=f"http://{Domain}")
                    Lib_Item_Response = Lib_Item_Responses["Filtered"]

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Lib_Item_Response,
                            Regex, The_File_Extension)

                        if Output_file:
                            Output_Connections = General.Connections(
                                Query, Plugin_Name, Domain, "Publication",
                                Task_ID, Concat_Plugin_Name)
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #15
def Search(Query_List, Task_ID, **kwargs):
    Data_to_Cache = []
    Cached_Data = []

    if kwargs.get('Limit') and int(kwargs["Limit"]) > 0:
        Limit = int(kwargs["Limit"])

    else:
        Limit = 10

    Directory = General.Make_Directory(Plugin_Name.lower())

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    Log_File = General.Logging(Directory, Plugin_Name.lower())
    handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    try:
        File_Dir = os.path.dirname(os.path.realpath('__file__'))
        Configuration_File = os.path.join(File_Dir, 'plugins/common/config/RSS_Feeds.txt')

        with open(Configuration_File, "r") as Current_File: # Read one RSS feed URL per line.
            URLs = Current_File.read().splitlines()

    except Exception:
        logging.warning(General.Date() + " Failed to open the RSS feed configuration file; please provide a valid file.")
        URLs = []

    Cached_Data = General.Get_Cache(Directory, Plugin_Name)

    if not Cached_Data:
        Cached_Data = []

    Query_List = General.Convert_to_List(Query_List)

    for Query in Query_List:

        for URL in URLs: # URLs to be controlled by the web app.
            RSS = feedparser.parse(URL)
            Current_Step = 0

            for Feed in RSS.entries:

                if Query in Feed.description:
                    Dump_Types = General.Data_Type_Discovery(Feed.description)
                    File_Link = Feed.link.replace("https://", "")
                    File_Link = File_Link.replace("http://", "")
                    File_Link = File_Link.replace("www.", "")
                    File_Link = File_Link.replace("/", "-")
                    Domain = URL.replace("https://", "")
                    Domain = Domain.replace("http://", "")
                    Domain = Domain.replace("www.", "")

                    if Feed.link not in Cached_Data and Feed.link not in Data_to_Cache and Current_Step < int(Limit):
                        Output_file = General.Create_Query_Results_Output_File(Directory, Query, Plugin_Name, Feed.description, File_Link, The_File_Extension)

                        if Output_file:
                            General.Connections(Output_file, Query, Plugin_Name, Feed.link, Domain, "Data Leakage", Task_ID, General.Get_Title(Feed.link), Plugin_Name.lower(), Dump_Types=Dump_Types)
                            Data_to_Cache.append(Feed.link)

                        else:
                            logging.warning(General.Date() + " Failed to create output file. File may already exist.")

                        Current_Step += 1

                else:
                    logging.info(General.Date() + " Query not found.")

    if Cached_Data:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "a")

    else:
        General.Write_Cache(Directory, Data_to_Cache, Plugin_Name, "w")
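Example #15's core loop parses each configured feed with feedparser and keeps entries whose description mentions the query. Stripped of the caching and output plumbing, and with a placeholder feed URL, the matching step reduces to roughly:

import feedparser

def Matching_Entries(Feed_URL, Query):
    # Yield (link, description) pairs for entries that mention the query.
    RSS = feedparser.parse(Feed_URL)

    for Feed in RSS.entries:

        if Query in Feed.get("description", ""):
            yield Feed.link, Feed.description

# Usage with a placeholder feed:
for Link, Description in Matching_Entries("https://example.com/feed.xml", "breach"):
    print(Link)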
Example #16
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Plugin_Name.lower())
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        YouTube_Details = Load_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            YouTube_Handler = discovery.build(YouTube_Details[1],
                                              YouTube_Details[2],
                                              developerKey=YouTube_Details[0],
                                              cache_discovery=False)
            Search_Response = YouTube_Handler.search().list(
                q=Query,
                type='video',
                part='id,snippet',
                maxResults=Limit,
            ).execute()
            JSON_Object = Common.JSON_Handler(Search_Response.get('items', []))
            JSON_Output_Response = JSON_Object.Dump_JSON()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Output_Response, Query,
                                                 The_File_Extensions["Main"])
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain,
                                                     "Social Media - Media",
                                                     Task_ID,
                                                     Plugin_Name.lower())

            for Search_Result in Search_Response.get('items', []):
                Full_Video_URL = f"https://www.{Domain}/watch?v=" + Search_Result[
                    'id']['videoId']
                Search_Video_Responses = Common.Request_Handler(
                    Full_Video_URL, Filter=True, Host=f"https://www.{Domain}")
                Search_Video_Response = Search_Video_Responses["Filtered"]
                Title = "YouTube | " + Search_Result['snippet']['title']

                if Full_Video_URL not in Cached_Data and Full_Video_URL not in Data_to_Cache:
                    Output_file = General.Create_Query_Results_Output_File(
                        Directory, Query, Plugin_Name, Search_Video_Response,
                        Search_Result['id']['videoId'],
                        The_File_Extensions["Query"])

                    if Output_file:
                        Output_Connections.Output([Main_File, Output_file],
                                                  Full_Video_URL, Title,
                                                  Plugin_Name.lower())
                        Data_to_Cache.append(Full_Video_URL)

                    else:
                        logging.warning(
                            f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                        )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")