Example #1
def Load_Configuration():
    logging.info(
        f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:
        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Reddit_Details = Configuration_Data[Plugin_Name.lower()]
            Reddit_Client_ID = Reddit_Details['client_id']
            Reddit_Client_Secret = Reddit_Details['client_secret']
            Reddit_User_Agent = Reddit_Details['user_agent']
            Reddit_Username = Reddit_Details['username']
            Reddit_Password = Reddit_Details['password']
            Subreddit_to_Search = Reddit_Details["subreddits"]

            if Reddit_Client_ID and Reddit_Client_Secret and Reddit_User_Agent and Reddit_Username and Reddit_Password and Subreddit_to_Search:
                return [
                    Reddit_Client_ID, Reddit_Client_Secret, Reddit_User_Agent,
                    Reddit_Username, Reddit_Password, Subreddit_to_Search
                ]

            else:
                return None
    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load Reddit details."
        )
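For reference, a minimal sketch of the configuration block this function expects to find in config.json; the "reddit" key name and the placeholder values are assumptions inferred from the lookups above.

# Illustrative only: key names mirror the lookups in Load_Configuration(), values are placeholders.
Example_Reddit_Configuration = {
    "reddit": {
        "client_id": "<client-id>",
        "client_secret": "<client-secret>",
        "user_agent": "<user-agent>",
        "username": "<username>",
        "password": "<password>",
        "subreddits": "<subreddits-to-search>"
    }
}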
Example #2
    def __init__(self, File_Path, Internally_Requested, **kwargs):
        self.Internally_Requested = Internally_Requested
        self.Chrome_Config = Connectors.Load_Chrome_Configuration()
        self.File_Path = File_Path
        self.Connection = Connectors.Load_Main_Database()
        self.Cursor = self.Connection.cursor()

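        # Externally-requested screenshots carry a screenshot ID and requesting user; internal requests only supply a link.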
        if not self.Internally_Requested and kwargs.get(
                'Screenshot_ID') and kwargs.get('Screenshot_User'):
            self.Screenshot_ID = kwargs['Screenshot_ID']
            self.Screenshot_User = kwargs['Screenshot_User']

        elif self.Internally_Requested and kwargs.get('Screenshot_Link'):
            self.Screenshot_ID = False
            self.Screenshot_User = False
            self.Screenshot_Link = kwargs['Screenshot_Link']
Example #3
def Load_Configuration():
    logging.info(
        f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Twitter_Details = Configuration_Data[Plugin_Name.lower()]
            Consumer_Key = Twitter_Details['CONSUMER_KEY']
            Consumer_Secret = Twitter_Details['CONSUMER_SECRET']
            Access_Key = Twitter_Details['ACCESS_KEY']
            Access_Secret = Twitter_Details['ACCESS_SECRET']

            if Consumer_Key and Consumer_Secret and Access_Key and Access_Secret:
                return [
                    Consumer_Key, Consumer_Secret, Access_Key, Access_Secret
                ]

            else:
                return None

    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load Twitter details."
        )
Example #4
def Starter(Task_ID):
    Connection = Connectors.Load_Main_Database()
    Cursor = Connection.cursor()
    PSQL_Update_Query = 'UPDATE tasks SET status = %s WHERE task_id = %s'
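    # The %s placeholders are DB-API parameters; the status string and task ID are passed separately from the SQL text.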
    Cursor.execute(PSQL_Update_Query, (
        "Running",
        int(Task_ID),
    ))
    Connection.commit()
Example #5
def Load_Configuration():
    
    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:  
            Configuration_Data = json.load(JSON_File)
            SSLMate_Details = Configuration_Data[Plugin_Name.lower()]
            SSLMate_Subdomains = SSLMate_Details['search_subdomain']
            return [SSLMate_Subdomains]

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load configuration details.")
Example #6
def Load_Configuration():
    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Pinterest_Details = Configuration_Data[Plugin_Name.lower()]

            if Pinterest_Details['oauth_token']:
                return Pinterest_Details['oauth_token']

            else:
                return None

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load location details.")
Example #7
def Load_Location_Configuration():
    Valid_Locations = [
        'ac', 'ac', 'ad', 'ae', 'af', 'af', 'ag', 'ag', 'ai', 'ai', 'al', 'am',
        'am', 'ao', 'aq', 'ar', 'as', 'at', 'au', 'az', 'ba', 'bd', 'be', 'bf',
        'bg', 'bh', 'bi', 'bi', 'bj', 'bn', 'bo', 'bo', 'br', 'bs', 'bt', 'bw',
        'by', 'by', 'bz', 'ca', 'cc', 'cd', 'cf', 'cg', 'ch', 'ci', 'ck', 'cl',
        'cm', 'cn', 'cn', 'co', 'co', 'co', 'cr', 'cu', 'cv', 'cy', 'cz', 'de',
        'dj', 'dk', 'dm', 'do', 'dz', 'ec', 'ec', 'ee', 'eg', 'es', 'et', 'eu',
        'fi', 'fj', 'fm', 'fr', 'ga', 'ge', 'ge', 'gf', 'gg', 'gh', 'gi', 'gl',
        'gm', 'gp', 'gp', 'gr', 'gr', 'gt', 'gy', 'gy', 'gy', 'hk', 'hk', 'hn',
        'hr', 'ht', 'ht', 'hu', 'hu', 'id', 'id', 'ie', 'il', 'im', 'im', 'in',
        'in', 'io', 'iq', 'iq', 'is', 'it', 'je', 'je', 'jm', 'jo', 'jo', 'jp',
        'jp', 'ke', 'kg', 'kh', 'ki', 'kr', 'kw', 'kz', 'kz', 'la', 'lb', 'lc',
        'li', 'lk', 'ls', 'lt', 'lu', 'lv', 'ly', 'ma', 'ma', 'md', 'me', 'mg',
        'mk', 'ml', 'mm', 'mn', 'ms', 'mt', 'mu', 'mv', 'mw', 'mx', 'mx', 'my',
        'mz', 'na', 'ne', 'nf', 'ng', 'ng', 'ni', 'nl', 'no', 'np', 'nr', 'nr',
        'nu', 'nz', 'om', 'pa', 'pe', 'pe', 'pf', 'pg', 'ph', 'pk', 'pk', 'pl',
        'pl', 'pn', 'pr', 'ps', 'ps', 'pt', 'py', 'qa', 'qa', 're', 'ro', 'rs',
        'rs', 'ru', 'ru', 'rw', 'sa', 'sb', 'sc', 'se', 'sg', 'sh', 'si', 'sk',
        'sl', 'sl', 'sm', 'sn', 'so', 'sr', 'st', 'sv', 'sy', 'td', 'tg', 'th',
        'tj', 'tk', 'tl', 'tm', 'tn', 'to', 'tt', 'tz', 'ua', 'ua', 'ug', 'uk',
        'us', 'us', 'uy', 'uz', 'uz', 'vc', 've', 've', 'vg', 'vi', 'vn', 'vu',
        'ws', 'za', 'zm', 'zw'
    ]

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            General_Details = Configuration_Data['general']
            Location = General_Details['location']

            if (len(Location) > 2) or (Location not in Valid_Locations):
                logging.warning(
                    f"{Date()} General Library - An invalid location has been specified, please provide a valid location in the config.json file."
                )

            else:
                logging.info(
                    f"{Date()} General Library - Country code {Location} selected."
                )
                return Location

    except:
        logging.warning(
            f"{Date()} General Library - Failed to load location details.")
Example #8
def Load_Configuration():
    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Flickr_Details = Configuration_Data[Plugin_Name.lower()]

            if Flickr_Details['api_key'] and Flickr_Details['api_secret']:
                return [Flickr_Details['api_key'], Flickr_Details['api_secret']]

            else:
                return None

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load location details.")
Example #9
def Load_Configuration():
    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:  
            Configuration_Data = json.load(JSON_File)
            Google_Details = Configuration_Data[Plugin_Name.lower()]
            Google_CX = Google_Details['cx']
            Google_Application_Name = Google_Details['application_name']
            Google_Application_Version = Google_Details['application_version']
            Google_Developer_Key = Google_Details['developer_key']

            if Google_CX and Google_Application_Name and Google_Application_Version and Google_Developer_Key:
                return [Google_CX, Google_Application_Name, Google_Application_Version, Google_Developer_Key]

            else:
                return None

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load API details.")
Example #10
def Load_Web_Scrape_Risk_Configuration():

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Web_Scrape_Details = Configuration_Data['web-scraping']
            Risk_Level = int(Web_Scrape_Details['risk-level'])
            Automated_Screenshots = Web_Scrape_Details['automated-screenshots']

            if Risk_Level > 3 or Risk_Level < 0:
                logging.warning(
                    f"{Date()} General Library - An invalid number has been specified, please provide a valid risk level in the config.json file, with a value from 1 to 3."
                )

            else:
                return [Risk_Level, Automated_Screenshots]

    except:
        logging.warning(
            f"{Date()} General Library - Failed to load location details.")
Example #11
def Load_Configuration():
    logging.info(
        f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            HIBP_Details = Configuration_Data[Concat_Plugin_Name]
            API_Key = HIBP_Details['api_key']

            if API_Key:
                return API_Key

            else:
                return None

    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load API details."
        )
Example #12
def Load_Configuration():
    logging.info(
        f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            VK_Details = Configuration_Data[Plugin_Name.lower()]
            VK_Access_Token = VK_Details['access_token']

            if VK_Access_Token:
                return VK_Access_Token

            else:
                return None

    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load API details."
        )
Example #13
def Load_Configuration():
    logging.info(f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data.")

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:  
            Configuration_Data = json.load(JSON_File)
            YouTube_Details = Configuration_Data[Plugin_Name.lower()]
            YouTube_Developer_Key = YouTube_Details['developer_key']
            YouTube_Application_Name = YouTube_Details['application_name']
            YouTube_Application_Version = YouTube_Details['application_version']
            YouTube_Location = YouTube_Details['location']
            YouTube_Location_Radius = YouTube_Details['location_radius']

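            # Only the developer key, application name, and version are required; the location values are returned as-is and may be empty.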
            if YouTube_Developer_Key and YouTube_Application_Name and YouTube_Application_Version:
                return [YouTube_Developer_Key, YouTube_Application_Name, YouTube_Application_Version, YouTube_Location, YouTube_Location_Radius]

            else:
                return None

    except:
        logging.warning(f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load location details.")
Example #14
def Load_Configuration():
    logging.info(
        f"{General.Date()} - {__name__.strip('plugins.')} - Loading configuration data."
    )

    try:

        with open(Connectors.Set_Configuration_File()) as JSON_File:
            Configuration_Data = json.load(JSON_File)
            Yandex_Details = Configuration_Data[Plugin_Name.lower()]
            Yandex_User = Yandex_Details['username']
            Yandex_API_Key = Yandex_Details['api_key']

            if Yandex_User and Yandex_API_Key:
                return [Yandex_User, Yandex_API_Key]

            else:
                return None

    except:
        logging.warning(
            f"{General.Date()} - {__name__.strip('plugins.')} - Failed to load API details."
        )
Example #15
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Plugin_Name.lower())
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:

            try:
                Request_Query = urllib.parse.quote(Query)
                Main_URL = f"http://{Domain}/search?term={Request_Query}&country={Location}&entity=software&limit={str(Limit)}"
                Response = Common.Request_Handler(Main_URL)

            except:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to make request, are you connected to the internet?"
                )
                break

            JSON_Object = Common.JSON_Handler(Response)
            JSON_Response = JSON_Object.To_JSON_Loads()
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 JSON_Object.Dump_JSON(),
                                                 Query,
                                                 The_File_Extensions["Main"])

            if 'resultCount' in JSON_Response:

                if JSON_Response['resultCount'] > 0:
                    Output_Connections = General.Connections(
                        Query, Plugin_Name, Domain, "Application", Task_ID,
                        Concat_Plugin_Name)

                    for JSON_Object in JSON_Response['results']:
                        JSON_Object_Responses = Common.Request_Handler(
                            JSON_Object['artistViewUrl'],
                            Filter=True,
                            Host=f"https://{Domain}")
                        JSON_Object_Response = JSON_Object_Responses[
                            "Filtered"]

                        if JSON_Object[
                                'artistViewUrl'] not in Cached_Data and JSON_Object[
                                    'artistViewUrl'] not in Data_to_Cache:
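                            # Pull the numeric developer ID ("id" followed by 9-10 digits) out of the App Store developer URL.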
                            Apple_Store_Regex = Common.Regex_Handler(
                                JSON_Object['artistViewUrl'],
                                Custom_Regex=r"https\:\/\/apps\.apple\.com\/" +
                                rf"{Location}" +
                                r"\/developer\/[\w\d\-]+\/(id[\d]{9,10})\?.+")

                            if Apple_Store_Regex:
                                Output_file = General.Create_Query_Results_Output_File(
                                    Directory, Query, Plugin_Name,
                                    JSON_Object_Response,
                                    Apple_Store_Regex.group(1),
                                    The_File_Extensions["Query"])

                                if Output_file:
                                    Output_Connections.Output(
                                        [Main_File, Output_file],
                                        JSON_Object['artistViewUrl'],
                                        General.Get_Title(
                                            JSON_Object['artistViewUrl']),
                                        Concat_Plugin_Name)
                                    Data_to_Cache.append(
                                        JSON_Object['artistViewUrl'])

                                else:
                                    logging.warning(
                                        f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                                    )

                else:
                    logging.warning(
                        f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value provided, value not greater than 0."
                    )

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Invalid value."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #16
def Connections(Complete_File, Input, Plugin_Name, Link, Domain, Result_Type,
                Task_ID, DB_Title, Concat_Plugin_Name, **kwargs):

    if "Dump_Types" in kwargs:
        Dump_Types = kwargs["Dump_Types"]
        Title = "Data for input: " + Input + ", found by Scrummage plugin " + Plugin_Name + ".\nData types include: " + ", ".join(
            Dump_Types) + ".\nAll data is stored in " + Complete_File + "."
        Ticket_Subject = "Scrummage " + Plugin_Name + " results for query " + Input + "."
        Ticket_Text = "Results were identified for the search " + Input + " performed by the Scrummage plugin " + Plugin_Name + ".\nThe following types of sensitive data were found:\n - " + "\n - ".join(
            Dump_Types
        ) + ". Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk. The result data is stored in a file located at " + Complete_File + "."

    else:
        Title = "Data for input: " + Input + ", found by Scrummage plugin " + Plugin_Name + ".\nAll data is stored in " + Complete_File + "."
        Ticket_Subject = "Scrummage " + Plugin_Name + " results for query " + Input + "."
        Ticket_Text = "Results were identified for the search " + Input + " performed by the Scrummage plugin " + Plugin_Name + ". Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk. The result data is stored in a file located at " + Complete_File + "."

    Connectors.Scumblr_Main(Input, DB_Title, Title)
    Connectors.RTIR_Main(Ticket_Subject, Ticket_Text)
    Connectors.JIRA_Main(Ticket_Subject, Ticket_Text)
    Connectors.Email_Main(Ticket_Subject, Ticket_Text)
    Connectors.Slack_Main(Ticket_Text)
    Relative_File = Complete_File.replace(
        os.path.dirname(os.path.realpath('__file__')), "")
    logging.info(
        str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) +
        " Adding item to Scrummage database.")

    if DB_Title:
        Connectors.Main_Database_Insert(DB_Title, Plugin_Name, Domain, Link,
                                        Result_Type, Relative_File, Task_ID)
        Connectors.Elasticsearch_Main(DB_Title, Plugin_Name, Domain, Link,
                                      Result_Type, Relative_File, Task_ID,
                                      Concat_Plugin_Name)
        Connectors.CSV_Output(DB_Title, Plugin_Name, Domain, Link, Result_Type,
                              Relative_File, Task_ID)
        Connectors.Defect_Dojo_Output(DB_Title, Ticket_Text)

    else:
        Connectors.Main_Database_Insert(Plugin_Name, Plugin_Name, Domain, Link,
                                        Result_Type, Relative_File, Task_ID)
        Connectors.Elasticsearch_Main(Plugin_Name, Plugin_Name, Domain, Link,
                                      Result_Type, Relative_File, Task_ID,
                                      Concat_Plugin_Name)
        Connectors.CSV_Output(Plugin_Name, Plugin_Name, Domain, Link,
                              Result_Type, Relative_File, Task_ID)
        Connectors.Defect_Dojo_Output(Plugin_Name, Ticket_Text)
Example #17
def Search(Query_List, Task_ID, Limit=10):

    try:
        Data_to_Cache = []
        Directory = General.Make_Directory(Concat_Plugin_Name)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        Log_File = General.Logging(Directory, Concat_Plugin_Name)
        handler = logging.FileHandler(os.path.join(Directory, Log_File), "w")
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter("%(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        Location = Connectors.Load_Location_Configuration()
        Cached_Data_Object = General.Cache(Directory, Plugin_Name)
        Cached_Data = Cached_Data_Object.Get_Cache()
        Query_List = General.Convert_to_List(Query_List)
        Limit = General.Get_Limit(Limit)

        for Query in Query_List:
            Main_URL = f"https://www.{Domain}/en-{Location}/search?q={Query}"
            Win_Store_Response = Common.Request_Handler(
                Main_URL,
                Application_JSON_CT=True,
                Accept_XML=True,
                Accept_Language_EN_US=True)
            Main_File = General.Main_File_Create(Directory, Plugin_Name,
                                                 Win_Store_Response, Query,
                                                 The_File_Extension)
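            # Note: the search URL above uses the configured location, but this regex and Item_URL below assume the "en-au" locale.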
            Win_Store_Regex = Common.Regex_Handler(
                Win_Store_Response,
                Custom_Regex=r"\/en\-au\/p\/([\w\-]+)\/([\w\d]+)",
                Findall=True)
            Output_Connections = General.Connections(Query, Plugin_Name,
                                                     Domain, "Application",
                                                     Task_ID,
                                                     Concat_Plugin_Name)

            if Win_Store_Regex:
                Current_Step = 0

                for Regex_Group_1, Regex_Group_2 in Win_Store_Regex:
                    Item_URL = f"https://www.microsoft.com/en-au/p/{Regex_Group_1}/{Regex_Group_2}"
                    Win_Store_Responses = Common.Request_Handler(
                        Item_URL,
                        Application_JSON_CT=True,
                        Accept_XML=True,
                        Accept_Language_EN_US=True,
                        Filter=True,
                        Host=f"https://www.{Domain}")
                    Win_Store_Response = Win_Store_Responses["Filtered"]
                    Title = "Windows Store | " + General.Get_Title(Item_URL)

                    if Item_URL not in Cached_Data and Item_URL not in Data_to_Cache and Current_Step < int(
                            Limit):
                        Output_file = General.Create_Query_Results_Output_File(
                            Directory, Query, Plugin_Name, Win_Store_Response,
                            Regex_Group_1, The_File_Extension)

                        if Output_file:
                            Output_Connections.Output([Main_File, Output_file],
                                                      Item_URL, Title,
                                                      Concat_Plugin_Name)
                            Data_to_Cache.append(Item_URL)

                        else:
                            logging.warning(
                                f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to create output file. File may already exist."
                            )

                        Current_Step += 1

            else:
                logging.warning(
                    f"{Common.Date()} - {__name__.strip('plugins.')} - Failed to match regular expression."
                )

        Cached_Data_Object.Write_Cache(Data_to_Cache)

    except Exception as e:
        logging.warning(
            f"{Common.Date()} - {__name__.strip('plugins.')} - {str(e)}")
Example #18
    def Output(self, Complete_File_List, Link, DB_Title, Directory_Plugin_Name,
               **kwargs):

        try:
            Text_Complete_Files = "\n- ".join(Complete_File_List)

            if kwargs.get("Dump_Types"):
                self.Dump_Types = kwargs["Dump_Types"]
                Joined_Dump_Types = ", ".join(self.Dump_Types)
                self.Title = f"Data for input: {self.Input}, found by Scrummage plugin {self.Plugin_Name}.\nData types include: {Joined_Dump_Types}.\nAll data is stored in\n- {Text_Complete_Files}."
                self.Ticket_Subject = f"Scrummage {self.Plugin_Name} results for query {self.Input}."
                NL_Joined_Dump_Types = "\n- ".join(self.Dump_Types)
                self.Ticket_Text = f"Results were identified for the search {self.Input} performed by the Scrummage plugin {self.Plugin_Name}.\nThe following types of sensitive data were found:\n- {NL_Joined_Dump_Types}. Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk.\n\nResult data can be found in the following output files:\n- {Text_Complete_Files}."

            else:
                self.Title = f"Data for input: {self.Input}, found by Scrummage plugin {self.Plugin_Name}.\nAll data is stored in the files:\n- {Text_Complete_Files}."
                self.Ticket_Subject = f"Scrummage {self.Plugin_Name} results for query {self.Input}."
                self.Ticket_Text = f"Results were identified for the search {self.Input} performed by the Scrummage plugin {self.Plugin_Name}. Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk.\n\nResult data can be found in the following output files:\n- {Text_Complete_Files}."

        except:
            logging.warning(
                f"{Date()} General Library - Error setting unique variables.")

        logging.info(
            f"{Date()} General Library - Adding item to Scrummage database and other configured outputs."
        )
        CSV_File = Connectors.CSV_Output(DB_Title, self.Plugin_Name,
                                         self.Domain, Link, self.Result_Type,
                                         ", ".join(Complete_File_List),
                                         self.Task_ID, Directory_Plugin_Name)
        DOCX_File = Connectors.DOCX_Output(DB_Title, self.Plugin_Name,
                                           self.Domain, Link, self.Result_Type,
                                           "\n".join(Complete_File_List),
                                           self.Task_ID, Directory_Plugin_Name)

        if CSV_File:
            Complete_File_List.append(CSV_File)

        if DOCX_File:
            Complete_File_List.append(DOCX_File)

        Relative_File_List = []

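        # Strip the current working directory from each path so only relative paths are stored.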
        for File in Complete_File_List:
            Relative_File = File.replace(
                os.path.dirname(os.path.realpath('__file__')), "")
            Relative_File_List.append(Relative_File)

        Connectors.Main_Database_Insert(DB_Title, self.Plugin_Name,
                                        self.Domain, Link, self.Result_Type,
                                        ", ".join(Relative_File_List),
                                        self.Task_ID)
        Connectors.Elasticsearch_Main(DB_Title, self.Plugin_Name, self.Domain,
                                      Link, self.Result_Type,
                                      ", ".join(Complete_File_List),
                                      self.Task_ID, self.Concat_Plugin_Name)
        Connectors.Defect_Dojo_Output(DB_Title, self.Ticket_Text)
        Connectors.Scumblr_Main(self.Input, DB_Title, self.Title)
        Connectors.RTIR_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.JIRA_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.Email_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.Slack_Main(self.Ticket_Text)
Example #19
    # The excerpt begins mid-statement; presumably an argparse parser is constructed here.
    Parser = argparse.ArgumentParser(
        description='Plugin Caller calls Scrummage plugins.')
    Parser.add_argument(
        '-t',
        '--task',
        help=
        'This option is used to specify a task ID to run. ./plugin_caller.py -t 1'
    )
    Arguments = Parser.parse_args()

    Task_ID = 0

    if Arguments.task:

        try:
            Task_ID = int(Arguments.task)
            Connection = Connectors.Load_Main_Database()
            cursor = Connection.cursor()
            PSQL_Select_Query = 'SELECT * FROM tasks WHERE task_id = %s;'
            cursor.execute(PSQL_Select_Query, (Task_ID, ))
            result = cursor.fetchone()

            if result:
                print(result[2])
                print(result[5])
                Plugin_to_Call = Plugin_Caller(Plugin_Name=result[2],
                                               Limit=result[5],
                                               Task_ID=Task_ID,
                                               Query=result[1])
                Plugin_to_Call.Call_Plugin()

        except:
            # The original excerpt is truncated here.
            pass
Example #20
    def Output(self, Complete_File, Link, DB_Title, **kwargs):

        try:

            if "Dump_Types" in kwargs:
                self.Dump_Types = kwargs["Dump_Types"]
                self.Title = "Data for input: " + self.Input + ", found by Scrummage plugin " + self.Plugin_Name + ".\nData types include: " + ", ".join(
                    self.Dump_Types
                ) + ".\nAll data is stored in " + Complete_File + "."
                self.Ticket_Subject = "Scrummage " + self.Plugin_Name + " results for query " + self.Input + "."
                self.Ticket_Text = "Results were identified for the search " + self.Input + " performed by the Scrummage plugin " + self.Plugin_Name + ".\nThe following types of sensitive data were found:\n - " + "\n - ".join(
                    self.Dump_Types
                ) + ". Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk. The result data is stored in a file located at " + Complete_File + "."

            else:
                self.Title = "Data for input: " + self.Input + ", found by Scrummage plugin " + self.Plugin_Name + ".\nAll data is stored in " + Complete_File + "."
                self.Ticket_Subject = "Scrummage " + self.Plugin_Name + " results for query " + self.Input + "."
                self.Ticket_Text = "Results were identified for the search " + self.Input + " performed by the Scrummage plugin " + self.Plugin_Name + ". Please ensure these results do not pose a threat to your organisation, and take the appropriate action necessary if they pose a security risk. The result data is stored in a file located at " + Complete_File + "."

        except:
            logging.warning(Date() + " Error setting unique variables.")

        Connectors.Scumblr_Main(self.Input, DB_Title, self.Title)
        Connectors.RTIR_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.JIRA_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.Email_Main(self.Ticket_Subject, self.Ticket_Text)
        Connectors.Slack_Main(self.Ticket_Text)
        Relative_File = Complete_File.replace(
            os.path.dirname(os.path.realpath('__file__')), "")
        logging.info(Date() + " Adding item to Scrummage database.")

        if DB_Title:
            Connectors.Main_Database_Insert(DB_Title, self.Plugin_Name,
                                            self.Domain, Link,
                                            self.Result_Type, Relative_File,
                                            self.Task_ID)
            Connectors.Elasticsearch_Main(DB_Title, self.Plugin_Name,
                                          self.Domain, Link, self.Result_Type,
                                          Relative_File, self.Task_ID,
                                          self.Concat_Plugin_Name)
            Connectors.CSV_Output(DB_Title, self.Plugin_Name, self.Domain,
                                  Link, self.Result_Type, Relative_File,
                                  self.Task_ID)
            Connectors.DOCX_Output(DB_Title, self.Plugin_Name, self.Domain,
                                   Link, self.Result_Type, Relative_File,
                                   self.Task_ID)
            Connectors.Defect_Dojo_Output(DB_Title, self.Ticket_Text)

        else:
            Connectors.Main_Database_Insert(self.Plugin_Name, self.Plugin_Name,
                                            self.Domain, Link,
                                            self.Result_Type, Relative_File,
                                            self.Task_ID)
            Connectors.Elasticsearch_Main(self.Plugin_Name, self.Plugin_Name,
                                          self.Domain, Link, self.Result_Type,
                                          Relative_File, self.Task_ID,
                                          self.Concat_Plugin_Name)
            Connectors.CSV_Output(self.Plugin_Name, self.Plugin_Name,
                                  self.Domain, Link, self.Result_Type,
                                  Relative_File, self.Task_ID)
            Connectors.DOCX_Output(self.Plugin_Name, self.Plugin_Name,
                                   self.Domain, Link, self.Result_Type,
                                   Relative_File, self.Task_ID)
            Connectors.Defect_Dojo_Output(self.Plugin_Name, self.Ticket_Text)