Example #1
	def handleEdit(self, confInfo):
		facility = config_file + '_edit'
		logger = setup_logger(app_config["log_level"], setup_log, facility)
		logger.debug("Setup edit handler started")

		config_id = self.callerArgs.id
		config = self.callerArgs.data
		logger.debug("Config: %s/%s" % (config_id, config))

		new_config = {}
		for k, v in list(config.items()):
			try:
				if isinstance(v, list) and len(v) == 1:
					v = v[0]
				logger.debug(v)
				if k == 'stanza':
					logger.debug("Setting stanza to %s" % v)
					config_id = v
				else:
					if v is None:
						logger.debug('%s Setting %s to blank', facility, k)
						new_config[k] = ''
					else:
						new_config[k] = v
			except BaseException as e:
				logger.exception("Error parsing config value \"%s\": %s" % (v, repr(e)))
		logger.debug("%s Writing new config for %s: %s", facility, config_id, str(new_config))
		try:
			# Write the config stanza
			self.writeConf(config_file, config_id, new_config)
		except BaseException as e:
			logger.critical("%s Error writing config: %s", facility, repr(e))
			exit(1)
Example #2
    def delete_key_from_event(self, delete_event):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))
            exit(1)

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            print("Could not create logger: " + repr(e))
            exit(1)

        url_tmpl_delete = '%(server_uri)s/servicesNS/%(owner)s/%(app)s/storage/collections/data/%(collection)s/%(id)s?output_mode=json'
        headers = {
            'Authorization': 'Splunk %s' % self.session_key,
            'Content-Type': 'application/json'
        }

        for key, value in list(delete_event.items()):
            if key == '_key' and len(value) > 0:
                logger.debug("Found %s (%s) in event" % (key, value))
                try:
                    delete_url = url_tmpl_delete % dict(
                        server_uri=self.splunkd_uri,
                        owner='nobody',
                        app=self.app,
                        collection=self.collection,
                        id=urllib.parse.quote(value, safe=''))
                    logger.debug("Delete url: " + delete_url)

                    response = response_code = None
                    try:
                        response, response_code = request(
                            'DELETE', delete_url, '', headers)
                        logger.debug('Server response: %s' % response)
                    except BaseException as e:
                        logger.error('ERROR Failed to delete key: %s', repr(e))

                    if response_code == 200:
                        logger.debug("Successfully deleted " + key)
                        delete_event['delete_status'] = "success"
                        return delete_event
                    else:
                        logger.error("Error %d deleting %s: %s" %
                                     (response_code, key, response))
                        delete_event['delete_status'] = "error"
                        return delete_event
                except BaseException as e:
                    logger.error("Error deleting %s: %s" % (key, repr(e)))
                    delete_event['delete_status'] = "error"
                    return delete_event
Example #3
    def handleList(self, confInfo):
        facility = config_file + '_list'
        logger = setup_logger(app_config["log_level"], setup_log, facility)
        confDict = self.readConf(config_file)
        credentials = {}
        logger.info(config_file + " list handler started")

        try:
            session_key = self.getSessionKey()
            entity = en.getEntity('/server',
                                  'settings',
                                  namespace='-',
                                  sessionKey=session_key,
                                  owner='-')
            splunkd_port = entity["mgmtHostPort"]
            service = client.connect(token=session_key, port=splunkd_port)

            # Get all credentials for this app
            storage_passwords = service.storage_passwords

            for credential in storage_passwords:
                if credential.access.app == app:
                    credentials[credential._state.title] = {
                        'username': credential.content.get('username'),
                        'password': credential.content.get('clear_password'),
                        'realm': credential.content.get('realm')
                    }

        except BaseException as e:
            logger.exception('Could not connect to service: %s' % e)
            raise

        if confDict is not None:
            for stanza, settings in list(confDict.items()):
                for k, v in list(settings.items()):
                    if stanza != 'default':
                        logger.debug("%s stanza: %s, key: %s, value: %s",
                                     facility, stanza, k, v)
                        if k.lower() in password_options and v is not None and len(v) > 0 and '$7$' not in v:
                            v = encrypt_new(splunk_secret, v)

                        if 'credential' in k:
                            if v in list(credentials.keys()):
                                confInfo[stanza].append(
                                    k + '_username',
                                    credentials[v]['username'])
                                confInfo[stanza].append(
                                    k + '_realm', credentials[v]['realm'])
                                confInfo[stanza].append(
                                    k + '_password',
                                    credentials[v]['password'])

                        confInfo[stanza].append(k, v)
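The credential lookup above goes through the splunklib SDK's `storage_passwords` collection. A condensed sketch of just that enumeration, assuming a valid session key and management port (the `username`/`clear_password`/`realm` content fields belong to the storage password entity):

import splunklib.client as client

def get_app_credentials(session_key, splunkd_port, app_name):
    # Connect to splunkd with an existing session token
    service = client.connect(token=session_key, port=splunkd_port)
    credentials = {}
    for credential in service.storage_passwords:
        # Only keep credentials owned by the app of interest
        if credential.access.app == app_name:
            credentials[credential.name] = {
                'username': credential.content.get('username'),
                'password': credential.content.get('clear_password'),
                'realm': credential.content.get('realm'),
            }
    return credentials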
Example #4
    def setup(self):
        facility = config_file + '_setup'
        logger = setup_logger(app_config["log_level"], setup_log, facility)
        logger.debug("Setup script started")

        try:
            if self.requestedAction == admin.ACTION_EDIT:  # ACTION_EDIT == 4
                for arg in options:
                    self.supportedArgs.addOptArg(arg)
        except BaseException as e:
            logger.exception(e)
Example #5
	def handleList(self, confInfo):
		self.capabilityRead = 'read_kvst_config'

		try:
			cfg = cli.getConfStanza('kvstore_tools','settings')
		except BaseException as e:
			raise Exception("Could not read configuration: " + repr(e))
		
		# Facility info - prepended to log lines
		facility = os.path.basename(__file__)
		facility = os.path.splitext(facility)[0]
		try:
			logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
		except BaseException as e:
			raise Exception("Could not create logger: " + repr(e))

		logger.debug('KV Store Tools Settings handler started (List)')
		
		# Check for permissions to read the configuration
		session_key = self.getSessionKey()
		content = rest.simpleRequest('/services/authentication/current-context?output_mode=json', sessionKey=session_key, method='GET')[1]
		content = json.loads(content)
		current_user = content['entry'][0]['content']['username']
		current_user_capabilities = content['entry'][0]['content']['capabilities']
		if self.capabilityRead in current_user_capabilities:
			logger.debug("User %s is authorized" % current_user)

			confDict = self.readConf("kvstore_tools")
			if confDict is not None:
				for stanza, settings in list(confDict.items()):
					for key, val in list(settings.items()):
						logger.debug("key: {0}, value: {1}".format(key, val))
						if key in ['compression']:
							if str2bool(val):
								val = '1'
							else:
								val = '0'
						'''
						if key in ['default_path'] and val in [None, '', 'unset']:
							val = os.path.join('$SPLUNK_HOME', 'etc', 'apps', 'kvstore_tools', 'backups')
							# Windows wildcard support (works with $ but this is more clear).
							if '\\' in val:
								val = val.replace('$SPLUNK_HOME', '%SPLUNK_HOME%')
						if key in ['backup_batch_size'] and val in [None, '']:
							val = '50000'
						if key in ['retention_days'] and val in [None, '']:
							val = '0'
						if key in ['retention_size'] and val in [None, '']:
							val = '0'
						'''
						confInfo[stanza].append(key, val)
		else:
			raise Exception("User %s is unauthorized. Has the read_kvst_config capability been granted?" % current_user)
Example #6
	def handleList(self, confInfo):
		facility = config_file + '_list'
		logger = setup_logger(app_config["log_level"], setup_log, facility)
		logger.info("Setup list handler started")

		confDict = self.readConf(config_file)

		if confDict is not None:
			for stanza, settings in list(confDict.items()):
				for k, v in list(settings.items()):
					logger.debug("%s stanza: %s, key: %s, value: %s", facility, stanza, k, v)
					# Set blank value for each setting if one does not exist
					if v is None:
						v = ''
					confInfo[stanza].append(k, v)
Example #7
    def handleRemove(self, confInfo):
        facility = config_file + '_delete'
        logger = setup_logger(app_config["log_level"], setup_log, facility)
        logger.debug(config_file + " delete handler started")

        config_id = self.callerArgs.id
        logger.debug("Config: %s" % config_id)
        try:
            en.deleteEntity('/configs/conf-' + config_file,
                            self.callerArgs.id,
                            namespace=self.appName,
                            owner=self.userName,
                            sessionKey=self.getSessionKey())
        except BaseException as e:
            logger.exception(e)
Example #8
    def handleEdit(self, confInfo):
        facility = config_file + '_edit'
        logger = setup_logger(app_config["log_level"], setup_log, facility)
        logger.debug(config_file + " edit handler started")
        config_id = self.callerArgs.id
        config = self.callerArgs.data
        logger.debug("Config: %s/%s" % (config_id, config))

        new_config = {}
        for k, v in list(config.items()):
            try:
                if isinstance(v, list) and len(v) == 1:
                    v = v[0]
                # Dynamic stanza name - GUIDs only
                guid_pattern = r'^([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12})$'
                if k == 'stanza' and re.match(guid_pattern, str(v)):
                    config_id = v
                    logger.debug("Setting stanza to %s" % v)
                else:
                    if v is None:
                        logger.debug('%s Setting %s to blank', facility, k)
                        new_config[k] = ''
                    else:
                        #logger.debug('%s Setting %s to %s', facility, k, v)
                        if k.lower() in password_options and '$7$' not in v:
                            logger.debug(
                                '%s Value has an unencrypted password. Encrypting.',
                                facility)
                            try:
                                v = encrypt_new(splunk_secret, v)
                            except BaseException as e:
                                logger.error(
                                    "%s Error saving encrypted password for %s: %s",
                                    facility, k, repr(e))
                                continue
                        new_config[k] = v
            except BaseException as e:
                logger.exception("Error parsing config value \"%s\": %s" %
                                 (v, repr(e)))

        logger.debug("%s Writing new config for %s: %s", facility, config_id,
                     str(new_config))
        try:
            # Write the config stanza
            self.writeConf(config_file, config_id, new_config)
        except BaseException as e:
            logger.exception("%s Error writing config: %s", facility, e)
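The `stanza` argument is only honored when it looks like a GUID, which keeps callers from redirecting the write to an arbitrary stanza name. A quick illustration of what the pattern does and does not accept:

import re

guid_pattern = r'^([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12})$'

print(bool(re.match(guid_pattern, '123e4567-e89b-12d3-a456-426614174000')))  # True
print(bool(re.match(guid_pattern, 'default')))                               # False
print(bool(re.match(guid_pattern, '123e4567e89b12d3a456426614174000')))      # False (dashes required)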
Example #9
    def generate(self):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))
            sys.exit(1)

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            exit(1)

        logger.info('Script started by %s' %
                    self._metadata.searchinfo.username)

        session_key = self._metadata.searchinfo.session_key
        splunkd_uri = self._metadata.searchinfo.splunkd_uri

        # Check for permissions to run the command
        content = rest.simpleRequest(
            '/services/authentication/current-context?output_mode=json',
            sessionKey=session_key,
            method='GET')[1]
        content = json.loads(content)
        current_user = self._metadata.searchinfo.username
        current_user_capabilities = content['entry'][0]['content'][
            'capabilities']
        if 'run_kvstore_restore' in current_user_capabilities or 'run_kvst_all' in current_user_capabilities:
            logger.debug("User %s is authorized." % current_user)
        else:
            logger.error(
                "User %s is unauthorized. Has the run_kvstore_restore capability been granted?"
                % current_user)
            yield ({
                'Error': 'User %s is unauthorized. Has the run_kvstore_restore capability been granted?' % current_user
            })
            sys.exit(3)

        # Sanitize input
        if self.filename:
            logger.debug('Restore filename: %s' % self.filename)
            list_only = False
        else:
            self.filename = "*#*#*.json*"
            list_only = True

        if self.append:
            logger.debug('Appending to existing collection')
        else:
            self.append = False
            logger.debug('Append to existing collection: %s' %
                         str(self.append))

        backup_file_list = []

        # Get the default path from the configuration
        default_path_dirlist = cfg.get('default_path').split('/')
        default_path = os.path.abspath(
            os.path.join(os.sep, *default_path_dirlist))
        # Replace environment variables
        default_path = os.path.expandvars(default_path)
        default_path = default_path.replace('//', '/')

        if '*' in self.filename:
            # Expand the wildcard to include all matching files from the filesystem
            for name in glob.glob(self.filename):
                backup_file_list.append(name)

            if len(backup_file_list) == 0:
                self.filename = os.path.join(default_path, self.filename)
                for name in glob.glob(self.filename):
                    backup_file_list.append(name)

            if len(backup_file_list) == 0:
                logger.critical("No matching files: %s" % self.filename)
                sys.exit(1)
        else:
            logger.debug('No wildcard string found in %s' % self.filename)
            if os.path.isfile(self.filename):
                backup_file_list.append(self.filename)
            elif os.path.isfile(os.path.join(default_path, self.filename)):
                backup_file_list.append(
                    os.path.join(default_path, self.filename))
            else:
                logger.critical("File does not exist: %s" % self.filename)
                sys.exit(1)

        deleted_collections = []

        # backup_file_list is now a list of filenames
        for name in backup_file_list:
            logger.debug('Parsing filename: %s' % name)
            try:
                # Isolate the filename from the path
                matches = re.search(r'(.*)(?:\/|\\)([^\/\\]+)', name)
                #path = matches.group(1)
                file_param = matches.group(2)
                name_split = file_param.split('#')
            except BaseException as e:
                logger.critical('Invalid filename: %s\n\t%s' % (name, repr(e)))
                yield ({'Error': 'Invalid filename: %s' % name})
                sys.exit(1)

            # Open the file if it's a supported format
            if (name.endswith('.json')
                    or name.endswith('.json.gz')) and len(name_split) == 3:

                # Extract the app name and collection name from the file name
                file_app = name_split[0]
                file_collection = name_split[1]

                if list_only:
                    yield {
                        'filename': name,
                        'app': file_app,
                        'collection': file_collection,
                        'status': 'ready'
                    }
                else:
                    if not self.append:
                        # Delete the collection contents using the KV Store REST API
                        try:
                            collection_id = file_app + "/" + file_collection
                            # Make sure we aren't trying to delete the same collection twice
                            if not collection_id in deleted_collections:
                                kv.delete_collection(logger, splunkd_uri,
                                                     session_key, file_app,
                                                     file_collection)
                                deleted_collections.append(collection_id)
                        except BaseException as e:
                            logger.critical(repr(e), exc_info=True)
                            yield ({
                                'Error': 'Failed to delete collection %s/%s: %s' %
                                         (file_app, file_collection, repr(e))
                            })
                            sys.exit(4)

                    # Upload the collection to the KV Store REST API
                    try:
                        result, message, record_count = kv.upload_collection(
                            logger, splunkd_uri, session_key, file_app,
                            file_collection, name)
                        yield ({
                            'result': result,
                            'message': message,
                            'records': record_count
                        })
                    except BaseException as e:
                        logger.error("Error restoring collection: %s" %
                                     repr(e),
                                     exc_info=True)
                        yield ({
                            'result': 'error',
                            'message': 'Failed to restore collection: %s' % repr(e),
                            'records': 0
                        })

            elif name.endswith('.tar.gz') or name.endswith('.tgz'):
                logger.info('Skipping filename (unsupported format): %s' %
                            name)
                yield {
                    '_time': time.time(),
                    'source': name,
                    'app': '',
                    'collection': '',
                    'records': 0,
                    'result': 'error'
                }
                continue
            else:
                # Skip this file
                logger.info(
                    'Skipping filename (does not meet naming convention): %s' %
                    name)
                yield {
                    '_time': time.time(),
                    'source': name,
                    'app': '',
                    'collection': '',
                    'records': 0,
                    'result': 'error'
                }
                continue
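The restore depends on the backup naming convention `<app>#<collection>#<suffix>.json[.gz]`: three `#`-separated fields, of which the first two identify the target app and collection. A small sketch of that parse, mirroring the checks above:

import os

def parse_backup_filename(path):
    # Return (app, collection) if the name matches app#collection#suffix.json[.gz]
    name = os.path.basename(path)
    if not (name.endswith('.json') or name.endswith('.json.gz')):
        return None
    parts = name.split('#')
    if len(parts) != 3:
        return None
    return parts[0], parts[1]

print(parse_backup_filename('/backups/search#mylookup#20230101.json'))  # ('search', 'mylookup')
print(parse_backup_filename('/backups/archive.tar.gz'))                 # None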
Example #10
    def reduce(self, events):

        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_box')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('Box Export search command initiated')
        logger.debug('search_ep_box command: %s', self)  # logs command line

        # Enumerate proxy settings
        http_proxy = os.environ.get('HTTP_PROXY')
        https_proxy = os.environ.get('HTTPS_PROXY')
        proxy_exceptions = os.environ.get('NO_PROXY')

        if http_proxy is not None:
            logger.debug("HTTP proxy: %s" % http_proxy)
        if https_proxy is not None:
            logger.debug("HTTPS proxy: %s" % https_proxy)
        if proxy_exceptions is not None:
            logger.debug("Proxy Exceptions: %s" % proxy_exceptions)

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        try:
            target_config = get_config_from_alias(session_key, cmd_config,
                                                  self.target)
            if target_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
            logger.debug("Target configuration: " + str(target_config))
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'

        # Create the default filename
        now = str(int(time.time()))
        default_filename = ('export_' + user + '___now__' +
                            file_extensions[self.outputformat]).strip("'")

        # Split the output into folder and filename
        if self.outputfile is not None:
            folder_list = self.outputfile.split('/')
            if len(folder_list) == 1:
                # No folder specified, use the default
                use_default_folder = True
                filename = folder_list[0]
            elif folder_list[0] == '':
                # Length > 1, outputfile points to the root folder (leading /)
                use_default_folder = False
            else:
                # Length > 1 and outputfile points to a relative path (no leading /)
                use_default_folder = True

            if len(folder_list) > 1 and folder_list[-1] == '':
                # No filename provided, trailing /
                filename = default_filename
                folder_list.pop()
            elif len(folder_list) > 1 and len(folder_list[-1]) > 0:
                filename = folder_list[-1]
                folder_list.pop()
        else:
            use_default_folder = True
            filename = default_filename
            folder_list = []

        if use_default_folder:
            if 'default_folder' in list(target_config.keys()):
                # Use the configured default folder
                folder_list = target_config['default_folder'].strip('/').split(
                    '/') + folder_list
            else:
                # Use the root folder
                folder_list = ['']

        # Replace keywords from output filename and folder
        folder = replace_keywords('/'.join(folder_list))
        filename = replace_keywords(filename)
        logger.debug("Folder = " + folder)
        logger.debug("Filename = " + filename)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = target_config.get('compress')
            except:
                self.compress = False

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))
        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)
        if self.compress:
            local_output_file = local_output_file + '.gz'
        logger.debug("Staging file: %s" % local_output_file)

        # Append .gz to the output file if compress=true
        if not self.compress and len(filename) > 3:
            if filename[-3:] == '.gz':
                # We have a .gz extension when compression was not specified. Enable compression.
                self.compress = True
        elif self.compress and len(filename) > 3:
            if filename[-3:] != '.gz':
                filename = filename + '.gz'


        # Use the credential to connect to Box
        try:
            client = get_box_connection(target_config)
        except BaseException as e:
            exit_error(logger, "Could not connect to box: " + repr(e))

        subfolders = folder.strip('/').split('/')
        if '' in subfolders:
            subfolders.remove('')
        logger.debug("Folders: %s" % str(subfolders))
        # Prepend the list with the root element
        box_folder_object = client.root_folder().get()
        # Walk the folder path until we find the target directory
        for subfolder_name in subfolders:
            logger.debug("Looking for folder: %s" % subfolder_name)
            # Get the folder ID for the string specified from the list of child subfolders
            # folder object is from the previous iteration
            folder_contents = box_folder_object.get_items()
            folder_found = False
            for item in folder_contents:
                if item.type == 'folder':
                    #logger.debug('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name))
                    if subfolder_name == item.name:
                        logger.debug("Found a target folder ID: %s" %
                                     str(item.id))
                        box_folder_object = client.folder(folder_id=item.id)
                        folder_found = True
            if not folder_found:
                # Create the required subfolder
                box_folder_object = box_folder_object.create_subfolder(
                    subfolder_name)

        try:
            event_counter = 0
            # Write the output file to disk in the dispatch folder
            logger.debug(
                "Writing events to dispatch file. file=\"%s\" format=%s compress=%s fields=%s",
                local_output_file, self.outputformat, self.compress,
                self.fields)
            for event in event_file.write_events_to_file(
                    events, self.fields, local_output_file, self.outputformat,
                    self.compress):
                yield event
                event_counter += 1

        except BoxAPIException as be:
            exit_error(logger, be.message, 833928)
        except BaseException as e:
            exit_error(logger, "Error writing file to upload: " + repr(e), 398372)

        try:
            new_file = box_folder_object.upload(local_output_file,
                                                file_name=filename)
            message = "Box Export Status: Success. File name: %s, File ID: %s" % (
                new_file.name, new_file.id)
            eprint(message)
            logger.info(message)
        except BaseException as e:
            exit_error(logger, "Error uploading file to Box: " + repr(e),
                       109693)
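The folder walk uses the Box Python SDK (`boxsdk`): `client.root_folder().get()`, `get_items()`, `client.folder(folder_id=...)`, and `create_subfolder()` are the same SDK calls seen above. The walk-or-create logic in isolation, assuming an already-authenticated `client`:

def walk_or_create_box_path(client, path):
    # Descend from the Box root folder, creating any missing subfolders
    folder = client.root_folder().get()
    for part in [p for p in path.strip('/').split('/') if p]:
        match = None
        for item in folder.get_items():
            if item.type == 'folder' and item.name == part:
                match = client.folder(folder_id=item.id)
                break
        folder = match if match is not None else folder.create_subfolder(part)
    return folder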
Example #11
    def stream(self, events):
        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_hec')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('HEC Export search command initiated')
        logger.debug('search_ep_hec command: %s', self)  # logs command line

        # Set defaults
        if self.host is None:
            self.host = "$host$"
        # Get the default values used for data originating from this machine
        inputs_host = cli.getConfStanza('inputs', 'splunktcp')["host"]

        if self.source is None:
            self.source = "$source$"

        if self.sourcetype is None:
            self.sourcetype = "$sourcetype$"

        if self.index is None:
            self.index = "$index$"

        # Enumerate proxy settings
        http_proxy = os.environ.get('HTTP_PROXY')
        https_proxy = os.environ.get('HTTPS_PROXY')
        proxy_exceptions = os.environ.get('NO_PROXY')

        if http_proxy is not None:
            logger.debug("HTTP proxy: %s" % http_proxy)
        if https_proxy is not None:
            logger.debug("HTTPS proxy: %s" % https_proxy)
        if proxy_exceptions is not None:
            logger.debug("Proxy Exceptions: %s" % proxy_exceptions)

        # Enumerate settings
        searchinfo = self._metadata.searchinfo
        app = searchinfo.app
        user = searchinfo.username
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        try:
            target_config = get_config_from_alias(session_key, cmd_config,
                                                  self.target)
            if target_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)

            logger.debug("Target configuration: " + str(target_config))
            hec_token = target_config['token']
            hec_host = target_config['host']
            hec_port = target_config['port']
            hec_ssl = str2bool(target_config['ssl'])
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        if len(hec_host) == 0:
            exit_error(logger, "No host specified", 119371)

        # Create HEC object
        hec = http_event_collector(hec_token,
                                   hec_host,
                                   http_event_port=hec_port,
                                   http_event_server_ssl=hec_ssl)
        if port_is_open(hec_host, hec_port):
            logger.debug("Port connectivity check passed")
            if hec.check_connectivity():

                # Special event key fields that can be specified/overridden in the alert action
                meta_keys = ['source', 'sourcetype', 'host', 'index']
                event_count = 0
                for event in events:
                    # Get the fields list for the event
                    event_keys = list(event.keys())

                    payload = {}
                    payload_event_src = {}
                    # Copy event to new event, so we can change it
                    for f in event_keys:
                        payload_event_src[f] = event[f]

                    if '_time' in event_keys:
                        payload.update({"time": payload_event_src['_time']})
                        del (payload_event_src['_time'])
                    else:
                        payload.update({"time": time.time()})

                    for k in meta_keys:
                        # Loop through the metadata keys: host/source/sourcetype/index
                        if getattr(self, k)[0] == getattr(self, k)[-1] == "$":
                            if k in event_keys:
                                # If the key field is in the event and its output argument is set to a variable
                                payload.update({
                                    k: payload_event_src[getattr(self, k)[1:-1]]
                                })
                                # Delete it from the payload event source so it's not included when we dump the rest of the fields later.
                                del (payload_event_src[getattr(self, k)[1:-1]])
                            elif k == "host" and self.host == "$host$":
                                # "host" field not found in event, but has the default value. Use the one from inputs.conf.
                                payload.update({k: inputs_host})
                        else:
                            # Plaintext entry
                            payload.update({k: getattr(self, k)})

                    # Only send _raw (no other fields) if the _raw field was included in the search result.
                    # (Don't include other fields/values)
                    if '_raw' in list(payload_event_src.keys()):
                        #logger.debug("Using _raw from search result")
                        payload.update({"event": payload_event_src['_raw']})
                    else:
                        payload.update({"event": payload_event_src})

                    event_count += 1
                    logger.debug("Payload = " + str(payload))
                    hec.batchEvent(payload)
                    yield (event)

                hec.flushBatch()
                logger.info(
                    "Successfully exported events. count=%s target=%s app=%s user=%s"
                    % (event_count, hec_host, app, user))
            else:  # Connectivity check failed
                exit_error(
                    logger,
                    "HEC endpoint port open but connection test failed.",
                    104893)
        else:
            if str2bool(hec_ssl):
                protocol = 'https'
            else:
                protocol = 'http'
            exit_error(
                logger,
                "Unable to connect to the HEC endpoint: %s://%s:%s" %
                (protocol, hec_host, hec_port), 100253)
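The payload assembled in the loop follows Splunk's documented HEC event JSON: top-level `time`, `host`, `source`, `sourcetype`, and `index` metadata plus an `event` body. One batched event might look like this (values illustrative):

payload = {
    "time": 1700000000.0,                            # from _time, else time.time()
    "host": "web01",                                 # resolved from $host$ or inputs.conf
    "source": "/var/log/app.log",
    "sourcetype": "app:log",
    "index": "main",
    "event": "2023-11-14 12:00:00 GET /health 200"   # _raw if present, else a dict of fields
}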
Example #12
	def handleEdit(self, confInfo):
		self.capabilityWrite = 'write_kvst_config'

		try:
			cfg = cli.getConfStanza('kvstore_tools','settings')
		except BaseException as e:
			raise Exception("Could not read configuration: " + repr(e))
		
		# Facility info - prepended to log lines
		facility = os.path.basename(__file__)
		facility = os.path.splitext(facility)[0]
		try:
			logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
		except BaseException as e:
			raise Exception("Could not create logger: " + repr(e))

		logger.debug('KV Store Tools Settings handler started (Edit)')

		# Check for permissions to read the configuration
		session_key = self.getSessionKey()
		content = rest.simpleRequest('/services/authentication/current-context?output_mode=json', sessionKey=session_key, method='GET')[1]
		content = json.loads(content)
		current_user = content['entry'][0]['content']['username']
		current_user_capabilities = content['entry'][0]['content']['capabilities']
		if self.capabilityWrite in current_user_capabilities:
			logger.debug("User %s is authorized" % current_user)

			# Read the splunk.secret file
			with open(os.path.join(os.getenv('SPLUNK_HOME'), 'etc', 'auth', 'splunk.secret'), 'r') as ssfh:
				splunk_secret = ssfh.readline()

			config = self.callerArgs.data
			new_config = {}
			for k, v in list(config.items()):
				if isinstance(v, list) and len(v) == 1:
					v = v[0]
				if v is None:
					logger.debug('Setting %s to blank' % k)
					new_config[k] = ''
				else:
					logger.debug('Setting %s to %s' % (k, v))
					if k[:10] == 'credential' and '$7$' not in v:
						logger.debug('Value has an unencrypted password. Encrypting.')
						# Split the value into alias/username/password
						hostname, username, password = v.split(':')
						try:
							v = hostname + ":" + username + ":" + encrypt_new(splunk_secret, password)
						except BaseException as e:
							logger.error("Error saving encrypted password for %s: %s" % (hostname, repr(e)))
							continue
							
					new_config[k] = v
					logger.debug('Applied %s to configuration dict' % k)
			try:
				# new_config values are plain strings after the single-element
				# list unwrap above, so assign directly (no [0] indexing)
				if 'compression' in list(new_config.keys()):
					if str2bool(config['compression'][0]):
						new_config['compression'] = '1'
					else:
						new_config['compression'] = '0'

				if 'default_path' in list(new_config.keys()):
					if config['default_path'][0] in [None, '']:
						new_config['default_path'] = None

				if 'backup_batch_size' in list(new_config.keys()):
					if config['backup_batch_size'][0] in [None, '']:
						new_config['backup_batch_size'] = None
				
				logger.debug("Writing configuration")
			except BaseException as e:
				logger.critical("Error parsing configuration: %s" % repr(e))
			# Write the config stanza
			self.writeConf('kvstore_tools', 'settings', new_config)
		else:
			raise Exception("User %s is unauthorized. Has the write_kvst_config capability been granted?" % current_user)
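Credentials in this handler are stored as a single `alias:username:password` string with only the password segment encrypted. A sketch of that transformation, where `encrypt_new` stands in for the app's splunk.secret-based helper (assumed callable, as in the handler above):

def encrypt_credential_value(value, splunk_secret, encrypt_new):
    # Encrypt only the password part of 'hostname:username:password';
    # assumes no ':' inside the segments, as the handler above does.
    hostname, username, password = value.split(':')
    if password.startswith('$7$'):
        return value  # already encrypted
    return '%s:%s:%s' % (hostname, username, encrypt_new(splunk_secret, password))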
Example #13
    def generate(self):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))
            exit(1)

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            print("Could not create logger: " + repr(e))
            exit(1)

        logger.info('Script started by %s' %
                    self._metadata.searchinfo.username)

        session_key = self._metadata.searchinfo.session_key
        splunkd_uri = self._metadata.searchinfo.splunkd_uri

        if self.app:
            logger.debug('App: %s' % self.app)
        else:
            self.app = self._metadata.searchinfo.app

        if self.collection:
            logger.debug('Collection: %s' % self.collection)
        else:
            logger.critical("No collection specified. Exiting.")
            print("Error: No collection specified.")
            exit(1)

        if self.key:
            logger.debug('Key ID: %s' % self.key)
        else:
            logger.critical("No key value specified. Exiting.")
            print("Error: No key value specified.")
            exit(1)

        headers = {
            'Authorization': 'Splunk %s' % session_key,
            'Content-Type': 'application/json'
        }
        #url_tmpl_app = '%(server_uri)s/servicesNS/%(owner)s/%(app)s/storage/collections/config?output_mode=json&count=0'

        # Enumerate all apps
        app_list = kv.get_server_apps(splunkd_uri, session_key, self.app)
        collection_list = kv.get_app_collections(splunkd_uri, session_key,
                                                 self.collection, self.app,
                                                 app_list, True)

        logger.debug('Collections present: %s', str(collection_list))

        try:
            # Create an object for the collection
            collection_present = False
            for c in collection_list:
                # Extract the app and collection name from the array
                # c[0] = app, c[1] = collection name
                collection_app = c[0]
                collection_name = c[1]
                if collection_name == self.collection:
                    if self.app is None or self.app == collection_app:
                        self.app = collection_app
                        collection_present = True
                    elif self.app != collection_app:
                        pass
                    logger.debug("Collection found: {0} in app {1}".format(
                        self.collection, self.app))
            if not collection_present:
                logger.critical(
                    "KVStore collection %s not found within app %s" %
                    (self.collection, self.app))
                exit(1)

        except BaseException as e:
            logger.critical('Error enumerating collections: ' + str(e))
            exit(1)

        url_tmpl_delete = '%(server_uri)s/servicesNS/%(owner)s/%(app)s/storage/collections/data/%(collection)s/%(id)s?output_mode=json'
        try:
            delete_url = url_tmpl_delete % dict(server_uri=splunkd_uri,
                                                owner='nobody',
                                                app=self.app,
                                                collection=self.collection,
                                                id=urllib.parse.quote(self.key,
                                                                      safe=''))
            logger.debug("Delete url: " + delete_url)

            response = response_code = None
            try:
                response, response_code = request('DELETE', delete_url, '',
                                                  headers)
                logger.debug('Server response: %s', response)
            except BaseException as e:
                logger.error(
                    'Failed to delete key %s from collection %s/%s: %s' %
                    (self.key, self.app, self.collection, repr(e)))

            if response_code == 200:
                logger.debug(
                    "Successfully deleted key %s from collection %s/%s" %
                    (self.key, self.app, self.collection))
                result = "success"
            else:
                logger.error(
                    "Error deleting key %s from collection %s/%s: %s" %
                    (self.key, self.app, self.collection, response))
                result = "error"

        except BaseException as e:
            logger.error("Error deleting key %s from collection %s/%s: %s" %
                         (self.key, self.app, self.collection, repr(e)))
            result = "error"

        # Entry deleted
        yield {
            '_time': time.time(),
            'app': self.app,
            'collection': self.collection,
            'key': self.key,
            'result': result
        }
Example #14
    def reduce(self, events):

        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_smb')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('SMB Export search command initiated')
        logger.debug('search_ep_smb command: %s', self)  # logs command line

        # Enumerate proxy settings
        http_proxy = os.environ.get('HTTP_PROXY')
        https_proxy = os.environ.get('HTTPS_PROXY')
        proxy_exceptions = os.environ.get('NO_PROXY')

        if http_proxy is not None:
            logger.debug("HTTP proxy: %s" % http_proxy)
        if https_proxy is not None:
            logger.debug("HTTPS proxy: %s" % https_proxy)
        if proxy_exceptions is not None:
            logger.debug("Proxy Exceptions: %s" % proxy_exceptions)

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))

        try:
            target_config = get_config_from_alias(session_key, cmd_config,
                                                  self.target)
            if target_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        # Get the local client hostname
        client_name = socket.gethostname()
        # Delete any domain from the client hostname string
        if '.' in client_name:
            client_name = client_name[0:client_name.index('.')]

        # Check to see if we have credentials
        valid_settings = []
        for l in list(target_config.keys()):
            if len(target_config[l]) > 0:
                valid_settings.append(l)
        if 'host' in valid_settings:
            # A target has been configured. Check for credentials.
            try:
                if 'credential_username' in valid_settings and 'credential_password' in valid_settings and 'share_name' in valid_settings:
                    domain = target_config[
                        'credential_realm'] if 'credential_realm' in list(
                            target_config.keys()) else target_config['host']

                    try:
                        # Try port 445 first
                        conn = SMBConnection(
                            target_config['credential_username'],
                            target_config['credential_password'],
                            client_name,
                            target_config['host'],
                            domain=domain,
                            use_ntlm_v2=True,
                            sign_options=SMBConnection.SIGN_WHEN_SUPPORTED,
                            is_direct_tcp=True)
                        connected = conn.connect(target_config['host'],
                                                 445,
                                                 timeout=5)

                        if target_config['share_name'] not in (
                                s.name for s in conn.listShares(timeout=10)):
                            exit_error(
                                logger,
                                "Unable to find the specified share name on the server",
                                553952)
                        '''
						p445_error = repr(e445)
						try:
							# Try port 139 if that didn't work
							conn = SMBConnection(target_config['credential_username'], target_config['credential_password'], client_name, 
							target_config['host'], domain=domain, use_ntlm_v2=True,
							sign_options = SMBConnection.SIGN_WHEN_SUPPORTED) 
							connected = conn.connect(target_config['host'], 139, timeout=5)
						except BaseException as e139:
							p139_error = repr(e139)
							raise Exception("Errors connecting to host: \\nPort 139: %s\\nPort 445: %s" % (p139_error, p445_error))

						conn = SMBConnection(target_config['credential_username'], target_config['credential_password'], client_name, 
							target_config['host'], domain=domain, use_ntlm_v2=True,
							sign_options = SMBConnection.SIGN_WHEN_SUPPORTED) 
						connected = conn.connect(target_config['host'], 139)
						shares = 
						share_exists = False
						for i in range(len(shares)):
							if shares[i].name == target_config['share_name']:
								share_exists = True
								break
						'''
                    except BaseException as e:
                        exit_error(
                            logger,
                            "Unable to setup SMB connection: " + repr(e),
                            921982)
                else:
                    exit_error(logger, "Required settings not found", 101926)
            except BaseException as e:
                exit_error(logger,
                           "Error reading the configuration: " + repr(e),
                           230494)
        else:
            exit_error(logger,
                       "Could not find required configuration settings",
                       2823874)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'
        # Create the default filename
        default_filename = ('export_' + user + '___now__' +
                            file_extensions[self.outputformat]).strip("'")

        folder, filename = event_file.parse_outputfile(self.outputfile,
                                                       default_filename,
                                                       target_config)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = target_config.get('compress')
            except:
                self.compress = False

        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)
        if self.compress:
            local_output_file = local_output_file + '.gz'

        # Append .gz to the output file if compress=true
        if not self.compress and len(filename) > 3:
            if filename[-3:] == '.gz':
                # We have a .gz extension when compression was not specified. Enable compression.
                self.compress = True
        elif self.compress and len(filename) > 3:
            if filename[-3:] != '.gz':
                filename = filename + '.gz'

        if conn is not None:
            # Use the credential to connect to the SMB server
            try:
                # Check to see if the folder exists
                folder_attrs = conn.getAttributes(target_config['share_name'],
                                                  folder,
                                                  timeout=10)
            except BaseException:
                # Remote directory could not be loaded. It must not exist. Create it.
                # Create the folders required to store the file
                subfolders = ['/'] + folder.strip('/').split('/')
                if '' in subfolders:
                    subfolders.remove('')
                logger.debug("Folders list for dir creation: %s" %
                             str(subfolders))
                current_folder = ''
                folder_depth = len(subfolders) - 1
                for i, subfolder_name in enumerate(subfolders):
                    current_folder = (current_folder + '/' +
                                      subfolder_name).replace('//', '/')
                    logger.debug("Current folder = " + current_folder)
                    try:
                        conn.getAttributes(target_config['share_name'],
                                           current_folder,
                                           timeout=10)
                    except:
                        conn.createDirectory(target_config['share_name'],
                                             current_folder,
                                             timeout=10)
                try:
                    folder_attrs = conn.getAttributes(
                        target_config['share_name'], folder, timeout=10)
                except BaseException as e:
                    exit_error(
                        logger, "Could not load or create remote directory: " +
                        repr(e), 377890)

            # This should always be true
            if folder_attrs is not None:
                if folder_attrs.isReadOnly or not folder_attrs.isDirectory:
                    exit_error(
                        logger,
                        "Remote path is read-only or not a directory",
                        184772)
                else:
                    try:
                        event_counter = 0
                        # Write the output file to disk in the dispatch folder
                        logger.debug(
                            "Writing events to dispatch file. file=\"%s\" format=%s compress=%s fields=%s",
                            local_output_file, self.outputformat,
                            self.compress, self.fields)
                        for event in event_file.write_events_to_file(
                                events, self.fields, local_output_file,
                                self.outputformat, self.compress):
                            yield event
                            event_counter += 1
                    except BaseException as e:
                        exit_error(logger,
                                   "Error writing file to upload: " + repr(e),
                                   296733)

                    # Write the file to the remote location
                    try:
                        with open(local_output_file, 'rb',
                                  buffering=0) as local_file:
                            bytes_uploaded = conn.storeFile(
                                target_config['share_name'],
                                folder + '/' + filename, local_file)
                    except BaseException as e:
                        exit_error(
                            logger,
                            "Error uploading file to SMB server: " + repr(e),
                            109693)

                    if bytes_uploaded > 0:
                        message = "SMB Export Status: Success. File name: %s" % (
                            folder + '/' + filename)
                        eprint(message)
                        logger.info(message)
                    else:
                        exit_error(logger, "Zero bytes uploaded", 771293)
        else:
            exit_error(logger, "Could not connect to server.", 159528)
Example #15
# pyright: reportMissingImports=false
import os
import re

from splunk.clilib import cli_common as cli
import splunk.entity as en
import splunklib.client as client
from splunk.persistconn.application import PersistentServerConnectionApplication
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import splunk.rest

from deductiv_helpers import setup_logger
from ep_helpers import get_config_from_alias, get_aws_s3_directory, get_box_directory, get_sftp_directory, get_smb_directory

config = cli.getConfStanza('ep_general', 'settings')
# Facility info - prepended to log lines
facility = os.path.basename(__file__)
facility = os.path.splitext(facility)[0]
logger = setup_logger(config["log_level"], 'export_everything.log', facility)
temp_dir = os.path.join(os.environ.get('SPLUNK_HOME'), 'etc', 'users',
                        'splunk-system-user', '.eptemp')
os.makedirs(temp_dir, exist_ok=True)
os.chdir(temp_dir)

app = 'export_everything'


def return_error(error_text):
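    # Strip the Exception( wrapper, backslashes, and quote characters from the
    # message, then collapse runs of parentheses so the UI shows a clean string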
    error_text = re.sub(r'Exception\(|\\|\'|"', '', error_text)
    error_text = re.sub(r'\(+', '(', error_text)
    error_text = re.sub(r'\)+', ')', error_text)
    return {'error': error_text, 'payload': error_text, 'status': 500}
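
# Illustrative usage (not part of the original module): sanitizing a quoted
# error message with return_error:
#   return_error("'Connection refused'")
#   # -> {'error': 'Connection refused', 'payload': 'Connection refused', 'status': 500}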

Example no. 16
    def generate(self):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            print("Could not create logger: " + repr(e))
            exit(1)

        logger.info('Script started by %s' %
                    self._metadata.searchinfo.username)

        batch_size = int(cfg.get('backup_batch_size'))
        logger.debug("Batch size: %d rows" % batch_size)

        local_session_key = self._metadata.searchinfo.session_key
        splunkd_uri = self._metadata.searchinfo.splunkd_uri

        # Check for permissions to run the command
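        # current-context reports the calling user's effective capabilities,
        # which are matched against this app's custom capability names below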
        content = rest.simpleRequest(
            '/services/authentication/current-context?output_mode=json',
            sessionKey=local_session_key,
            method='GET')[1]
        content = json.loads(content)
        current_user = self._metadata.searchinfo.username
        current_user_capabilities = content['entry'][0]['content'][
            'capabilities']
        if 'run_kvstore_pull' in current_user_capabilities or 'run_kvst_all' in current_user_capabilities:
            logger.debug("User %s is authorized." % current_user)
        else:
            logger.error(
                "User %s is unauthorized. Has the run_kvstore_pull capability been granted?"
                % current_user)
            yield ({
                'Error':
                'User %s is unauthorized. Has the run_kvstore_pull capability been granted?'
                % current_user
            })
            sys.exit(3)

        # Sanitize input
        if self.app:
            logger.debug('App Context: %s' % self.app)
        else:
            self.app = None

        if self.collection:
            logger.debug('Collection: %s' % self.collection)
        else:
            self.collection = None

        if self.global_scope:
            logger.debug('Global Scope: %s' % self.global_scope)
        else:
            self.global_scope = False

        if self.append:
            logger.debug('Appending to existing collection')
        else:
            self.append = False
            logger.debug('Append to existing collection: %s' %
                         str(self.append))

        if self.targetport:
            logger.debug('Port for remote connect: %s' % self.targetport)
        else:
            self.targetport = '8089'

        # Get credentials
        try:
            # Use the credential where the realm matches the target hostname
            # Otherwise, use the last entry in the list
            credentials = kv.parse_custom_credentials(logger, cfg)
            try:
                credential = credentials[self.target]
            except Exception:
                try:
                    hostname = self.target.split('.')[0]
                    credential = credentials[hostname]
                except Exception as e:
                    logger.critical("Could not get password for %s: %s" %
                                    (self.target, repr(e)))
                    print("Could not get password for %s: %s" %
                          (self.target, repr(e)))
                    exit(1593)

            remote_user = credential['username']
            remote_password = credential['password']

        except BaseException as e:
            logger.critical(
                'Failed to get credentials for remote Splunk instance: %s' %
                repr(e),
                exc_info=True)
            yield ({
                'Error':
                'Failed to get credentials for remote Splunk instance: %s' %
                repr(e)
            })
            exit(7372)

        # Login to the remote host and get the session key
        try:
            remote_host = self.target
            remote_port = self.targetport
            remote_uri = 'https://%s:%s' % (self.target, self.targetport)

            remote_service = client.connect(host=remote_host,
                                            port=remote_port,
                                            username=remote_user,
                                            password=remote_password)
            remote_service.login()

            remote_session_key = remote_service.token.replace('Splunk ', '')
            logger.debug('Obtained remote session key')

        except BaseException as e:  # BaseException already covers HTTPError
            logger.exception('Failed to login on remote Splunk instance: %s' %
                             repr(e))
            yield ({
                'Error':
                'Failed to login on remote Splunk instance: %s' % repr(e)
            })
            sys.exit(4424)

        # Get the list of remote apps and collections
        remote_app_list = kv.get_server_apps(remote_uri, remote_session_key,
                                             self.app)
        remote_collection_list = kv.get_app_collections(
            remote_uri, remote_session_key, self.collection, self.app,
            remote_app_list, self.global_scope)
        logger.debug('Collections to pull: %s' % str(remote_collection_list))

        for remote_collection in remote_collection_list:
            # Extract the app and collection name from the array
            collection_app = remote_collection[0]
            collection_name = remote_collection[1]
            try:
                yield (kv.copy_collection(logger, remote_session_key,
                                          remote_uri, local_session_key,
                                          splunkd_uri, collection_app,
                                          collection_name, self.append))
            except BaseException as e:
                logger.critical(
                    'Failed to copy collections from %s to local KV store: %s'
                    % (self.target, repr(e)),
                    exc_info=True)
                yield ({
                    'Error':
                    'Failed to copy collections from %s to local KV store: %s'
                    % (self.target, repr(e))
                })
                sys.exit(11)
Example no. 17
    def stream(self, events):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            print("Could not create logger: " + repr(e))
            exit(1)

        logger.info('Script started by %s' %
                    self._metadata.searchinfo.username)

        if self.app:
            logger.debug('App: %s' % self.app)
        else:
            self.app = self._metadata.searchinfo.app

        if self.collection:
            logger.debug('Collection: %s' % self.collection)
        else:
            logger.critical("No collection specified. Exiting.")
            print("Error: No collection specified.")
            exit(1)

        self.session_key = self._metadata.searchinfo.session_key
        self.splunkd_uri = self._metadata.searchinfo.splunkd_uri

        # Enumerate all app_list
        app_list = kv.get_server_apps(self.splunkd_uri, self.session_key,
                                      self.app)
        collection_list = kv.get_app_collections(self.splunkd_uri,
                                                 self.session_key,
                                                 self.collection, self.app,
                                                 app_list, True)

        logger.debug('Collections present: %s', str(collection_list))

        try:
            # Create an object for the collection
            collection_present = False
            for c in collection_list:
                # Extract the app and collection name from the array
                # c[0] = app, c[1] = collection name
                collection_app = c[0]
                collection_name = c[1]
                if collection_name == self.collection:
                    if self.app is None or self.app == collection_app:
                        self.app = collection_app
                        collection_present = True
                        logger.debug("Collection {0} found in app {1}".format(
                            self.collection, self.app))
            if not collection_present:
                logger.critical("KVStore collection %s/%s not found" %
                                (self.app, self.collection))
                exit(1)

        except BaseException as e:
            logger.critical('Error enumerating collections: %s' % repr(e))
            exit(1)

        # Make a Pool of workers
        pool = ThreadPool(4)
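        # Each worker issues an independent REST DELETE for its event's _key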

        try:
            results = pool.map(self.delete_key_from_event, events)
        except BaseException as e:
            logger.error("%s" % repr(e), exc_info=True)
            results = []  # nothing to yield if the pool failed

        for result in results:
            yield result
Example no. 18
	def generate(self):
		try:
			cfg = cli.getConfStanza('kvstore_tools','settings')
		except BaseException as e:
			eprint("Could not read configuration: " + repr(e))
		
		# Facility info - prepended to log lines
		facility = os.path.basename(__file__)
		facility = os.path.splitext(facility)[0]
		try:
			logger = setup_logger(cfg["log_level"], 'kvstore_tools.log', facility)
		except BaseException as e:
			eprint("Could not create logger: " + repr(e))
			exit(1)

		logger.info('Script started by %s' % self._metadata.searchinfo.username)

		batch_size = int(cfg.get('backup_batch_size'))
		logger.debug("Batch size: %d rows" % batch_size)
		session_key = self._metadata.searchinfo.session_key
		splunkd_uri = self._metadata.searchinfo.splunkd_uri

		# Check for permissions to run the command
		content = rest.simpleRequest('/services/authentication/current-context?output_mode=json', sessionKey=session_key, method='GET')[1]
		content = json.loads(content)
		current_user = self._metadata.searchinfo.username
		current_user_capabilities = content['entry'][0]['content']['capabilities']
		if 'run_kvstore_backup' in current_user_capabilities or 'run_kvst_all' in current_user_capabilities:
			logger.debug("User %s is authorized." % current_user)
		else:
			logger.error("User %s is unauthorized. Has the run_kvstore_backup capability been granted?" % current_user)
			yield({'Error': 'User %s is unauthorized. Has the run_kvstore_backup capability been granted?' % current_user })
			sys.exit(3)

		# Sanitize input
		if self.app:
			logger.debug('App Context: %s' % self.app)
		else:
			self.app = None

		if self.path:
			pass
		else:
			# Get path from configuration
			try:
				# Break path out and re-join it so it's OS independent
				default_path = cfg.get('default_path').split('/')
				self.path = os.path.abspath(os.path.join(os.sep, *default_path))
			except:
				logger.critical("Unable to get backup path")
				yield({'Error': "Path not provided in search arguments and default path is not set."})
				sys.exit(1)

		# Replace environment variables
		self.path = os.path.expandvars(self.path)
		self.path = self.path.replace('//', '/')
		logger.debug('Backup path: %s' % self.path)
		if not os.path.isdir(self.path):
			logger.critical("Path does not exist: {0}".format(self.path))
			yield({'Error': "Path does not exist: {0}".format(self.path)})
			sys.exit(1)

		if self.collection:
			logger.debug('Collection: %s' % self.collection)
		else:
			self.collection = None

		if self.global_scope:
			logger.debug('Global Scope: %s' % self.global_scope)
		else:
			self.global_scope = False

		if self.compression is not None:
			logger.debug('Compression: %s' % self.compression)
		else:
			try:
				# The conf value is a string; convert it to a boolean
				# (str2bool is provided by deductiv_helpers)
				self.compression = str2bool(cfg.get('compression'))
			except:
				self.compression = False

		app_list = kv.get_server_apps(splunkd_uri, session_key, self.app)
		logger.debug("Apps list: %s" % str(app_list))
		collection_list = kv.get_app_collections(splunkd_uri, session_key, self.collection, self.app, app_list, self.global_scope)

		logger.info('Collections to backup: %s', str(collection_list))

		for collection in collection_list:
			# Extract the app and collection name from the array
			entry_app = collection[0]
			collection_name = collection[1]

			ts = time.time()
			st = datetime.fromtimestamp(ts).strftime('%Y%m%d_%H%M%S')
			#maxrows = int(limits_cfg.get('max_rows_per_query'))

			# Set the filename and location for the output (expanding environment variables)
			output_filename = entry_app + "#" + collection_name + "#" + st + ".json"
			if self.compression:
				output_filename = output_filename + '.gz'
			output_file = os.path.join(self.path, output_filename)

			# Download the collection to a local file
			result, message, total_record_count = kv.download_collection(logger, splunkd_uri, session_key, entry_app, collection_name, output_file, self.compression)
			logger.debug("Retrieved {0} records from {1}".format(total_record_count, collection_name))
			yield {'_time': time.time(), 'app': entry_app, 'collection': collection_name, 'result': result, 'records': total_record_count, 'message': message, 'file': output_file }

		# Execute retention routine
		max_age = int(cfg.get('retention_days'))
		max_size = int(cfg.get('retention_size')) * 1024 * 1024

		if max_size > 0 or max_age > 0:
			# Check the size of all *.json and *.json.gz files in the directory
			#dir = self.path
			pattern = os.path.join(self.path, "*#*#*.json*")

			# Get a listing of the files and check the file sizes
			backup_file_list = glob.glob(pattern)

			# Sort descending based on file timestamp
			backup_file_list.sort(key=os.path.getmtime, reverse=True)
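			# Newest files sort first, so the running byte total below retains
			# the most recent backups and deletes the oldest once max_size is hit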

			# Count the total bytes in all of the files
			totalbytes = 0
			logger.debug("Max age (days): %s / Max size: %s" % (max_age, max_size))
			
			for f in backup_file_list:
				logger.debug("File %s", f)

				# Get the file size (bytes) and age (days)
				file_bytes = os.path.getsize(f)  # avoid shadowing the built-in bytes
				age_days = (time.time() - os.stat(f)[stat.ST_MTIME]) / 86400
				logger.debug("Age (days): %d", age_days)

				# Increment the total byte count
				totalbytes += file_bytes

				if totalbytes > max_size and max_size > 0:
					# Delete the files
					logger.debug("Total bytes ({0}) > max_size ({1})".format(totalbytes, max_size))
					os.remove(f)
					logger.info("Deleted file due to size retention policy: %s" % f)

				elif age_days > max_age and max_age > 0:
					logger.debug("Age ({0}) > max_age ({1})".format(age_days, max_age))
					os.remove(f)
					logger.info("Deleted file due to age retention policy: %s" % f)
Example no. 19
import os
import sys
import json
import urllib.parse

import kv_common as kv
from deductiv_helpers import eprint, request, str2bool, setup_logger, read_config

# Examples:
# http://docs.splunk.com/Documentation/Splunk/6.5.0/AdvancedDev/CustomAlertKVStoreExample

print(sys.argv)
if len(sys.argv) > 1:
    if sys.argv[1] == "--execute":
        payload = json.loads(sys.stdin.read())

        # Build the logger object based on the config file setting for log_level
        config = read_config('kvstore_tools.conf')
        log_level = config['settings']['log_level']
        facility = os.path.splitext(os.path.basename(__file__))[0]
        logger = setup_logger(log_level, 'kvstore_tools.log', facility)

        # Get the stdin payload
        alert_config = payload.get('configuration', dict())
        # Get the app / collection name supplied by the user/search
        app = urllib.parse.quote(alert_config.get('app', payload.get('app')))
        collection = alert_config.get('collection')

        # Build the URL for the Splunkd REST endpoint
        url_tmpl_batch = '%(server_uri)s/servicesNS/%(owner)s/%(app)s/storage/collections/data/%(collection)s/batch_save?output_mode=json'
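        # The batch_save endpoint accepts a JSON array of records in one POST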
        record_url = url_tmpl_batch % dict(
            server_uri=payload.get('server_uri'),
            owner='nobody',
            app=urllib.parse.quote(
Example no. 20
    def stream(self, events):
        try:
            cfg = cli.getConfStanza('kvstore_tools', 'settings')
        except BaseException as e:
            eprint("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(cfg["log_level"], 'kvstore_tools.log',
                                  facility)
        except BaseException as e:
            eprint("Could not create logger: " + repr(e))
            print("Could not create logger: " + repr(e))
            exit(1)

        logger.info('Script started by %s' %
                    self._metadata.searchinfo.username)

        if self.app:
            logger.debug('App: %s' % self.app)
        else:
            self.app = self._metadata.searchinfo.app

        if self.collection:
            logger.debug('Collection: %s' % self.collection)
        else:
            logger.critical("No collection specified. Exiting.")
            print("Error: No collection specified.")
            exit(1)

        if self.outputkeyfield:
            logger.debug('Output Key Field: %s' % self.outputkeyfield)
        else:
            self.outputkeyfield = self.collection + "_key"

        if self.outputvalues:
            logger.debug('Output Values: %s' % self.outputvalues)
        else:
            self.outputvalues = ""

        if self.delimiter:
            logger.debug('Delimiter: %s' % self.delimiter)
        else:
            self.delimiter = ","

        if self.groupby:
            logger.debug('Group by field: %s' % self.groupby)
        else:
            self.groupby = None

        opts = {}
        opts["owner"] = "nobody"
        opts["token"] = self._metadata.searchinfo.session_key
        opts["app"] = self.app

        #epoch_time = int(time.time())
        current_user = self._metadata.searchinfo.username

        lookup_output_kvpairs = []

        # Static output fields are literal values that are given within the search command arguments
        # e.g. "lookup_field1=value1"
        static_output_fields = {}
        # variable output fields are values taken from the events and pushed into the lookup record
        # as events are processed
        # e.g. "lookup_field2=$sourcetype$"
        variable_output_fields = {}
        resolved_variables = {}

        # Check for lockfile from previous invocations for this search ID
        dispatch = self._metadata.searchinfo.dispatch_dir
        static_kvfields_file = os.path.join(dispatch, "kvfields_static")  #dict
        variable_kvfields_file = os.path.join(dispatch,
                                              "kvfields_variable")  #dict
        resolved_variables_file = os.path.join(dispatch,
                                               "resolved_variables")  #dict

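        # These dispatch-dir files persist parsed arguments and resolved keys
        # across the multiple chunked invocations of this streaming command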
        try:
            if os.path.isfile(static_kvfields_file):
                with open(static_kvfields_file, 'r') as f:
                    # Set static kvfields values
                    static_output_fields = json.loads(f.read())  #dict
            if os.path.isfile(variable_kvfields_file):
                with open(variable_kvfields_file, 'r') as f:
                    # Set variable kvfields values
                    variable_output_fields = json.loads(f.read())  #dict

            # Connect to the kv store
            service = connect(**opts)
            if self.collection in service.kvstore:
                obj_collection = service.kvstore[self.collection]
            else:
                logger.critical("KVStore not found: %s" % self.collection)
                print('KVStore not found: %s' % self.collection)
                exit(1)

            # First invocation - build the lists for static and variable values
            if static_output_fields == {} and variable_output_fields == {}:

                # Split the key-value pairs argument into individual key-value pairs
                # Account for quoted string values and delimiters within the quoted value
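                # The {} in the pattern is filled with the runtime delimiter,
                # so unquoted values end at the delimiter character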
                kvpair_split_re = r'([^=]+=(?:"[^"\\]*(?:\\.[^"\\]*)*"|[^{}]+))'.format(
                    self.delimiter)
                x = re.findall(kvpair_split_re, self.outputvalues)
                for i in x:
                    i = i.strip(self.delimiter).strip()
                    lookup_output_kvpairs.append(i)

                for lof in lookup_output_kvpairs:
                    k, v = lof.split("=", 1)  # split once so values may contain '='
                    k = k.strip()
                    v = v.strip().strip('"').replace('\\"', '"')
                    logger.debug("k = %s, v = %s" % (k, v))

                    # Replace special values
                    v = v.replace("$kv_current_userid$", current_user)
                    v = v.replace("$kv_now$", str(time.time()))

                    # Value starts and ends with $ - variable field
                    if len(v) > 1 and v.startswith('$') and v.endswith('$'):
                        # Add to the list of variable fields
                        variable_output_fields[k] = v.strip('$')
                    else:
                        # Add to the list of static fields
                        static_output_fields[k] = v
                logger.info(
                    "Unpacked %d static and %d variable fields from arguments"
                    % (len(list(static_output_fields.keys())),
                       len(list(variable_output_fields.keys()))))

                # Write the static payload to the file
                # File doesn't exist. Open/claim it.
                with open(static_kvfields_file, 'w') as f:
                    f.write(
                        json.dumps(static_output_fields, ensure_ascii=False))
                with open(variable_kvfields_file, 'w') as f:
                    f.write(
                        json.dumps(variable_output_fields, ensure_ascii=False))

        except BaseException as e:
            logger.critical('Error connecting to collection: %s' % repr(e),
                            exc_info=True)
            print('Error connecting to collection: %s' % repr(e))
            exit(1)

        # Read the events, resolve the variables, store them on a per-groupby-fieldvalue basis
        i = 0
        inserts = 0
        for e in events:
            update = False
            # (Re)read the latest data
            if os.path.isfile(resolved_variables_file):
                with open(resolved_variables_file, 'r') as f:
                    # Open in non-blocking mode
                    fd = f.fileno()
                    flag = fcntl.fcntl(fd, fcntl.F_GETFL)
                    fcntl.fcntl(fd, fcntl.F_SETFL, flag | os.O_NONBLOCK)
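                    # Note: fcntl is POSIX-only, so this command assumes a
                    # Unix-based Splunk host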
                    # Set static kvfields values
                    resolved_variables = json.loads(
                        f.read())  #dict [groupby value][field name]
            if self.groupby is not None:
                groupby_value = e[self.groupby]
            else:
                # Make this value the same for every event (no group-by)
                groupby_value = '____placeholder'

            new_kv_record = {}
            if groupby_value in list(resolved_variables.keys()):
                # Set the previously recorded key value for this group-by value within the event
                kvstore_entry_key = resolved_variables[groupby_value]["_key"]

                # We've already resolved the variables for this groupby, but see if any are not populated
                for lookup_field, event_field in list(
                        variable_output_fields.items()):
                    if lookup_field not in list(
                            resolved_variables[groupby_value].keys()):
                        if event_field in list(e.keys()):
                            if e[event_field] is not None and e[
                                    event_field] != '':
                                resolved_variables[groupby_value][
                                    lookup_field] = e[event_field]
                                new_kv_record[lookup_field] = e[event_field]
                                update = True
                if update:
                    # Update the collection
                    new_kv_record.update(static_output_fields)
                    response = obj_collection.data.update(
                        kvstore_entry_key, json.dumps(new_kv_record))

                    # Write the data to disk immediately so other threads can benefit
                    with open(resolved_variables_file, 'w') as f:
                        f.write(
                            json.dumps(resolved_variables, ensure_ascii=False))

            else:
                # First time we're seeing this groupby value. Resolve variables and write the KV store record.
                # Define the dictionary
                resolved_variables[groupby_value] = {}
                # Update the static values
                new_kv_record = static_output_fields.copy()

                # Resolve the variables
                for lookup_field, event_field in list(
                        variable_output_fields.items()):
                    if event_field in list(e.keys()):
                        if e[event_field] is not None:
                            resolved_variables[groupby_value][
                                lookup_field] = e[event_field]
                            new_kv_record[lookup_field] = e[event_field]

                # Write the new kvstore record and get the ID (_key)
                response = obj_collection.data.insert(
                    json.dumps(new_kv_record))
                kvstore_entry_key = response["_key"]
                resolved_variables[groupby_value]["_key"] = kvstore_entry_key

                # Write the data to disk immediately so other threads can benefit
                with open(resolved_variables_file, 'w') as f:
                    f.write(json.dumps(resolved_variables, ensure_ascii=False))
                    inserts += 1

            # Write the KV store record's _key value to the event
            e[self.outputkeyfield] = kvstore_entry_key

            yield e
            i += 1
        logger.info("Modified %d events and inserted %s new records into %s" %
                    (i, inserts, self.collection))
Example no. 21
    def reduce(self, events):
        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_aws_s3')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('AWS S3 Export search command initiated')
        logger.debug("Configuration: " + str(cmd_config))
        logger.debug('search_ep_awss3 command: %s', self)  # logs command line

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        # Build the configuration
        try:
            aws_config = get_config_from_alias(session_key, cmd_config,
                                               self.target)
            if aws_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
            logger.debug("Target configuration: " + str(aws_config))
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        if self.bucket is None:
            if 'default_s3_bucket' in list(aws_config.keys()):
                t = aws_config['default_s3_bucket']
                if t is not None and len(t) > 0:
                    self.bucket = t
                else:
                    exit_error(logger, "No bucket specified", 4)
            else:
                exit_error(logger, "No bucket specified", 5)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'

        if self.outputfile is None:
            # Boto is special. We need repr to give it the encoding it expects to match the hashing.
            self.outputfile = repr('export_' + user + '___now__' +
                                   file_extensions[self.outputformat]).strip(
                                       "'")

        # Replace keywords from output filename
        self.outputfile = replace_keywords(self.outputfile)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = str2bool(aws_config['compress'])
            except:
                self.compress = False

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))
        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)

        # Append .gz to the output file if compress=true
        if not self.compress and len(self.outputfile) > 3:
            # We have a .gz extension when compression was not specified. Enable compression.
            if self.outputfile[-3:] == '.gz':
                self.compress = True
        elif self.compress and len(self.outputfile) > 3:
            if self.outputfile[-3:] != '.gz':
                self.outputfile = self.outputfile + '.gz'

        if self.compress:
            local_output_file = local_output_file + '.gz'

        logger.debug("Staging file: %s" % local_output_file)
        try:
            s3 = get_aws_connection(aws_config)
        except BaseException as e:
            exit_error(logger, "Could not connect to AWS: " + repr(e), 741423)

        event_counter = 0
        # Write the output file to disk in the dispatch folder
        logger.debug(
            "Writing events to file %s in %s format. Compress=%s\n\tfields=%s",
            local_output_file, self.outputformat, self.compress, self.fields)
        for event in event_file.write_events_to_file(events, self.fields,
                                                     local_output_file,
                                                     self.outputformat,
                                                     self.compress):
            yield event
            event_counter += 1

        # Upload file to s3
        try:
            with open(local_output_file, "rb") as f:
                s3.upload_fileobj(f, self.bucket, self.outputfile)
            logger.info(
                "Successfully exported events to s3. app=%s count=%s bucket=%s file=%s user=%s"
                % (app, event_counter, self.bucket, self.outputfile, user))
            os.remove(local_output_file)
        except s3.exceptions.NoSuchBucket:
            exit_error(logger, "Error: No such bucket", 123833)
        except BaseException as e:
            exit_error(logger, "Could not upload file to S3: " + repr(e), 9)
        finally:
            # Release the client reference here; clearing it inside the try
            # block would break the s3.exceptions lookup in the handler above
            s3 = None