Example #1
    def get_template_file(self, template_file_name):

        self.log.debug("Parsed template file from settings: {}".format(
            template_file_name))

        local_file = os.path.join(util.get_apps_dir(), "alert_manager",
                                  "local", "templates", template_file_name)
        default_file = os.path.join(util.get_apps_dir(), "alert_manager",
                                    "default", "templates", template_file_name)

        if os.path.isfile(local_file):
            self.log.debug("{} exists in local, using this one...".format(
                template_file_name))
            return local_file
        else:
            self.log.debug(
                "{} not found in local folder, checking if there's one in default..."
                .format(template_file_name))
            if os.path.isfile(default_file):
                self.log.debug(
                    "{} exists in default, using this one...".format(
                        template_file_name))
                return default_file
            else:
                self.log.debug(
                    "{} doesn't exist at all, stopping here.".format(
                        template_file_name))
                return False
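
The local-over-default lookup above is a recurring pattern in these examples. A minimal standalone sketch of it, assuming only that SPLUNK_HOME is set (the app and template names are hypothetical):

import os

def resolve_app_file(app, *relpath):
    # Prefer the "local" copy of an app file, fall back to "default", else None
    apps_dir = os.path.join(os.environ["SPLUNK_HOME"], "etc", "apps")
    for folder in ("local", "default"):
        candidate = os.path.join(apps_dir, app, folder, *relpath)
        if os.path.isfile(candidate):
            return candidate
    return None

# e.g. resolve_app_file("alert_manager", "templates", "default.html")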
Example #2
def get_internal_shared_key():
    # create the shared auth key and save it to a file for IPC with splunkd REST handlers
    shared_key_file = os.path.join(util.get_apps_dir(), 'splunk_app_stream',
                                   'local', 'stream_shared_key')
    auth_key = ""
    try:
        shared_key_file_mtime = int(os.stat(shared_key_file).st_mtime)
    except Exception as e:
        shared_key_file_mtime = 0
    now = int(time.time())
    if shared_key_file_mtime + 86400 < now:
        auth_key = str(uuid.uuid4())
        # save it to shared file
        try:
            base_local_dir = os.path.join(util.get_apps_dir(),
                                          'splunk_app_stream', 'local')
            if not os.path.exists(base_local_dir):
                createDir(base_local_dir + os.sep)
            with open(shared_key_file, 'w+') as f:
                f.write(auth_key)
        except Exception:
            logger.error('Unable to create the shared key file')
    else:
        # key is still fresh; reuse it and close the handle promptly
        with open(shared_key_file, 'r') as f:
            auth_key = f.read()

    return auth_key
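
Stripped of the Splunk-specific paths, get_internal_shared_key() reduces to an mtime-based rotation check. A compact sketch of just that logic (the path argument is an assumption):

import os
import time
import uuid

def rotate_key(path, max_age=86400):
    # reuse the key stored at `path` unless its mtime is older than max_age seconds
    try:
        age = time.time() - os.stat(path).st_mtime
    except OSError:
        age = max_age + 1  # missing file forces regeneration
    if age > max_age:
        key = str(uuid.uuid4())
        with open(path, 'w') as f:
            f.write(key)
        return key
    with open(path) as f:
        return f.read()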
Example #3
    def __init__(self, sessionKey):
        self.sessionKey = sessionKey

        # Setup template paths: local templates override the app defaults,
        # so the loader searches the local directory first
        local_dir = os.path.join(util.get_apps_dir(), "alert_manager",
                                 "local", "templates")
        default_dir = os.path.join(util.get_apps_dir(), "alert_manager",
                                   "default", "templates")
        loader = FileSystemLoader([local_dir, default_dir])
        self.env = Environment(loader=loader,
                               variable_start_string='$',
                               variable_end_string='$')

        # TODO: Add support for custom filters
        self.env.filters['get_type'] = get_type

        # Get mailserver settings from splunk
        uri = '/servicesNS/nobody/system/configs/conf-alert_actions/email?output_mode=json'
        serverResponse, serverContent = rest.simpleRequest(
            uri, sessionKey=self.sessionKey)
        server_settings = json.loads(serverContent.decode('utf-8'))
        server_settings = server_settings["entry"][0]["content"]
        #self.log.debug("server settings from splunk: {}".format(json.dumps(server_settings)))

        self.default_sender = server_settings['from']

        use_ssl = False
        if server_settings['use_ssl']:
            use_ssl = True

        use_tls = False
        if server_settings['use_tls']:
            use_tls = True

        # Configure django settings
        clear_pass = ''
        if 'clear_password' in server_settings:
            clear_pass = server_settings['clear_password']

        auth_username = ""
        if 'auth_username' in server_settings:
            auth_username = server_settings['auth_username']

        mail_server = "localhost"
        if 'mailserver' in server_settings:
            mail_server = server_settings['mailserver']

        self.settings = {
            "MAIL_SERVER": mail_server,
            "EMAIL_HOST_USER": auth_username,
            "EMAIL_HOST_PASSWORD": clear_pass,
            "EMAIL_USE_TLS": use_tls,
            "EMAIL_USE_SSL": use_ssl
        }
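
The '$' delimiters passed to the Environment replace Jinja2's default '{{ ... }}' variable syntax. A small self-contained illustration (the template text is made up):

from jinja2 import Environment

env = Environment(variable_start_string='$', variable_end_string='$')
template = env.from_string("Incident $incident_id$ assigned to $owner$")
print(template.render(incident_id="INC-1", owner="admin"))
# prints: Incident INC-1 assigned to admin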
Example #4
    def add_browser_driver_to_path(cls, logger=None):

        driver_path = None

        if sys.platform == "linux2":
            driver_path = "linux64"
        else:
            # Note that Windows will always return win32 (even on 64-bit hosts)
            # See http://bit.ly/2Dq6xM5
            driver_path = sys.platform

        full_driver_path = os.path.join(get_apps_dir(), "website_input", "bin",
                                        "browser_drivers", driver_path)

        if not full_driver_path in os.environ["PATH"]:

            # Use the correct path separator per the platform
            # https://lukemurphey.net/issues/1782
            if os.name == 'nt':
                os.environ["PATH"] += ";" + full_driver_path
            else:
                os.environ["PATH"] += ":" + full_driver_path

            if logger:
                logger.debug(
                    "Updating path to include selenium driver path=%s, working_path=%s",
                    full_driver_path, os.getcwd())
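
The os.name branch above hand-picks the PATH separator; the standard library already exposes it as os.pathsep (";" on Windows, ":" elsewhere), so an equivalent sketch needs no branch:

import os

def add_to_path(directory):
    # os.pathsep is the platform's PATH separator, so no os.name check is needed
    if directory not in os.environ.get("PATH", ""):
        os.environ["PATH"] = os.environ.get("PATH", "") + os.pathsep + directory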
Example #5
 def read_lookup(self, lookup_filename, primary_key=None):
     try:
         lookups_dir = os.path.join(get_apps_dir(), self._app_name,
                                    "lookups")
         lookup_file = os.path.join(lookups_dir, lookup_filename)
         lookup_dict = {"lookup": [], "primary_keys": []}
         if not os.path.isfile(lookup_file):
             return lookup_dict
         with open(lookup_file, mode='r') as infile:
             reader = csv.DictReader(infile)
             for row in reader:
                 if primary_key is not None and primary_key in row:
                     lookup_dict["primary_keys"].append(row[primary_key])
                 lookup_dict["lookup"].append(row)
         return lookup_dict
     except Exception as e:
         exc_type, exc_obj, exc_tb = sys.exc_info()
         fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
         jsondump = {
             "message": str((e)),
             "exception_type": "%s" % type(e),
             "exception_arguments": "%s" % e,
             "filename": fname,
             "line": exc_tb.tb_lineno
         }
         raise Exception(json.dumps(jsondump))
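
To make the returned structure concrete, here is the same DictReader loop run against an in-memory CSV (the file contents and the "host" primary key are assumptions):

import csv
import io

sample = io.StringIO("host,owner\nweb01,alice\ndb01,bob\n")
reader = csv.DictReader(sample)
lookup_dict = {"lookup": [], "primary_keys": []}
for row in reader:
    if "host" in row:  # corresponds to primary_key="host"
        lookup_dict["primary_keys"].append(row["host"])
    lookup_dict["lookup"].append(row)
print(lookup_dict["primary_keys"])  # ['web01', 'db01']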
Example #6
    def save_categories(self, app, action, **params):
        user = cherrypy.session['user']['name']
        host_app = cherrypy.request.path_info.split('/')[3]
        this_app = App.get(App.build_id(host_app, host_app, user))

        for param in params:
            data = json.loads(param)

        csvData = []
        csvOrder = [1,2,0]

        #logger.error('before: %s' % data)
        self.tree_to_csv(csvData, data, csvOrder, [0,0,0])
        csvHeader = ["host", "unix_category", "unix_group"]
        csvData.insert(0, csvHeader)
        #logger.error('after: %s' % csvData)

        dropdownsCsv = os.path.join(util.get_apps_dir(), 'splunk_app_for_nix', 'lookups', 'dropdowns.csv')
        with open(dropdownsCsv, 'wb') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerows(csvData)

        session_key = cherrypy.session.get('sessionKey')
        success, errcode, reason = self._force_lookup_replication('splunk_app_for_nix', 'dropdowns.csv', session_key)
        logger.info('force lookup replication: %s, %s, %s' % (success, errcode, reason))
Example #7
    def show_categories(self, app, action, **params):
        ''' render the unix categories page '''

        host_app = cherrypy.request.path_info.split('/')[3]

        csvData = []
        lookupCSV = os.path.join(util.get_apps_dir(), 'SA-nix', 'lookups',
                                 'dropdowns.csv')
        with open(lookupCSV, 'rb') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                if len(row) == 3:
                    csvData.append(row)

        csvKey = csvData[0]
        keyOrder = ['unix_category', 'unix_group', 'host']
        order = self.getOrder(keyOrder, csvKey)
        csvData = csvData[1:len(csvData)]

        tree = {}
        self.build_tree(csvData, tree, order)
        tree = json.dumps(tree)


        return self.render_template('/%s:/templates/unixSetup/setup_categories.html' \
                                    % host_app,
                                    dict(host_app=host_app, app=app, csvData=csvData, csvKey=csvKey, tree=tree))
Example #8
def setupLogger(logger='freezer_inventory'):

    # logger
    fileName = 'freezer_inventory.log'
    if logger != 'freezer_inventory':
        fileName = 'freezer_inventory_%s.log' % logger
        logger = 'freezer_inventory_%s' % logger

    # Get loglevel from config file
    local = os.path.join(util.get_apps_dir(), "FreezerInventoryAppForSplunk",
                         "local", "freezer_inventory.conf")
    default = os.path.join(util.get_apps_dir(), "FreezerInventoryAppForSplunk",
                           "default", "freezer_inventory.conf")

    config = ConfigParser.ConfigParser()

    try:
        config.read(local)
        rootLevel = config.get('logging', 'rootLevel')
    except:
        config.read(default)
        rootLevel = config.get('logging', 'rootLevel')

    try:
        logLevel = config.get('logging', 'logger.%s' % logger)
    except:
        logLevel = rootLevel

    # Setup logger
    log = logging.getLogger(logger)
    lf = os.path.join(os.environ.get('SPLUNK_HOME'), "var", "log", "splunk",
                      fileName)
    fh = logging.handlers.RotatingFileHandler(lf,
                                              maxBytes=25000000,
                                              backupCount=5)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)-6s pid="%(process)s" logger="%(name)s" message="%(message)s" (%(filename)s:%(lineno)s)'
    )
    formatter.converter = time.gmtime
    fh.setFormatter(formatter)
    log.addHandler(fh)
    level = logging.getLevelName(logLevel)
    log.setLevel(level)

    return log
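
Typical usage of this setup function would be a sketch like the following (the logger name is hypothetical):

# writes to freezer_inventory_inventory.log under $SPLUNK_HOME/var/log/splunk
log = setupLogger('inventory')
log.info("inventory scan started")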
Example #9
def getLookupFile(lookup_name, sessionKey):
    uri = '/servicesNS/nobody/alert_manager/data/transforms/lookups/%s' % lookup_name
    lookup = getRestData(uri, sessionKey)
    #log.debug("getLookupFile(): lookup: %s" % json.dumps(lookup))
    log.debug("Got lookup content for lookup=%s. filename=%s app=%s" %
              (lookup_name, lookup["entry"][0]["content"]["filename"],
               lookup["entry"][0]["acl"]["app"]))
    return os.path.join(util.get_apps_dir(), lookup["entry"][0]["acl"]["app"],
                        'lookups', lookup["entry"][0]["content"]["filename"])
Example #10
def getLookupFile(lookup_name, sessionKey):
    try:
        uri = '/servicesNS/nobody/alert_manager/data/transforms/lookups/%s?output_mode=json' % lookup_name
        serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey)
        lookup = json.loads(serverContent)
        log.debug("Got lookup content for lookup=%s. filename=%s app=%s" % (lookup_name, lookup["entry"][0]["content"]["filename"], lookup["entry"][0]["acl"]["app"]))
        return os.path.join(util.get_apps_dir(), lookup["entry"][0]["acl"]["app"], 'lookups', lookup["entry"][0]["content"]["filename"])
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        log.warn("Unable to get lookup %s. Reason: %s. Line: %s" % (lookup_name, e, exc_tb.tb_lineno))
        return ""
Example #11
    def _get_email_template_files(self, sessionKey, query_params):
        logger.debug("START _get_email_template_files()")

        file_list = []

        file_default_dir = os.path.join(util.get_apps_dir(), "alert_manager", "default", "templates")
        if os.path.exists(file_default_dir):
            for f in os.listdir(file_default_dir):
                if re.match(r'.*\.html', f):
                    if f not in file_list:
                        file_list.append(f)

        file_local_dir = os.path.join(util.get_apps_dir(), "alert_manager", "local", "templates")
        if os.path.exists(file_local_dir):
            for f in os.listdir(file_local_dir):
                if re.match(r'.*\.html', f):
                    if f not in file_list:
                        file_list.append(f)

        return self.response(file_list, httplib.OK)
Example #12
    def _get_email_template_files(self, sessionKey, query_params):
        logger.debug("START _get_email_template_files()")

        file_list = []

        file_default_dir = os.path.join(util.get_apps_dir(), "alert_manager",
                                        "default", "templates")
        if os.path.exists(file_default_dir):
            for f in os.listdir(file_default_dir):
                if re.match(r'.*\.html', f):
                    if f not in file_list:
                        file_list.append(f)

        file_local_dir = os.path.join(util.get_apps_dir(), "alert_manager",
                                      "local", "templates")
        if os.path.exists(file_local_dir):
            for f in os.listdir(file_local_dir):
                if re.match(r'.*\.html', f):
                    if f not in file_list:
                        file_list.append(f)

        return self.response(file_list, http.client.OK)
Example #13
def setupLogger(logger='alert_manager'):

	# logger
	fileName = 'alert_manager.log'
	if logger != 'alert_manager':
		fileName = 'alert_manager_%s.log' % logger
		logger = 'alert_manager_%s' % logger

	# Get loglevel from config file
	local = os.path.join(util.get_apps_dir(), "alert_manager", "local", "alert_manager.conf")
	default = os.path.join(util.get_apps_dir(), "alert_manager", "default", "alert_manager.conf")

	config = ConfigParser.ConfigParser()

	try:
		config.read(local)
		rootLevel = config.get('logging', 'rootLevel')
	except:
		config.read(default)
		rootLevel = config.get('logging', 'rootLevel')

	try:
		logLevel = config.get('logging', 'logger.%s' % logger)
	except:
		logLevel = rootLevel

	# Setup logger
	log = logging.getLogger(logger)
	lf = os.path.join(os.environ.get('SPLUNK_HOME'), "var", "log", "splunk", fileName)
	fh = logging.handlers.RotatingFileHandler(lf, maxBytes=25000000, backupCount=5)
	formatter = logging.Formatter('%(asctime)s %(levelname)-6s pid="%(process)s" logger="%(name)s" message="%(message)s" (%(filename)s:%(lineno)s)')
	fh.setFormatter(formatter)
	log.addHandler(fh)
	level = logging.getLevelName(logLevel)
	log.setLevel(level)

	return log
Example #14
    def __init__(self, file_path='', lookup_name='', sessionKey=''):
        # Reset on init to avoid strange caching effects
        self.csv_data = []

        log.debug("file_path: '%s', lookup_name: '%s'" %
                  (file_path, lookup_name))

        if file_path == '':
            if lookup_name == '':
                raise Exception("No file_path or lookup_name specified.")
            else:
                if sessionKey == '':
                    raise Exception(
                        "No sessionKey provided, unable to query REST API.")
                else:
                    # Get csv name from API
                    uri = '/servicesNS/nobody/alert_manager/data/transforms/lookups/%s' % lookup_name
                    serverResponse, serverContent = rest.simpleRequest(
                        uri,
                        sessionKey=sessionKey,
                        method='GET',
                        getargs={'output_mode': 'json'})
                    try:
                        lookup = json.loads(serverContent)
                        file_path = os.path.join(
                            util.get_apps_dir(),
                            lookup["entry"][0]["acl"]["app"], 'lookups',
                            lookup["entry"][0]["content"]["filename"])
                        log.debug(
                            "Got file_path=%s from REST API for lookup_name=%s"
                            % (file_path, lookup_name))
                    except:
                        log.error("Unable to retrieve lookup.")
                        raise Exception("Unable to retrieve lookup.")
        else:
            log.debug("file_path=%s is set, don't have to query the API." %
                      file_path)

        if not os.path.exists(file_path):
            log.error("Wasn't able to find file_path=%s, aborting." %
                      file_path)
            raise Exception("File %s not found." % file_path)

        else:
            with open(file_path) as fh:
                reader = csv.DictReader(fh)

                for row in reader:
                    self.csv_data.append(row)
Example #15
    def generateResults(self, **kwargs):

        app_name = kwargs.get('client_app', STATIC_APP)
        legacy_setup = os.path.join(app_util.get_apps_dir(), app_name,
                                    'default', 'setup.xml')
                                    
        if os.path.exists(legacy_setup):
            shutil.move(legacy_setup, legacy_setup + '.bak')
            logger.info('disabled legacy setup.xml for %s' % app_name)
            
        for app in App.all():
            if app.name in CONFLICT_APPS and not app.is_disabled:
                return self.render_json({'is_conflict': True, 'app_label': app.label})

        return self.render_json({})
Example #16
    def verify_input_exists(self, host_app, user):
    
        name = os.path.join(apps_util.get_apps_dir(), host_app, 
                            'bin', 'scripted_inputs', 'dm_backfill_factory.py')

        try:
            scripted = ScriptedInput.get(ScriptedInput.build_id(name, host_app, user))
            scripted.disabled = False
        except:
            scripted = ScriptedInput(host_app, user, name)
            scripted.pass_auth = 'admin'
            scripted.disabled = False
            scripted.interval = 15

        if not scripted.passive_save():
            logger.error(scripted.errors[0])
            return False

        return True 
Example #17
    def generateResults(self, **kwargs):

        if not (sys.platform.startswith("win")):
            return self.render_json({'is_windows': False})

        app_name = kwargs.get('client_app', STATIC_APP)
        app_dir = os.path.join(app_util.get_apps_dir(), app_name)

        legacy_js = os.path.join(app_dir, 'appserver', 'static', 'application.js')
        legacy_handler = os.path.join(app_dir, 'bin', 'setuphandler.py')
        legacy_restmap = os.path.join(app_dir, 'default', 'restmap.conf')
        legacy_setup = os.path.join(app_dir, 'default', 'setup.xml')
                                    
        for legacy in [legacy_js, legacy_handler, legacy_restmap, legacy_setup]:
            if os.path.exists(legacy):
                shutil.move(legacy, legacy + '.bak')
                logger.info('disabled legacy setup component %s for app %s' % (legacy, app_name))

        return self.render_json({})
Example #18
    def load_categories(self, app, action, **params):
        host_app = cherrypy.request.path_info.split('/')[3]

        csvData = []
        lookupCSV = os.path.join(util.get_apps_dir(), 'splunk_app_for_nix', 'lookups', 'dropdowns.csv')
        with open(lookupCSV, 'rb') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                csvData.append(row)

        csvHeaders = csvData[0] # this must contain all the column names
        keyOrder = ['unix_category', 'unix_group', 'host']
        order = self.getOrder(keyOrder, csvHeaders)
        csvData = csvData[1:len(csvData)]

        tree = {}
        self.build_tree(csvData, tree, order)

        return self.render_json(tree)
Example #19
def is_kv_store_supported_in_splunk(sessionKey=None):
    # check the version for 6.3+ and if KV store is enabled
    global is_supported

    if is_supported is not None:
        return is_supported

    splunk_version = int(splunk.getReleaseVersion().replace('.', ''))
    data_store = readAsJson(data_store_file)

    is_supported = True
    if data_store != 'NotFound':
        if data_store['type'] == 'kv_store':
            return is_supported

    data_store = {}
    data_store['type'] = 'json_file'
    if splunk_version > 630:
        kvStoreStatus = get_kv_store_status(sessionKey)
        logger.info("is_kv_store_supported_in_splunk kvStore Status :: %s" %
                    kvStoreStatus)
        if kvStoreStatus != 'unavailable' and kvStoreStatus != 'unknown':
            # save the state in a datastore file
            data_store['type'] = 'kv_store'
        else:
            is_supported = False
    else:
        is_supported = False

    # save it to file
    try:
        base_local_dir = os.path.join(util.get_apps_dir(), 'splunk_app_stream',
                                      'local')
        if not os.path.exists(base_local_dir):
            createDir(base_local_dir + os.sep)
        f = open(data_store_file, 'w+')
        f.write(json.dumps(data_store, sort_keys=True, indent=2))
        f.close()
    except:
        logger.error('Unable to create the data_store file')

    return is_supported
Example #20
    def verify_input_exists(self, host_app, user):

        name = os.path.join(apps_util.get_apps_dir(), host_app, 'bin',
                            'scripted_inputs', 'dm_backfill_factory.py')

        try:
            scripted = ScriptedInput.get(
                ScriptedInput.build_id(name, host_app, user))
            scripted.disabled = False
        except:
            scripted = ScriptedInput(host_app, user, name)
            scripted.pass_auth = 'admin'
            scripted.disabled = False
            scripted.interval = 15

        if not scripted.passive_save():
            logger.error(scripted.errors[0])
            return False

        return True
Example #21
    def generateResults(self, **kwargs):

        if not (sys.platform.startswith("win")):
            return self.render_json({'is_windows': False})

        app_name = kwargs.get('client_app', STATIC_APP)
        app_dir = os.path.join(app_util.get_apps_dir(), app_name)

        legacy_js = os.path.join(app_dir, 'appserver', 'static',
                                 'application.js')
        legacy_handler = os.path.join(app_dir, 'bin', 'setuphandler.py')
        legacy_restmap = os.path.join(app_dir, 'default', 'restmap.conf')
        legacy_setup = os.path.join(app_dir, 'default', 'setup.xml')

        for legacy in [
                legacy_js, legacy_handler, legacy_restmap, legacy_setup
        ]:
            if os.path.exists(legacy):
                shutil.move(legacy, legacy + '.bak')
                logger.info('disabled legacy setup component %s for app %s' %
                            (legacy, app_name))

        return self.render_json({})
Example #22
 def write_lookup(self, lookup_filename, data):
     try:
         lookups_dir = os.path.join(get_apps_dir(), self._app_name,
                                    "lookups")
         if not os.path.isdir(lookups_dir):
             os.mkdir(lookups_dir, mode=0o744)
         lookup_file = os.path.join(lookups_dir, lookup_filename)
         mode = 'w'
         if not os.path.isfile(lookup_file):
             mode = 'w+'
         header = []
         for row in data:
             for item in row:
                 if row[item] is None:
                     row[item] = ""
                 if item not in header:
                     header.append(item)
                 if len("{}".format(row[item])) > 0:
                     row[item] = "{}".format(row[item]).strip()
         self._debug("{}".format(json.dumps(header)))
         with open(lookup_file, mode) as outfile:
             writer = csv.DictWriter(outfile, fieldnames=header)
             writer.writeheader()
             for row in data:
                 writer.writerow(row)
         return data
     except Exception as e:
         exc_type, exc_obj, exc_tb = sys.exc_info()
         fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
         jsondump = {
             "message": str((e)),
             "exception_type": "%s" % type(e),
             "exception_arguments": "%s" % e,
             "filename": fname,
             "line": exc_tb.tb_lineno
         }
         raise Exception(json.dumps(jsondump))
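
The header-union behavior is the subtle part of write_lookup: rows may have differing keys, and the union of all keys becomes the CSV header. A standalone sketch with made-up row data:

import csv
import io

rows = [
    {"host": "web01", "owner": "alice"},
    {"host": "db01", "owner": None, "tier": "prod"},
]
# union the keys across all rows into one header, replacing None with ""
header = []
for row in rows:
    for key in row:
        if row[key] is None:
            row[key] = ""
        if key not in header:
            header.append(key)

out = io.StringIO()
writer = csv.DictWriter(out, fieldnames=header)  # restval="" fills missing columns
writer.writeheader()
writer.writerows(rows)
print(out.getvalue())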
Example #23
    def add_browser_driver_to_path(cls, logger=None):

        driver_path = None

        if sys.platform == "linux2":
            driver_path = "linux64"
        else:
            # Note that Windows will always return win32 (even on 64-bit hosts)
            # See http://bit.ly/2Dq6xM5
            driver_path = sys.platform

        full_driver_path = os.path.join(get_apps_dir(), "website_input", "bin", "browser_drivers", driver_path)

        if not full_driver_path in os.environ["PATH"]:

            # Use the correct path separator per the platform
            # https://lukemurphey.net/issues/1782
            if os.name == 'nt':
                os.environ["PATH"] += ";" + full_driver_path
            else:
                os.environ["PATH"] += ":" + full_driver_path

            if logger:
                logger.debug("Updating path to include selenium driver path=%s, working_path=%s", full_driver_path, os.getcwd())
Example #24
import logging
import os
import sys
import json
import cherrypy
import shutil
import cgi

from splunk.search import *
import splunk.appserver.mrsparkle.controllers as controllers
import splunk.appserver.mrsparkle.lib.util as util
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route

dir = os.path.join(util.get_apps_dir(), __file__.split('.')[-2], 'bin')
if not dir in sys.path:
    sys.path.append(dir)

logger = logging.getLogger('splunk.appserver.controllers.samplefile')

appsDir = util.get_apps_dir()

CONFIG_URI = '/servicesNS/nobody/%s/eventgen/eventgen_conf'


def list_samplefiles(app=""):
    """Retreive a list of samplefiles for a given app context"""
    app_dir = os.path.join(appsDir, app, "samples")
    try:
        filename_list = [
            filename for filename in os.listdir(app_dir) if os.path.isfile(os.path.join(app_dir, filename))
Example #25
# from splunk import AuthorizationFailed as AuthorizationFailed
import splunk
import splunk.appserver.mrsparkle.controllers as controllers
import splunk.appserver.mrsparkle.lib.util as util
import splunk.input as input
import splunk.bundle as bundle
import splunk.entity as entity
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import splunk.clilib.bundle_paths as bundle_paths
from splunk.util import normalizeBoolean as normBool
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
import splunk.rest as rest

dir = os.path.join(util.get_apps_dir(), __file__.split(".")[-2], "bin")
if not dir in sys.path:
    sys.path.append(dir)

dir = os.path.join(os.environ.get("SPLUNK_HOME"), "etc", "apps", "alert_manager", "bin", "lib")
if not dir in sys.path:
    sys.path.append(dir)

from EventHandler import *
from IncidentContext import *


def setup_logger(level):
    """
    Setup a logger for the REST handler.
    """
Example #26
_SYS_EXIT_FAILED_GET_OAUTH_CREDENTIALS = 6
_SYS_EXIT_FAILURE_FIND_API = 5
_SYS_EXIT_OAUTH_FAILURE = 4
_SYS_EXIT_FAILED_CONFIG = 3

_APP_NAME = 'TA-gmail-audit'

# Necessary
_CRED = None
_DOMAIN = None

_SPLUNK_HOME = os.getenv("SPLUNK_HOME")
if _SPLUNK_HOME is None:
    _SPLUNK_HOME = make_splunkhome_path([""])

_APP_HOME = os.path.join(util.get_apps_dir(), _APP_NAME)
_app_local_directory = os.path.join(_APP_HOME, "local")
_BIN_PATH = os.path.join(_APP_HOME, "bin")

SCOPES = 'https://www.googleapis.com/auth/admin.directory.user'

kl = KennyLoggins()
# log = kl.get_logger(_APP_NAME, "modularinput", log.INFO)
log = kl.get_logger(_APP_NAME, "modularinput", log.DEBUG)

log.debug("logging setup complete")


def send_to_splunk(splunk_host, auth_token, payload, sourcetype, eventtime):
    """Sends an event to the HTTP Event collector of a Splunk Instance"""
Example #27
import ast
from stream_utils import *
from stream_kvstore_utils import *

import jsonschema
from jsonschema import *
from jsonschema.validators import *

from IPy import IP
from ping import Ping

logger = setup_logger('captureipaddress')

capture_addresses_dir = os.path.join(util.get_apps_dir(), 'splunk_app_stream', 'local', "captureipaddresses")
schema_file = os.path.join(util.get_apps_dir(), 'splunk_app_stream', 'default', "blacklist_whitelist_schema")
blacklist_whitelist_schema = None
default_capture_ip_addresses_ids = ['whitelist', 'blacklist']
run_once = True
# flag to update the app's last-updated time if the capture ip address json file is modified out of band
update_app_last_date_time = False

#kv store
use_kv_store = is_kv_store_supported_in_splunk()

# Last updated time used to refresh cache
dateLastUpdated = 0
capture_ip_addresses_json_map = {}
whitelist_kv_store = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/captureipaddresses/whitelist'
blacklist_kv_store = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/captureipaddresses/blacklist'
Example #28
import json
import logging
import os
import sys

import cherrypy
import splunk
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib import util as app_util
from splunk.models.field import FieldValue
import controllers.module as module

logger = logging.getLogger('splunk.appserver.controllers.module.CFHiddenSearch')

APPS_DIR = app_util.get_apps_dir()
STATIC_APP = __file__.split(os.sep)[-5]
APP_DIR = os.path.join(APPS_DIR, STATIC_APP)

# SPL-44264 - some platforms won't include 
# an app's ./bin directory in the sys.path
BIN_DIR = os.path.join(APP_DIR, 'bin')
if not BIN_DIR in sys.path:
    sys.path.append(BIN_DIR)

def fv2json(fv):
    jsobj = {key[0]:key[1].get() for key in fv.field.fields()}
    
    for k in jsobj.keys():
        if type(jsobj[k]) == FieldValue:
            jsobj[k] = fv2json(jsobj[k])
Example #29
import logging
import os, sys
import re
import splunk.appserver.mrsparkle.lib.util as util
from stream_utils import *
from stream_forwarder_group import StreamForwarderGroup
from stream_kvstore_utils import *

logger = setup_logger('indexer')

indexerCacheMaxAge = 10

# Cache indexers list and modified time of the file
indexersFile = os.path.join(util.get_apps_dir(), 'splunk_app_stream', 'local',
                            'indexers')
try:
    indexersFileMtime = int(os.stat(indexersFile).st_mtime)
    with open(indexersFile, 'r') as f:
        indexers = f.read()
except Exception as e:
    indexersFileMtime = 0
    indexers = '{"collectors":[]}'


# get indexers list by making API call to splunkd
def get_indexers():
    content = None
    auth_key = get_internal_shared_key()
    try:
        uri = 'splunk_app_stream/indexers?output_mode=json&X-SPLUNK-APP-STREAM-KEY=' + auth_key
        serverResponse, serverContent = splunk.rest.simpleRequest(
            make_url(uri, translate=False, relative=True, encode=False),
Example #30
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
from splunk.appserver.mrsparkle.lib.util import get_apps_dir, Popen
import splunk.appserver.mrsparkle.controllers as controllers
import splunk.entity as en

logger = logging.getLogger('splunk.appserver.mrsparkle.controllers.custom')


# define the python package space in which to house custom controllers
# that may be packaged with apps
VIRTUAL_PACKAGE_NAME = 'splunk.appserver.mrsparkle.custom_controllers'
VIRTUAL_PACKAGE_LENGTH = 4

# define the local filepath to the apps dir
ETC_PATH = get_apps_dir()

# define path segment within an apps dir
CONTROLLER_SUBPATH = os.path.join('appserver', 'controllers')


class ControllerMetaLoader(object):
    '''
    Unified import hook handler.  Implements base methods to support dynamic
    module importing via the meta hooks method.
    '''

    __qualified_apps = []


    @classmethod
Example #31
import urllib
import operator
import socket

if sys.version_info[0] < 3:
    import httplib  # python2
else:
    from http import HTTPStatus as httplib  # python3

import splunk
import splunk.appserver.mrsparkle.lib.util as util
import splunk.entity as entity
import splunk.rest as rest
import splunk.input as input

dir = os.path.join(util.get_apps_dir(), 'FreezerInventoryAppForSplunk', 'bin',
                   'lib')
if not dir in sys.path:
    sys.path.append(dir)

from FreezerInventoryLogger import *

logger = setupLogger('endpoint-items')

if sys.platform == "win32":
    import msvcrt
    # Binary mode is required for persistent mode on Windows.
    msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
    msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
Example #32
import sys

import cherrypy
import splunk
import splunk.auth as auth
import splunk.bundle as bundle
import splunk.util as util
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib import util as app_util
import controllers.module as module

from splunk.models.app import App

logger = logging.getLogger('splunk')

APPS_DIR = app_util.get_apps_dir()
STATIC_APP = __file__.split(os.sep)[-5]
APP_DIR = os.path.join(APPS_DIR, STATIC_APP)
LEGACY_SETUP = os.path.join(APP_DIR, 'default', 'setup.xml')

# SPL-44264 - some platforms won't include 
# an app's ./bin directory in the sys.path
BIN_DIR = os.path.join(APP_DIR, 'bin')
if not BIN_DIR in sys.path:
    sys.path.append(BIN_DIR)

from unix.models.unix import Unix 


class UnixFTR(module.ModuleHandler):
    ''' 
Example #33
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
import splunk.appserver.mrsparkle.lib.util as util
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import sys
# STREAM-3375: if splunk_app_stream bin path is not present in the sys.path, then add it to sys.path to ensure python modules are loaded
bin_path = make_splunkhome_path(['etc', 'apps', 'splunk_app_stream', 'bin'])
if bin_path not in sys.path:
    sys.path.append(bin_path)
from stream_utils import *

logger = setup_logger('streamfwdinstall')

#download file directory
download_dir = os.path.join(util.get_apps_dir(), 'splunk_app_stream',
                            'install')

# absolute file path to linux 64 bit streamfwd tarball
streamfwd_package_linux64 = None


# Controller class to compile and serve a shell script that downloads and installs streamfwd binary
class InstallStreamfwd(controllers.BaseController):
    ''' InstallStreamfwd Controller '''
    def serve_install_script_file_linux64(self):
        """prepares and serves the install script file for 64 bit linux platform"""
        global streamfwd_package_linux64

        # resolve the package file name if necessary
        if streamfwd_package_linux64 is None:
Example #34
import splunk
import splunk.rest as rest
import splunk.input as input
import splunk.entity as entity
import splunk
import time
import logging
import logging.handlers
import hashlib
import datetime
import socket
import re
import os.path

import splunk.appserver.mrsparkle.lib.util as util
dir = os.path.join(util.get_apps_dir(), 'alert_manager', 'bin', 'lib')
if not dir in sys.path:
    sys.path.append(dir)

from CsvLookup import *
from AlertManagerLogger import *
from ApiManager import *

# Helpers
def normalize_bool(value):
    if value == True:
        return True
    elif value == False:
        return False
    else:
        return True if value.lower() in ('1', 'true') else False
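
A few illustrative calls against the helper above; note that any string other than '1' or 'true' (case-insensitive) falls through to False:

assert normalize_bool(True) is True
assert normalize_bool('TRUE') is True
assert normalize_bool('0') is False
assert normalize_bool('yes') is False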
Example #35
import logging
from collections import OrderedDict
import os
import splunk
from splunk.appserver.mrsparkle.lib.util import make_url
import splunk.appserver.mrsparkle.lib.util as util
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import traceback
import sys
from stream_utils import *
import uuid
import datetime
import urllib

logger = setup_logger('stream_kvstore_utils')
data_store_file = os.path.join(util.get_apps_dir(), 'splunk_app_stream',
                               'local', '.data_store')
misc_kv_store_uri = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/miscellaneous'
streams_kv_store_coll = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/streams'
stream_forwarder_groups_kv_store_coll = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/streamforwardergroups'
file_server_mount_points_kv_store_coll = '/servicesNS/nobody/splunk_app_stream/storage/collections/data/fileservermountpoints'
kv_store_ready = None
is_supported = None
server_roles = None
splunkd_ready = None
splunkd_fatal_error = False
kvstore_fatal_error = False
splunkd_time_out = 15
splunkd_connection_exceptions = [
    'The handshake operation timed out', 'Connection refused'
]
Example #36
# Update the sys.path so that libraries will be loaded out of the network_tools_app directory.
# This is important in order to make sure that the app won't cause issues with other Splunk apps
# that may include these same libraries (since Splunk puts other apps on the sys.path which
# can make the apps override each other).
import sys
import os
import errno

try:
    import ConfigParser  # Python 2
except ImportError:
    import configparser as ConfigParser  # Python 3

import splunk.appserver.mrsparkle.lib.util as util

lib_dir = os.path.join(util.get_apps_dir(), 'network_tools', 'bin',
                       'network_tools_app')

if not lib_dir in sys.path:
    sys.path.append(lib_dir)

# App provided imports
from network_tools_app.event_writer import StashNewWriter
from network_tools_app import pyspeedtest
from network_tools_app import pingparser
from .tracerouteparser import Traceroute
from network_tools_app.wakeonlan import wol
from network_tools_app.ipwhois import IPWhois
from network_tools_app.pythonwhois import get_whois
from network_tools_app.flatten import flatten
Example #37
import splunk
import splunk.rest as rest
import splunk.input as input
import splunk.entity as entity
import splunk
import time
import logging
import logging.handlers
import hashlib
import datetime
import socket
import re
import os.path

import splunk.appserver.mrsparkle.lib.util as util
dir = os.path.join(util.get_apps_dir(), 'alert_manager', 'bin', 'lib')
if not dir in sys.path:
    sys.path.append(dir)

from CsvLookup import CsvLookup
from ApiManager import ApiManager

from AlertManagerLogger import setupLogger

if __name__ == "__main__":
    start = time.time()

    # Setup logger
    log = setupLogger('migration')

    sessionKey = sys.stdin.readline().strip()
Example #38
def getLookupFile(lookup_name, sessionKey):
    uri = '/servicesNS/nobody/alert_manager/data/transforms/lookups/%s' % lookup_name
    lookup = getRestData(uri, sessionKey)
    #log.debug("getLookupFile(): lookup: %s" % json.dumps(lookup))
    log.debug("Got lookup content for lookup=%s. filename=%s app=%s" % (lookup_name, lookup["entry"][0]["content"]["filename"], lookup["entry"][0]["acl"]["app"]))
    return os.path.join(util.get_apps_dir(), lookup["entry"][0]["acl"]["app"], 'lookups', lookup["entry"][0]["content"]["filename"])
Example #39
import splunk
import splunk.rest as rest
import splunk.input as input
import splunk.entity as entity
import splunk
import time
import logging
import logging.handlers
import hashlib
import datetime
import socket
import re
import os.path

import splunk.appserver.mrsparkle.lib.util as util
dir = os.path.join(util.get_apps_dir(), 'alert_manager', 'bin', 'lib')
if not dir in sys.path:
    sys.path.append(dir)

from CsvLookup import *
from AlertManagerLogger import *
from ApiManager import *

if __name__ == "__main__":
    start = time.time()

    # Setup logger
    log = setupLogger('migration')

    sessionKey     = sys.stdin.readline().strip()
    splunk.setDefault('sessionKey', sessionKey)
Example #40
import urllib
import json
import os
import sys
import splunk.rest as rest

import splunk.appserver.mrsparkle.lib.util as util
dir = os.path.join(util.get_apps_dir(), 'alert_manager', 'bin', 'lib')
if not dir in sys.path:
    sys.path.append(dir)

from AlertManagerLogger import *

class ApiManager(object):

    log = None
    sessionKey = ''

    def __init__(self, sessionKey):
        self.sessionKey = sessionKey
        self.log = setupLogger('apimanager')

    def checkKvStore(self):
        try:
            query = { }
            uri = '/servicesNS/nobody/alert_manager/storage/collections/data/email_templates?query=%s' % urllib.quote(json.dumps(query))
            serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=self.sessionKey)

            if serverResponse['status'] == '503':
                self.log.debug("KVStore unavailable. Response status: %s" % serverResponse['status'])
                return False
Example #41
from os import remove, close

#from splunk import AuthorizationFailed as AuthorizationFailed
import splunk.appserver.mrsparkle.controllers as controllers
import splunk.appserver.mrsparkle.lib.util as util
import splunk.bundle as bundle
import splunk.entity as entity
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
import splunk.clilib.bundle_paths as bundle_paths
from splunk.util import normalizeBoolean as normBool
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
import splunk.rest as rest

dir = os.path.join(util.get_apps_dir(), __file__.split('.')[-2], 'bin')

if not dir in sys.path:
    sys.path.append(dir)

#sys.stdout = open('/tmp/stdout', 'w')
#sys.stderr = open('/tmp/stderr', 'w')


def mkSample(src_file, pattern, subst, prefix=None, suffix=None):
    fh, abs_path = mkstemp(suffix=suffix, prefix=prefix)
    new_file = open(abs_path, 'w')
    old_file = open(src_file)
    for line in old_file:
        new_file.write(line.replace(pattern, subst))
    #close temp file
Example #42
def migrate(sessionKey):
    stream_app = get_stream_app_name()
    local_streams_dir = os.path.join(util.get_apps_dir(), stream_app, 'local',
                                     'streams')
    local_capture_addresses_dir = os.path.join(util.get_apps_dir(), stream_app,
                                               'local', 'captureipaddresses')
    local_file_server_mount_points_dir = os.path.join(util.get_apps_dir(),
                                                      stream_app, 'local',
                                                      'fileservermountpoints')
    local_stream_forwarder_groups_dir = os.path.join(util.get_apps_dir(),
                                                     stream_app, 'local',
                                                     'streamforwardergroups')
    local_users_dir = os.path.join(util.get_apps_dir(), stream_app, 'local',
                                   'users')
    local_dir = os.path.join(util.get_apps_dir(), stream_app, 'local')
    backup_dir = os.path.join(util.get_apps_dir(), stream_app, 'local',
                              'pre_kv_store_config')

    #migrate local/streams to kv store
    if os.path.exists(local_streams_dir):
        logger.info('migrate: streams')
        if perform_migration(local_streams_dir, streams_kv_store_coll,
                             sessionKey):
            #move the version file to the new location under local/.version
            if not os.path.exists(os.path.join(local_dir, '.version')):
                if os.path.exists(os.path.join(local_streams_dir, '.version')):
                    shutil.move(os.path.join(local_streams_dir, '.version'),
                                local_dir)
            else:
                os.remove(os.path.join(local_streams_dir, '.version'))
            if not os.path.exists(os.path.join(backup_dir, 'streams')):
                shutil.move(local_streams_dir,
                            os.path.join(backup_dir, 'streams'))
        else:
            logger.error('migrate: migration failed for local streams folder')
    elif not os.path.exists(local_streams_dir):
        logger.info('migrate: no local streams folder to migrate')
    else:
        logger.info('migrate: local streams migration was already performed')

    #migrate local/captureipaddresses to kv store
    if os.path.exists(local_capture_addresses_dir):
        logger.info('migrate: captureipaddresses')
        if perform_migration(local_capture_addresses_dir, misc_kv_store_uri,
                             sessionKey):
            if not os.path.exists(
                    os.path.join(backup_dir, 'captureipaddresses')):
                shutil.move(local_capture_addresses_dir,
                            os.path.join(backup_dir, 'captureipaddresses'))
        else:
            logger.error(
                'migrate: migration failed for local captureipaddresses folder'
            )
    elif not os.path.exists(local_capture_addresses_dir):
        logger.info('migrate: no local captureipaddresses folder to migrate')
    else:
        logger.info(
            'migrate: local captureipaddresses migration was already performed'
        )

    #migrate local/fileservermountpoints to kv store
    if os.path.exists(local_file_server_mount_points_dir):
        logger.info('migrate: fileservermountpoints')
        if perform_migration(local_file_server_mount_points_dir,
                             file_server_mount_points_kv_store_coll,
                             sessionKey):
            if not os.path.exists(
                    os.path.join(backup_dir, 'fileservermountpoints')):
                shutil.move(local_file_server_mount_points_dir,
                            os.path.join(backup_dir, 'fileservermountpoints'))
        else:
            logger.error(
                'migrate: migration failed for local fileservermountpoints folder'
            )
    else:
        logger.info(
            'migrate: no local fileservermountpoints folder to migrate')

    #migrate local/streamforwardergroups to kv store
    if os.path.exists(local_stream_forwarder_groups_dir):
        logger.info('migrate: streamforwardergroups')
        if perform_migration(local_stream_forwarder_groups_dir,
                             stream_forwarder_groups_kv_store_coll,
                             sessionKey):
            if not os.path.exists(
                    os.path.join(backup_dir, 'streamforwardergroups')):
                shutil.move(local_stream_forwarder_groups_dir,
                            os.path.join(backup_dir, 'streamforwardergroups'))
        else:
            logger.error(
                'migrate: migration failed for local streamforwardergroups folder'
            )
    elif not os.path.exists(local_stream_forwarder_groups_dir):
        logger.info(
            'migrate: no local streamforwardergroups folder to migrate')
    else:
        logger.info(
            'migrate: local streamforwardergroups migration was already performed'
        )

    #migrate local/users to kv store
    if os.path.exists(local_users_dir):
        logger.info('migrate: usertours')
        if perform_migration(local_users_dir, misc_kv_store_uri, sessionKey,
                             'usertours'):
            if not os.path.exists(os.path.join(backup_dir, 'users')):
                shutil.move(local_users_dir, os.path.join(backup_dir, 'users'))
        else:
            logger.error('migrate: migration failed for local users folder')
    elif not os.path.exists(local_users_dir):
        logger.info('migrate: no local users folder to migrate')
    else:
        logger.info('migrate: local users migration was already performed')

    #migrate apps meta to kv store
    if os.path.exists(os.path.join(local_dir, 'apps')):
        logger.info('migrate: appsmeta')
        if perform_migration(local_dir, misc_kv_store_uri, sessionKey,
                             'appsmeta', ['apps']):
            if not os.path.exists(os.path.join(backup_dir, 'apps')):
                shutil.move(os.path.join(local_dir, 'apps'), backup_dir)
            else:
                os.remove(os.path.join(local_dir, 'apps'))
        else:
            logger.error('migrate: migration failed for local apps metadata')
    elif not os.path.exists(os.path.join(local_dir, 'apps')):
        logger.info('migrate: no local apps metadata to migrate')
    else:
        logger.info(
            'migrate: local apps metadata migration was already performed')
Example #43
import splunk
import splunk.auth as auth
import splunk.bundle as bundle
import splunk.util as util
import splunk.appserver.mrsparkle.lib.util as app_util
from splunk.appserver.mrsparkle.lib import jsonresponse
import controllers.module as module

from splunk.models.app import App 

try:
    APP_NAME = __file__.split(os.sep)[-5]
except:
    APP_NAME = 'splunk_for_hadoopops'
    
dir = os.path.join(app_util.get_apps_dir(), APP_NAME, 'bin')
if not dir in sys.path:
    sys.path.append(dir)

from hadoopops.models.hadoopops import HadoopOps 

logger = logging.getLogger('splunk.module.HadoopOpsFTR')

class HadoopOpsFTR(module.ModuleHandler):
    ''' 
    checks to see if app is configured or if user ignored last call to action 
    also handles setting the ignored bit in user's hadoopops.conf
    '''

    def generateResults(self, **kwargs):