Example 1
def get_total_event_count( server, index, username, password ):
    from splunk import entity, auth, mergeHostPath
    mergeHostPath( server, True )
    auth.getSessionKey( username=username, password=password )
    properties = entity.getEntity( entityPath='/data/indexes', entityName=index ).properties
    if 'totalEventCount' in properties:
        return int( properties['totalEventCount'] )
    else:
        return 0
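A minimal usage sketch for the helper above; the host path, index name, and credentials are placeholder assumptions, not values from the original project.

if __name__ == '__main__':
    # Placeholder connection details -- point these at your own Splunk instance.
    total = get_total_event_count('https://localhost:8089', '_internal', 'admin', 'changeme')
    print('totalEventCount=%d' % total)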
Example 2
	def getCacheIDForMailbox(self, box):
		if not self.noCache:
			
			#If we are here it means we have to extract the last used UID from splunk...
			import splunk.auth as au
			import splunk.search as se
			import splunk
			import httplib2
			import time
			import string
			
			if self.splunkxpassword:
				try:
					p = subprocess.Popen('openssl bf -d -a -pass file:%s' % (os.path.join(os.environ['SPLUNK_HOME'],'etc','auth', 'splunk.secret')), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
					self.splunkpassword = p.communicate(self.splunkxpassword + '\n')[0]
				except Exception, e:
					if self.debug:
						logging.error(e)
						print traceback.print_exc(file=sys.stderr)
					raise ConfigError('Could not decrypt splunkxpassword')
		
			logging.debug("decrypted splunk password")
			
			splunk.mergeHostPath(self.splunkHostPath, True)
			try:
				key = au.getSessionKey(self.splunkuser, self.splunkpassword)
			except httplib2.ServerNotFoundError, e:
				raise LoginError("Unable to find the server at %s" % self.splunkHostPath)
Example 3
    def makeSplunkEmbedded(self, sessionKey=None, runOnce=False):
        """Setup operations for being Splunk Embedded.  This is legacy operations mode, just a little bit obfuscated now.
        We wait 5 seconds for a sessionKey or 'debug' on stdin, and if we time out then we run in standalone mode.
        If we're not Splunk embedded, we operate simpler.  No rest handler for configurations. We only read configs 
        in our parent app's directory.  In standalone mode, we read eventgen-standalone.conf and will skip eventgen.conf if
        we detect SA-Eventgen is installed. """

        fileHandler = logging.handlers.RotatingFileHandler(
            os.environ['SPLUNK_HOME'] + '/var/log/splunk/eventgen.log',
            maxBytes=25000000,
            backupCount=5)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        fileHandler.setFormatter(formatter)
        # fileHandler.setLevel(logging.DEBUG)
        logger.handlers = []  # Remove existing StreamHandler if we're embedded
        logger.addHandler(fileHandler)
        logger.info("Running as Splunk embedded")
        import splunk.auth as auth
        import splunk.entity as entity
        # 5/7/12 CS For some reason Splunk will not import the modules into global in its copy of python
        # This is a hacky workaround, but it does fix the problem
        globals()['auth'] = locals()['auth']
        # globals()['bundle'] = locals()['bundle']
        globals()['entity'] = locals()['entity']
        # globals()['rest'] = locals()['rest']
        # globals()['util'] = locals()['util']

        if sessionKey == None or runOnce == True:
            self.runOnce = True
            self.sessionKey = auth.getSessionKey('admin', 'changeme')
        else:
            self.sessionKey = sessionKey

        self.splunkEmbedded = True
Example 4
def make_ta_for_indexers(username, password):
    '''
    Splunk_TA_ForIndexers spl generation for ES 4.2.0 and up
    '''
    if not username or not password:
        raise Exception("Splunk username and password must be defined.")
    sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'bin']))
    session_key = auth.getSessionKey(username, password)
    from app_maker.make_index_time_properties import makeIndexTimeProperties
    try:
        archive = makeIndexTimeProperties(
            app_info,
            session_key,
            include_indexes=include_indexes,
            imported_apps_only=imported_apps_only,
            namespace=namespace)
    except TypeError:
        #Some versions have a change that removed the kwarg imported_apps_only
        #For older versions, we'll still need to use the imported_apps_only arg, so that's why we
        #do this second
        archive = makeIndexTimeProperties(app_info,
                                          session_key,
                                          include_indexes=include_indexes,
                                          namespace=namespace)
    print archive
    assert archive.startswith(spl_location)
Example 5
    def makeSplunkEmbedded(self, sessionKey=None, runOnce=False):
        """Setup operations for being Splunk Embedded.  This is legacy operations mode, just a little bit obfuscated now.
        We wait 5 seconds for a sessionKey or 'debug' on stdin, and if we time out then we run in standalone mode.
        If we're not Splunk embedded, we operate simpler.  No rest handler for configurations. We only read configs 
        in our parent app's directory.  In standalone mode, we read eventgen-standalone.conf and will skip eventgen.conf if
        we detect SA-Eventgen is installed. """

        fileHandler = logging.handlers.RotatingFileHandler(os.environ['SPLUNK_HOME'] + '/var/log/splunk/eventgen.log', maxBytes=25000000, backupCount=5)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        fileHandler.setFormatter(formatter)
        # fileHandler.setLevel(logging.DEBUG)
        logger.handlers = [ ] # Remove existing StreamHandler if we're embedded
        logger.addHandler(fileHandler)
        logger.info("Running as Splunk embedded")
        import splunk.auth as auth
        import splunk.entity as entity
        # 5/7/12 CS For some reason Splunk will not import the modules into global in its copy of python
        # This is a hacky workaround, but it does fix the problem
        globals()['auth'] = locals()['auth']
        # globals()['bundle'] = locals()['bundle']
        globals()['entity'] = locals()['entity']
        # globals()['rest'] = locals()['rest']
        # globals()['util'] = locals()['util']

        if sessionKey == None or runOnce == True:
            self.runOnce = True
            self.sessionKey = auth.getSessionKey('admin', 'changeme')
        else:
            self.sessionKey = sessionKey
        
        self.splunkEmbedded = True
Example 6
def execute():
    results = []
    try:
        results, dummyresults, settings = si.getOrganizedResults()

        # default values
        args = {"namespace": "search"}
        # get commandline args
        keywords, options = si.getKeywordsAndOptions()
        # override default args with settings from search kernel
        args.update(settings)
        # override default args with commandline args
        args.update(options)

        sessionKey = args.get("sessionKey", None)
        owner = args.get("owner", "admin")
        namespace = args.get("namespace", None)

        if namespace.lower() == "none":
            namespace = None

        messages = {}

        if sessionKey == None:
            # this shouldn't happen, but it's useful for testing.
            try:
                sessionKey = sa.getSessionKey("admin", "changeme")
                si.addWarnMessage(
                    messages, "No session given to 'tune' command. Using default admin account and password."
                )
            except splunk.AuthenticationFailed, e:
                si.addErrorMessage(messages, "No session given to 'tune' command.")
                return

        if len(keywords) != 1:
            usage()

        # e.g., '/data/inputs/monitor'
        entity = keywords[0]
        logger.info("Entity: %s Args: %s" % (entity, args))

        results = []  # we don't care about incoming results
        try:
            entitys = en.getEntities(entity, sessionKey=sessionKey, owner=owner, namespace=namespace, count=-1)
            for name, entity in entitys.items():
                try:
                    myapp = entity["eai:acl"]["app"]
                    if namespace != None and myapp != namespace:
                        continue
                except:
                    continue  # if no eai:acl/app, filter out
                result = entityToResult(name, entity)
                results.append(result)
        except splunk.ResourceNotFound, e2:
            pass
Example 7
 def testSimple(self):
     sessionKey = auth.getSessionKey('admin', 'changeme')
     job = search.dispatch('windbag', sessionKey=sessionKey)
     time.sleep(1)
     event = job.results[0]
     custom = Custom(namespace='search')
     renderer = custom.getRenderer(event.fields)
     self.assertEquals(renderer.get('eventtype', None), None)
     self.assertEquals(renderer.get('priority'), 0)
     self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
     self.assertEquals(renderer.get('css_class', None), None)
Example 8
 def testSimple(self):
     sessionKey = auth.getSessionKey('admin', 'changeme')
     job = search.dispatch('windbag', sessionKey=sessionKey)
     time.sleep(1)
     event = job.results[0]
     custom = Custom(namespace='search')
     renderer = custom.getRenderer(event.fields)
     self.assertEquals(renderer.get('eventtype', None), None)
     self.assertEquals(renderer.get('priority'), 0)
     self.assertEquals(renderer.get('template'),
                       '//results/EventsViewer_default_renderer.html')
     self.assertEquals(renderer.get('css_class', None), None)
Example 9
def get_node_adapters(local_host_path, local_session_key):
    """
	Given the local session key return an iterable of HydraGatewayAdapters to 
	all Hydra Nodes in the TA Vmware app context for which we have valid access. 
	Any failed logins will be logged but otherwise will not impact data collection. 
	
	@type local_host_path: str
	@param local_host_path: path to the local splunkd mgmt interface
	@type local_session_key: str
	@param local_session_key: valid splunk session key to the local splunkd instance
	
	@rtype: list
	@return: iterable of authenticated HydraGatewayAdapters to all DCN's
	"""
    #Establish node list
    node_stanzas = HydraNodeStanza.all(host_path=local_host_path,
                                       sessionKey=local_session_key)
    node_stanzas._owner = "nobody"  #self.asset_owner
    node_stanzas = node_stanzas.filter_by_app("Splunk_TA_vmware")

    #Iterate on all nodes, checking if alive and sorting appropriately
    adapter_list = []
    for node_stanza in node_stanzas:
        password = SplunkStoredCredential.get_password(
            node_stanza.name,
            node_stanza.user,
            "Splunk_TA_vmware",
            session_key=local_session_key,
            host_path=local_host_path)
        if isinstance(node_stanza.gateway_port, int):
            gateway_port = node_stanza.gateway_port
        else:
            gateway_port = 8008
        gateway_uri = node_stanza.name.rstrip("/0123456789") + str(
            gateway_port)

        try:
            node_session_key = getSessionKey(node_stanza.user,
                                             password,
                                             hostPath=node_stanza.name)
            adapter_list.append(
                HydraGatewayAdapter(node_stanza.name, node_session_key,
                                    gateway_uri))
        except Exception as e:
            logger.exception(
                "[get_node_adapters] failed to establish gateway adapter for node=%s due to error=%s",
                node_stanza.name, str(e))

    return adapter_list
Example 10
    def test_is_fips_mode(self):
        if self.username is not None and self.password is not None:
            from splunk.auth import getSessionKey
            from splunk import SplunkdConnectionException
            try:
                session_key = getSessionKey(username=self.username,
                                            password=self.password)

                # This assumes you are testing against a non-FIPS environment
                self.assertEqual(ServerInfo.is_fips_mode(session_key), False)
            except SplunkdConnectionException:
                pass
        else:
            self.skipTest(
                'Skipping test since Splunk authentication data is not available'
            )
Example 11
def _session_key():
    """
    :return: A session key for calls to Splunk functions.
    """
    if os.isatty(sys.stdout.fileno()):
        print('Script running outside of splunkd process. Getting new sessionKey.')
        splunk_username = raw_input('Splunk Username: ')
        splunk_password = getpass.getpass('Splunk Password: ')  # assumes getpass is imported by the full script
        key = auth.getSessionKey(splunk_username, splunk_password)
    else:
        key = sys.stdin.readline().strip()

    if not key:
        sys.stderr.write("Did not receive a session key from splunkd. Please enable passAuth in inputs.conf")

    return key
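For context, a sketch of how the key returned by a helper like _session_key() is typically consumed; the /data/inputs/monitor endpoint is only an illustrative choice.

import splunk.entity as entity

def list_monitor_inputs():
    # Obtain the key (read from stdin when splunkd launches the script with
    # passAuth, or prompted for interactively) and use it for an EAI lookup.
    key = _session_key()
    return entity.getEntities('/data/inputs/monitor', sessionKey=key, count=-1)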
Example 12
    def handleParameters(self):
        self.local_path = None
        query = self.request['query']
        #check required parameters
        if 'sid' not in query:
            raise ReportError(400, "No sid specified")
        if 'dst' not in query:
            raise ReportError(400, "No dst specified")

        self.kerberos_principal = query.get('kerberos_principal', None)

        # get parameters
        self.sid = query.get('sid')
        self.dst = query.get('dst')
        self.output_mode = query.get('output_format', 'raw')
        self.offset = query.get('offset', 0)
        self.count = query.get('count', 0)
        self.field_list = query.get('field_list', '*')

        # this is mainly for testing purposes
        if self.sessionKey == None or len(self.sessionKey) == 0:
            username = query.get('username', 'admin')
            password = query.get('password', 'changeme')
            self.sessionKey = auth.getSessionKey(username, password)

        #validate and cast parameters
        self.count = self.getUnsignedInt(self.count, 'count')
        self.offset = self.getUnsignedInt(self.offset, 'offset')

        if not self.dst.startswith('hdfs://'):
            raise ReportError(400, "dst must start with hdfs://")

        if self.output_mode not in ['raw', 'csv', 'json', 'xml']:
            raise ReportError(
                400,
                "output_mode is not supported, must be one of these: raw, csv, json, xml"
            )

        try:
            if self.field_list != '*':
                self.field_list = toList(self.field_list)
        except:
            raise ReportError(
                400,
                "field_list must be either a comma separated field list or a *"
            )
Example 13
def make_ta_for_indexers(username, password):
    '''
    Splunk_TA_ForIndexers spl generation for ES 4.2.0 and up
    There are now three versions of ES we're now supporting (changes to makeIndexTimeProperties have been made over different versions).
    The try/except blocks below are meant to handle the differences in function signature.
    '''
    if not username or not password:
        raise Exception("Splunk username and password must be defined.")
    sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'bin']))
    session_key = auth.getSessionKey(username, password)
    from app_maker.make_index_time_properties import makeIndexTimeProperties
    success = False
    try:
        spec = {}
        spec["include_indexes"] = include_indexes
        spec["include_properties"] = include_properties
        spec.update()
        archive = makeIndexTimeProperties(spec, session_key)
        success = True
    except TypeError:
        #Some versions have a change that consolidated app_info, namespace, and include_indexes,
        #and added include_properties.
        #Below code is written to handle older versions.
        pass
    if success:
        print archive
        assert archive.startswith(spl_location)
        return
    try:
        #second-newest version compatible code
        archive = makeIndexTimeProperties(
            app_info,
            session_key,
            include_indexes=include_indexes,
            imported_apps_only=imported_apps_only,
            namespace=namespace)
    except TypeError:
        #Some versions have a change that removed the kwarg imported_apps_only
        #For older versions, we'll still need to use the imported_apps_only arg, so that's why we
        #do this second
        archive = makeIndexTimeProperties(app_info,
                                          session_key,
                                          include_indexes=include_indexes,
                                          namespace=namespace)
    print archive
    assert archive.startswith(spl_location)
Example 14
def user_check(ko_value):
    try:
        username = input('Enter username with admin privileges: ')
        password = getpass.getpass('Enter password: ')
        session_key = auth.getSessionKey(username, password)
        # Assumption: ko_value carries the requested action ahead of the new owner.
        if ko_value[3] == 'change':
            new_owner = ko_value[4]
            if new_owner:
                userlist = auth.getUser(name=new_owner)
                if not userlist:
                    print('New owner ' + new_owner + ' not found in splunk')
                    sys.exit(1)
        return session_key
    except:
        raise
Example 15
def make_ta_for_indexers_6xx(username, password):
    '''
    Splunk_TA_ForIndexers spl generation for ES 6.0.0 and up
    For this we'll use the rest endpoints
    '''

    uri = '/services/data/appmaker/makeapp'
    APP_INFO_DICT = {
        "app": "Splunk_TA_ForIndexers",
        "label": "Splunk App For Indexers",
        "version": "1.0.0",
        "build": "0"
    }
    INCLUDE_INDEXES = True
    INCLUDE_PROPERTIES = True

    SESSION_KEY = auth.getSessionKey(username, password)
    spec = {
        '_app': APP_INFO_DICT,
        'include_indexes': INCLUDE_INDEXES,
        'include_properties': INCLUDE_PROPERTIES
    }
    postargs = {
        'spec': json.dumps(spec),
        'routine': 'make_index_time_properties:makeIndexTimeProperties'
    }

    unused_r, c = rest.simpleRequest(
        uri,
        sessionKey=SESSION_KEY,
        postargs=postargs,
    )

    archive = make_splunkhome_path([
        'etc',
        'apps',
        json.loads(c)['namespace'],
        'local',
        'data',
        'appmaker',
        json.loads(c)['filename']
    ])
    print(archive)
Example 16
def _session_key():
    """
    :return: A session key for calls to Splunk functions.
    """
    if os.isatty(sys.stdout.fileno()):
        print(
            'Script running outside of splunkd process. Getting new sessionKey.'
        )
        splunk_username = raw_input('Splunk Username: ')
        splunk_password = getpass.getpass('Splunk Password: ')  # assumes getpass is imported by the full script
        key = auth.getSessionKey(splunk_username, splunk_password)
    else:
        key = sys.stdin.readline().strip()

    if not key:
        sys.stderr.write(
            "Did not receive a session key from splunkd. Please enable passAuth in inputs.conf"
        )

    return key
Example 17
        def testDuplicateEventtypePriority(self):
            sessionKey = auth.getSessionKey('admin', 'changeme')
            job = search.dispatch('| windbag | eval eventtype="testeventtype"',
                                  sessionKey=sessionKey)
            time.sleep(1)
            event = job.results[0]
            conf = splunk.bundle.getConf('event_renderers',
                                         sessionKey=sessionKey,
                                         namespace='search')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 300
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 400)
            self.assertEquals(renderer.get('template'),
                              '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass2')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 500
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 500)
            self.assertEquals(renderer.get('template'),
                              '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass1')
Example 18
    def validate_authentication(self, username, password, handler):
        """
        This is called to authenticate the user.
        """

        self.logger.info("Asking to authenticate, username=%s", username)

        # See if the user account is valid
        try:
            session_key = getSessionKey(username=username, password=password)
        except SplunkAuthenticationFailed:
            self.logger.info("Failed to authenticate, username=%s", username)
            raise AuthenticationFailed("Authentication failed")

        # See that capabilities the user has
        capabilities = self.getCapabilities4User(username, session_key)

        # Make a list of the perms
        perms = []

        for capability in self.CAPABILITY_MAP:
            if capability in capabilities:
                perms.append(self.CAPABILITY_MAP[capability])

        perm_string = self.combine_capabilities(perms)

        # Stop if the user doesn't have permission
        if len(perms) == 0:
            self.logger.info(
                "User lacks capabilities (needs ftp_read, ftp_write or " +
                "ftp_full_control), username=%s", username)

            raise AuthenticationFailed(
                "User does not have the proper capabilities " +
                "(needs ftp_read, ftp_write or ftp_full_control)")

        # Add the user
        self.logger.info("User authenticated, username=%s, perm=%s", username,
                         perm_string)
        self.add_user(username, '', self.ftp_path, perm=perm_string)
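The example depends on a CAPABILITY_MAP and a combine_capabilities() helper defined elsewhere in that project. A plausible minimal sketch of both is shown below; the capability names come from the error message above, but the pyftpdlib-style permission letters assigned to them are an assumption.

# Hypothetical mapping from Splunk capabilities to pyftpdlib permission letters.
CAPABILITY_MAP = {
    'ftp_read': 'elr',              # list directories and download files
    'ftp_write': 'adfmw',           # upload, append, delete, rename, mkdir
    'ftp_full_control': 'elradfmw'  # both of the above
}

def combine_capabilities(perms):
    # Merge the permission strings, keeping each letter only once.
    combined = ''
    for perm in perms:
        for letter in perm:
            if letter not in combined:
                combined += letter
    return combined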
Example 19
    def getCacheIDForMailbox(self, box):
        if not self.noCache:

            #If we are here it means we have to extract the last used UID from splunk...
            import splunk.auth as au
            import splunk.search as se
            import splunk
            import httplib2
            import time
            import string

            if self.splunkxpassword:
                try:
                    p = subprocess.Popen(
                        'openssl bf -d -a -pass file:%s' %
                        (os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                      'splunk.secret')),
                        shell=True,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
                    self.splunkpassword = p.communicate(self.splunkxpassword +
                                                        '\n')[0]
                except Exception, e:
                    if self.debug:
                        logging.error(e)
                        print traceback.print_exc(file=sys.stderr)
                    raise ConfigError('Could not decrypt splunkxpassword')

            logging.debug("decrypted splunk password")

            splunk.mergeHostPath(self.splunkHostPath, True)
            try:
                key = au.getSessionKey(self.splunkuser, self.splunkpassword)
            except httplib2.ServerNotFoundError, e:
                raise LoginError("Unable to find the server at %s" %
                                 self.splunkHostPath)
Example 20
        def testDuplicateEventtypePriority(self):
            sessionKey = auth.getSessionKey('admin', 'changeme')
            job = search.dispatch('| windbag | eval eventtype="testeventtype"', sessionKey=sessionKey)
            time.sleep(1)
            event = job.results[0]
            conf = splunk.bundle.getConf('event_renderers', sessionKey=sessionKey, namespace='search')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 300
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 400)
            self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass2')

            conf.beginBatch()
            conf['event_renderer_test1']['eventtype'] = 'testeventtype'
            conf['event_renderer_test1']['priority'] = 500
            conf['event_renderer_test1']['css_class'] = 'testclass1'
            conf['event_renderer_test2']['eventtype'] = 'testeventtype'
            conf['event_renderer_test2']['priority'] = 400
            conf['event_renderer_test2']['css_class'] = 'testclass2'
            conf.commitBatch()
            custom = Custom(namespace='search')
            renderer = custom.getRenderer(event.fields)
            self.assertEquals(renderer.get('eventtype'), 'testeventtype')
            self.assertEquals(renderer.get('priority'), 500)
            self.assertEquals(renderer.get('template'), '//results/EventsViewer_default_renderer.html')
            self.assertEquals(renderer.get('css_class'), 'testclass1')
Example 21
 def setUp(self):
     self.adminSessionKey = auth.getSessionKey("admin", "changeme")
     self.userName = createTestUser(tz="Chile/EasterIsland",
                                    sessionKey=self.adminSessionKey)
     self.sessionKey = auth.getSessionKey(self.userName, "changeme")
Example 22
    Chef.update(obj, 10, 'class', 'sleep')

    print "\nPY:   ", obj.toPY()

    print "FIND: 2", Chef.find(obj, obj, 2)
    print "FIND: 4", Chef.find(obj, obj, 4)

    print "UPDATE: 4 DO NOTHING", Chef.update(obj, 4, 'class', 'python')
    print "PY:   ", obj.toPY()

    print "UPDATE: 4", Chef.update(obj, 4, 'code', 'foobar')
    print "UPDATE: 4", Chef.update(obj, 4, 'class', 'parallel')
    print "PY:   ", obj.toPY()
    return

    print "FIND: 5", Chef.find(obj, obj, 5)

    print "DELETE: 2", Chef.delete(obj, 2)
    print "PY:   ", obj.toPY()
    print "DELETE: 5", Chef.delete(obj, 5)
    print "PY:   ", obj.toPY()

    #recipe.run({'person':'elvis'}, True)


if __name__ == '__main__':
    import splunk.auth as auth
    auth.getSessionKey('admin', 'changeme')

    test2()
Example 23
 def setUp(self):
    auth.getSessionKey('admin', 'changeme')
    if not hasattr(cherrypy, 'session'):
       setattr(cherrypy, 'session', {'user':{'name':'admin'}})
Example 24
    def get_appfile_url(self, appid, version, platform, splunk_version):
        return '%s/static/app/splunk_app_shared_components/repo/%s.spl' % (self.ex_host_path, self.build_appname(appid, version, platform, splunk_version))
        
    '''
    remove the entry(stanza) named appname in the appsrepo.conf
    '''
    def remove_repo_app_desc (self, appname):
        app = self.m_repoapps.get(appname)
        app.delete()
"""



if __name__ == "__main__":
    host_path = 'https://localhost:8089'
    sessionKey = auth.getSessionKey('admin', 'monday', host_path)
    
    splunkd = Splunkd(host_path, sessionKey)
    templates = splunkd.get_templates()
    
    '''
    for t in templates:
        napp = splunkd.create_app_from_template('ap-%s' % t.name.value, t.name.value)
        print napp.name.value
        print napp.href.value
    '''

    apps = splunkd.get_installed_apps()
    
    for app in apps:
        print app.name.value
Example 25
 def setUp(self):
     """
  init stuff like getting the session key for requests
  """
     self.sessionKey = auth.getSessionKey(username='******',
                                          password='******')
Example 26
 def __init__(self, username=None, password=None, session_key=None):
     self.session_key = session_key if session_key else auth.getSessionKey(
         username, password)
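Either construction path works with an __init__ like the one above; the SplunkClient class name and the credentials below are placeholders for illustration.

import sys

# With explicit credentials, a fresh session key is requested from splunkd...
client = SplunkClient(username='admin', password='changeme')

# ...or an existing key (e.g. the one splunkd passes on stdin via passAuth) is reused.
client = SplunkClient(session_key=sys.stdin.readline().strip())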
Example 27
 def setUp(self):
    """
    init stuff like getting the session key for requests
    """
    self.sessionKey = auth.getSessionKey(username='******', password='******')
Example 28
 def setUp(self):
     self.adminSessionKey = auth.getSessionKey("admin", "changeme")
     self.userName = createTestUser(tz="Chile/EasterIsland", sessionKey=self.adminSessionKey)
     self.sessionKey = auth.getSessionKey(self.userName, "changeme")
Example 29
 def setUp(self):
     auth.getSessionKey('admin', 'changeme')
     if not hasattr(cherrypy, 'session'):
         setattr(cherrypy, 'session', {'user': {'name': 'admin'}})
Example 30
    for ele in job.events:
        count += 1
    job.cancel()

    assert count == 3, fail_msg % count
    print ok_msg

# -------------------------
# -------------------------
if __name__ == '__main__':

    import splunk.auth as au
    import splunk.search
    
    splunk.mergeHostPath('localhost:8089', True)
    key = au.getSessionKey('admin','changeme')
    
    raw_data = """Apr 29 19:11:54  AAA\nApr 29 19:12:54  BBB\nApr 29 19:13:54  CCC\n"""
    
    # ------------------------------- #
    # test simple receivers endpoint  #
    # ------------------------------- #
    resp = submit(raw_data, sourcetype='http-receivers', index='default', source='http-test', hostname='simple-receivers-test')
    print 'insertion for simple receivers complete...querying splunk...waiting 60 seconds...'

    try:
        _get_final_count('simple-receivers-test', key, 'inserted 3 events via simple receivers end point, but found %d', 'insert via simple receivers endpoint - OK')
    except AssertionError, e:
        #test failed, continue to next
        print e
    
Example 31
    def getCacheIDForMailbox(self, box):
        if not self.noCache:

            #If we are here it means we have to extract the last used UID from splunk...
            import splunk.auth as au
            import splunk.search as se
            import splunk
            import httplib2
            import time
            import string

            if self.splunkxpassword:
                try:
                    p = subprocess.Popen(
                        'openssl bf -d -a -pass file:"%s"' %
                        (os.path.join(os.environ['SPLUNK_HOME'], 'etc', 'auth',
                                      'splunk.secret')),
                        shell=True,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE)
                    self.splunkpassword = p.communicate(self.splunkxpassword +
                                                        '\n')[0]
                except Exception as e:
                    if self.debug:
                        logging.error(e)
                        print(traceback.print_exc(file=sys.stderr))
                    raise ConfigError('Could not decrypt splunkxpassword')

            logging.debug("decrypted splunk password")

            splunk.mergeHostPath(self.splunkHostPath, True)
            try:
                key = au.getSessionKey(self.splunkuser, self.splunkpassword)
            except httplib2.ServerNotFoundError as e:
                raise LoginError("Unable to find the server at %s" %
                                 self.splunkHostPath)
            except Exception as e:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if not key:
                raise LoginError(
                    "userid/password combination for splunk user is invalid..."
                )

            if box[0] == "'" or box[0] == '"':
                ss = 'search index=mail mailbox=' + box + ' | head 1 | stats max(Date)'
            else:
                ss = 'search index=mail mailbox="' + box + '" | head 1 | stats max(Date)'

            job = se.dispatch(ss, sessionKey=key)

            start = datetime.datetime.now()

            logging.debug("dispatched search = " + ss)
            logging.debug(
                "dispatched job to splunk through the REST API. Waiting for response..."
            )

            while not job.isDone:
                time.sleep(1)
                logging.debug("*** waiting ")
                now = datetime.datetime.now()
                #if (now - start).seconds > self.timeout:
                if int((now - start).seconds) > int(self.timeout):
                    logging.debug(
                        "REST response took more than %s seconds, timing out...using default UID of 0 i.e. same as noCache",
                        self.timeout)
                    break

            #if we have caching on, and we run this for the first time, the result will not have any key like UID
            #Hence it will throw a KeyError or IndexError. Just ignore that error and return 0
            try:
                retVal = str(job.results[0]['max(Date)'])
                logging.debug(" got back " + str(retVal))
            except Exception as e:
                logging.debug(str(e))
                logging.debug(" mailbox was empty ")
                retVal = ""

            job.cancel()

            return retVal

        else:
            return ""
Example 32
def execute():
    
    results = []
    try:
        results, dummyresults, settings = si.getOrganizedResults()

        keywords, options = si.getKeywordsAndOptions()
        settings.update(options)

        sessionKey = settings.get("sessionKey", None)
        if TESTING and sessionKey == None:
            sessionKey = auth.getSessionKey('admin', 'changeme')
        owner      = settings.get("owner", None)
        namespace  = settings.get("namespace", "search")
        scriptname = settings.get("script", None)
        prerun_str = settings.get("prerun", "True").lower()
        prerun     = prerun_str.startswith('t') or prerun_str.startswith('y') or prerun_str.startswith('1')

        log("sessionKey %s owner %s namespace %s script %s prerun %s" % (sessionKey, owner, namespace, scriptname, prerun))
        
        if scriptname == None:
            raise Exception('"script" value required')
        if ".." in scriptname or "/" in scriptname or "\\" in scriptname:
            raise Exception('pathname cannot contain cannot contain "..", "/", or "\\".')
        home = si.splunkHome()
        localpath = os.path.join('etc', 'apps', namespace, 'scripts', scriptname + ".ss")
        pathname = os.path.join(home, localpath)
        if not os.path.exists(pathname):
            raise Exception('script path does not exist: "%s"' % os.path.join("SPLUNK_HOME", localpath))

        log("pathname %s" % (pathname))

        real_stdout = sys.stdout          
        if CAN_STREAM_RESULTS_ANY_TIME:
            # output results immediately to stdout            
            result_stream = sys.stdout  
        else:
            # output results once all done
            result_stream = StringIO.StringIO()

        # capture debugging stdout to StringIO, but have real stdout used for outputting results as streamed
        sys.stdout = StringIO.StringIO()
        
        script = scripting.Script(sessionKey, owner, namespace, path=pathname, prerunfix=prerun, outputstream=result_stream)
        side_effects = script.run()

        
        log("side_effects %s" % (side_effects))

        # output non-results -- variables and print statements from scripts
        sys.stdout.flush()
        messages = {}
        si.addInfoMessage(messages, "Variable values: %s" % side_effects)
        si.addInfoMessage(messages, "Standard output: %s" % sys.stdout.getvalue())

        # reset stdout
        sys.stdout = real_stdout
        OUTPUT_MSGS = True
        if OUTPUT_MSGS:
            # si.outputResults(None, messages)
            for level, messages in messages.items():
                for msg in messages:
                    print "%s=%s" % (level, normalizeMsg(msg))
            print

        # we haven't output results yet.  do it now.
        if not CAN_STREAM_RESULTS_ANY_TIME:
            result_stream.flush()
            print result_stream.getvalue()


    except Exception, e:
        sys.stdout = real_stdout        
        import traceback
        msg = "%s. Traceback: %s" % (e, traceback.format_exc())
        log("error %s" % msg)
        si.generateErrorResults(msg)
Example 33
 def testGetEntityWithHostPath(self):
     '''Test getting an entity using a host_path'''
     sessionKey = auth.getSessionKey('admin', 'changeme')
     manager = SplunkRESTManager(self.TestModel, sessionKey=sessionKey)
     manager.get(id='services/apps/local/search', host_path="%s://%s:%s" % (splunk.getDefault('protocol'), splunk.getDefault('host'), splunk.getDefault('port')))    
Example 34
def execute():

    results = []
    try:
        results, dummyresults, settings = si.getOrganizedResults()

        keywords, options = si.getKeywordsAndOptions()
        settings.update(options)

        sessionKey = settings.get("sessionKey", None)
        if TESTING and sessionKey == None:
            sessionKey = auth.getSessionKey('admin', 'changeme')
        owner = settings.get("owner", None)
        namespace = settings.get("namespace", "search")
        scriptname = settings.get("script", None)
        prerun_str = settings.get("prerun", "True").lower()
        prerun = prerun_str.startswith('t') or prerun_str.startswith(
            'y') or prerun_str.startswith('1')

        log("sessionKey %s owner %s namespace %s script %s prerun %s" %
            (sessionKey, owner, namespace, scriptname, prerun))

        if scriptname == None:
            raise Exception('"script" value required')
        if ".." in scriptname or "/" in scriptname or "\\" in scriptname:
            raise Exception(
                'pathname cannot contain cannot contain "..", "/", or "\\".')
        home = si.splunkHome()
        localpath = os.path.join('etc', 'apps', namespace, 'scripts',
                                 scriptname + ".ss")
        pathname = os.path.join(home, localpath)
        if not os.path.exists(pathname):
            raise Exception('script path does not exist: "%s"' %
                            os.path.join("SPLUNK_HOME", localpath))

        log("pathname %s" % (pathname))

        real_stdout = sys.stdout
        if CAN_STREAM_RESULTS_ANY_TIME:
            # output results immediately to stdout
            result_stream = sys.stdout
        else:
            # output results once all done
            result_stream = StringIO.StringIO()

        # capture debugging stdout to StringIO, but have real stdout used for outputting results as streamed
        sys.stdout = StringIO.StringIO()

        script = scripting.Script(sessionKey,
                                  owner,
                                  namespace,
                                  path=pathname,
                                  prerunfix=prerun,
                                  outputstream=result_stream)
        side_effects = script.run()

        log("side_effects %s" % (side_effects))

        # output non-results -- variables and print statements from scripts
        sys.stdout.flush()
        messages = {}
        si.addInfoMessage(messages, "Variable values: %s" % side_effects)
        si.addInfoMessage(messages,
                          "Standard output: %s" % sys.stdout.getvalue())

        # reset stdout
        sys.stdout = real_stdout
        OUTPUT_MSGS = True
        if OUTPUT_MSGS:
            # si.outputResults(None, messages)
            for level, messages in messages.items():
                for msg in messages:
                    print "%s=%s" % (level, normalizeMsg(msg))
            print

        # we haven't output results yet.  do it now.
        if not CAN_STREAM_RESULTS_ANY_TIME:
            result_stream.flush()
            print result_stream.getvalue()

    except Exception, e:
        sys.stdout = real_stdout
        import traceback
        msg = "%s. Traceback: %s" % (e, traceback.format_exc())
        log("error %s" % msg)
        si.generateErrorResults(msg)
Example 35
    for ele in job.events:
        count += 1
    job.cancel()

    assert count == 3, fail_msg % count
    print ok_msg

# -------------------------
# -------------------------
if __name__ == '__main__':

    import splunk.auth as au
    import splunk.search
    
    splunk.mergeHostPath('localhost:8089', True)
    key = au.getSessionKey('admin','changeme')
    
    raw_data = """Apr 29 19:11:54  AAA\nApr 29 19:12:54  BBB\nApr 29 19:13:54  CCC\n"""
    
    # ------------------------------- #
    # test simple receivers endpoint  #
    # ------------------------------- #
    resp = submit(raw_data, sourcetype='http-receivers', index='default', source='http-test', hostname='simple-receivers-test')
    print 'insertion for simple receivers complete...querying splunk...waiting 60 seconds...'

    try:
        _get_final_count('simple-receivers-test', key, 'inserted 3 events via simple receivers end point, but found %d', 'insert via simple receivers endpoint - OK')
    except AssertionError, e:
        #test failed, continue to next
        print e
    
Example 36
    print "FIND: 2", Chef.find(obj, obj, 2)
    print "FIND: 4", Chef.find(obj, obj, 4)

    print "UPDATE: 4 DO NOTHING", Chef.update(obj, 4, 'class', 'python')    
    print "PY:   ", obj.toPY()
    
    print "UPDATE: 4", Chef.update(obj, 4, 'code', 'foobar')
    print "UPDATE: 4", Chef.update(obj, 4, 'class', 'parallel')
    print "PY:   ", obj.toPY()
    return

    
    print "FIND: 5", Chef.find(obj, obj, 5)

    print "DELETE: 2", Chef.delete(obj, 2)
    print "PY:   ", obj.toPY()
    print "DELETE: 5", Chef.delete(obj, 5)
    print "PY:   ", obj.toPY()
    
    #recipe.run({'person':'elvis'}, True)

   
if __name__ == '__main__':
    import splunk.auth as auth
    auth.getSessionKey('admin', 'changeme')

    test2()
Example 37
    class TestParse(unittest.TestCase):

        _sessionKey = auth.getSessionKey('admin', 'changeme')
        _hostPath = splunk.mergeHostPath()

        # searches
        q = {
            'single': "search foo bar baz",
            'two': "search quux | diff position1=1 position2=2",
            'quotes': 'search twikiuser="******" | diff position1=1 position2=2',
        }

        def testCreateClause(self):
            """ Test the creation of new clause objects """

            clause1 = ParsedClause()
            clause1.command = "search"
            clause1.args = "foo"
            self.assertEquals(clause1.serialize(), "search foo")

            # python dicts (the structure in which args are stored in the ParsedClause class)
            # no longer maintain determinate ordering.
            # therefore, the output of this test can be either
            #   search index="_audit" foo bar baz, or
            #   search foo bar baz index="_audit"
            # both are identical searches
            clause2 = ParsedClause()
            clause2.command = "search"
            clause2.args = {'index': '_audit', 'search': "foo bar baz"}
            clause2String = clause2.serialize()
            self.assertTrue(
                clause2String == 'search index="_audit" foo bar baz'
                or clause2String == 'search foo bar baz index="_audit"')

            clause3 = ParsedClause(command="search", args="quux")
            self.assertEquals(clause3.serialize(), 'search quux')

            clause4 = ParsedClause(command="loglady")
            self.assertEquals(clause4.serialize(), 'loglady')

        def testEqualsOperatorClause(self):
            """ Test the equals operator in ParsedClause """

            # two clauses, including kv's that should be ignored in the compare, string case
            clause1 = ParsedClause()
            clause1.command = "search"
            clause1.args = "foo readlevel=2"
            clause2 = ParsedClause()
            clause2.command = "search"
            clause2.args = "foo index=default"
            self.assert_(clause1 == clause2)

            # two clauses, including kv's that should be ignored in the compare, dict case
            clause3 = ParsedClause()
            clause3.command = "search"
            clause3.args = {"index": "_internal", "user": "******"}
            clause4 = ParsedClause()
            clause4.command = "search"
            clause4.args = {
                "index": "_internal",
                "user": "******",
                "readlevel": "2"
            }
            self.assert_(clause3 == clause4)

            # two clauses, including kv's that should be not ignored in the compare, string case
            clause5 = ParsedClause()
            clause5.command = "search"
            clause5.args = "foo readlevel=11"
            clause6 = ParsedClause()
            clause6.command = "search"
            clause6.args = "foo index=default"
            self.failIf(clause5 == clause6)

            # test indiv clauses pulled out of ParsedSearch
            search1 = parseSearch(self.q['two'],
                                  hostPath=self._hostPath,
                                  sessionKey=self._sessionKey)
            search2 = parseSearch(self.q['two'],
                                  hostPath=self._hostPath,
                                  sessionKey=self._sessionKey)
            self.assert_(search1.clauses[1] == search2.clauses[1])

        def testEqualsOperatorSearch(self):
            """ Test the equals operator in ParsedSearch """

            ps1 = parseSearch(self.q['single'],
                              hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
            ps2 = parseSearch(self.q['single'],
                              hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
            self.assert_(ps1 == ps2)

            ps3 = parseSearch(self.q['single'],
                              hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
            ps4 = parseSearch(self.q['two'],
                              hostPath=self._hostPath,
                              sessionKey=self._sessionKey)
            self.assert_(ps3 != ps4)

        def testParseOneClause(self):
            """ Test the parsing of a single clause search """

            ps = parseSearch(self.q['single'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)

            self.assertEquals(len(ps.clauses), 1)
            self.assertEquals(ps.clauses[0].command, 'search')
            self.assertEquals(ps.clauses[0].serialize(), 'search foo bar baz')
            self.assert_(ps.clauses[0].properties['streamType'] == 'SP_STREAM')

        def testParseTwoClause(self):
            """ Test the parsing of a single clause search """

            ps = parseSearch(self.q['two'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)

            self.assertEquals(len(ps.clauses), 2)
            self.assertEquals(ps.clauses[0].command, 'search')
            self.assertEquals(ps.clauses[1].command, 'diff')
            self.assertEquals(normalizeListArgs(ps.clauses[0].args['search']),
                              'quux')
            self.assertEquals(normalizeListArgs(ps.clauses[1].args),
                              'position1=1 position2=2')
            print "PROPS:", ps.clauses[1].properties
            self.assertEquals(ps.clauses[1].properties['streamType'],
                              'SP_EVENTS')

        def testSerialize(self):
            """ Test search serialization/tostring"""

            ps = parseSearch(self.q['single'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            self.assertEquals(str(ps), self.q['single'])

            ps = parseSearch(self.q['two'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            self.assertEquals(str(ps), self.q['two'])

            ps = parseSearch(self.q['quotes'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            self.assertEquals(str(ps), self.q['quotes'])

            indexSearch = 'search index="_audit"'
            ps = parseSearch(indexSearch,
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            self.assertEquals(str(ps), indexSearch)

        def testJsonable(self):
            """ Test JSONable """
            ps = parseSearch(self.q['single'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            print "\n\t", json.dumps(ps.jsonable())

            ps = parseSearch(self.q['quotes'],
                             hostPath=self._hostPath,
                             sessionKey=self._sessionKey)
            print "\t", json.dumps(ps.jsonable())

        def test_chartSerializer(self):
            """ Test chart serialization """
            pc = ParsedClause()

            cases = {
                'chart sum(events) by hello,world': {
                    'xfield': 'hello',
                    'seriesfield': 'world',
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'}],
                },
                'chart sum(events),count by hello,world': {
                    'xfield': 'hello',
                    'seriesfield': 'world',
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'},
                                        {'function': 'count', 'rename': 'count'}],
                },
                'timechart sum(events) by world': {
                    'xfield': '_time',
                    'seriesfield': 'world',
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'}],
                },
                'timechart sum(events),count by hello': {
                    'xfield': '_time',
                    'seriesfield': 'hello',
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'},
                                        {'function': 'count', 'rename': 'count'}],
                },
                'timechart span="1d" sum(events) by world': {
                    'xfield': '_time',
                    'seriesfield': 'world',
                    'span': '1d',
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'}],
                },
                'timechart bins=5 sum(events) by world': {
                    'xfield': '_time',
                    'seriesfield': 'world',
                    'bins': 5,
                    'stat-specifiers': [{'function': 'sum', 'field': 'events', 'rename': 'sum(events)'}],
                },
            }
            for k, v in cases.items():
                command = k.split()[0]
                out = str(ParsedClause(None, command, v))  # out = pc._chartingSerializer(command, v)
                if out != k:
                    print "\n\nINPUT: ", v
                    print "GOAL:  ", k
                    print "OUTPUT:", out
                    self.assertEquals(k, out)
Example 38
def execute():
    results = []
    try:
        results, dummyresults, settings = si.getOrganizedResults()

        # default values
        args = {'namespace': 'search'}
        # get commandline args
        keywords, options = si.getKeywordsAndOptions()
        # override default args with settings from search kernel
        args.update(settings)
        # override default args with commandline args
        args.update(options)

        sessionKey = args.get("sessionKey", None)
        owner = args.get("owner", 'admin')
        namespace = args.get("namespace", None)

        if namespace.lower() == "none":
            namespace = None

        messages = {}

        if sessionKey == None:
            # this shouldn't happen, but it's useful for testing.
            try:
                sessionKey = sa.getSessionKey('admin', 'changeme')
                si.addWarnMessage(
                    messages,
                    "No session given to 'tune' command. Using default admin account and password."
                )
            except splunk.AuthenticationFailed, e:
                si.addErrorMessage(messages,
                                   "No session given to 'tune' command.")
                return

        if len(keywords) != 1:
            usage()

        # e.g., '/data/inputs/monitor'
        entity = keywords[0]
        logger.info("Entity: %s Args: %s" % (entity, args))

        results = []  # we don't care about incoming results
        try:
            entitys = en.getEntities(entity,
                                     sessionKey=sessionKey,
                                     owner=owner,
                                     namespace=namespace,
                                     count=-1)
            for name, entity in entitys.items():
                try:
                    myapp = entity["eai:acl"]["app"]
                    if namespace != None and myapp != namespace:
                        continue
                except:
                    continue  # if no eai:acl/app, filter out
                result = entityToResult(name, entity)
                results.append(result)
        except splunk.ResourceNotFound, e2:
            pass