Example #1
    def run(self):
        try:
            results = se.searchAll(self.search, sessionKey=self.sessionKey, namespace=self.namespace, owner=self.user, status_buckets=0, required_field_list='*',
                                   auto_finalize_ec=100,
                                   max_count=100, max_time=FIELD_INFO_MAX_TIME,
                                   enable_lookups=0, auto_cancel=2*FIELD_INFO_MAX_TIME
                                   #exec_mode='blocking',
                                   )
            # count occurrences and distinct values for each field
            fieldCounts = {}
            fieldValues = {}
            for result in results:
                for field in result:
                    if ignoredField(field):
                        continue
                    fieldCounts[field] = fieldCounts.get(field, 0) + 1
                    if field not in fieldValues:
                        fieldValues[field] = set()
                    fieldValues[field].add(str(result[field]))

            # rank fields by value diversity (weighted 10x) plus raw frequency, descending
            fields = sorted(fieldCounts, key=lambda f: 10 * len(fieldValues[f]) + fieldCounts[f], reverse=True)
            fieldInfo = fields[:FIELD_INFO_MAX_FIELDS]
            # store answer away
            g_field_info_cache[self.key] = fieldInfo
        except Exception:
            pass
        # deregister this worker thread; ignore races with concurrent removal
        try:
            if self in g_threads:
                g_threads.remove(self)
        except Exception:
            pass
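Every snippet on this page follows the same basic pattern: build an SPL query string, hand it to splunk.search.searchAll() together with a session key, and iterate the returned result objects. A minimal sketch of that pattern, assuming a reachable Splunk instance and a session key obtained elsewhere:

# Minimal sketch of the common pattern; `session_key` is a placeholder
# for a real authentication token.
import splunk.search as se

def count_recent_errors(session_key):
    # searchAll blocks until the job completes and returns every result
    results = se.searchAll('search index=_internal log_level=ERROR | head 10',
                           sessionKey=session_key)
    for result in results:
        # a Result renders as its raw text; individual fields come from get()
        print(str(result))
    return len(results)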
Example #2

    def sync_macros(self):
        """Sync inputs and update macros based on custom indexes."""
        logger.info('syncing inputs...')

        # get the snapshot of current inputs from summary index
        inputs_spl = util.get_option_from_conf(self.session_key, 'macros', 'aws-sourcetype-index-summary', 'definition')
        input_list = splunk_search.searchAll(inputs_spl, sessionKey=self.session_key)
        logger.info('%s input(s) in total' % len(input_list))

        for input_item in input_list:
            index_name = input_item.get('input_index')[0].value
            sourcetype = input_item.get('input_sourcetype')[0].value

            # update macros
            if sourcetype in SOURCETYPE_MACRO_MAP:
                macro_stanza = SOURCETYPE_MACRO_MAP[sourcetype]
                util.update_index_macro(self.session_key, macro_stanza, index_name)

        # enable savedsearches
        saved_searches = self.local_service.saved_searches

        for search_name in SCHEDULE_SEARCHES:
            if search_name in saved_searches:
                search = saved_searches[search_name]
                enabled = splunk_util.normalizeBoolean(search.content['is_scheduled'])
                if not enabled:
                    search.update(**{'is_scheduled': 1})

        return 'Macros Update Complete.'
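SOURCETYPE_MACRO_MAP is not shown on this page; a purely illustrative shape for it, so the lookup above reads clearly (the real keys and macro stanzas live elsewhere in the add-on and may differ):

# Hypothetical shape only: maps an input sourcetype to the macro stanza
# whose index definition should be updated.
SOURCETYPE_MACRO_MAP = {
    'aws:config': 'aws-config-index',
    'aws:cloudtrail': 'aws-cloudtrail-index',
}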
Example #3

def _is_input_existed(session_key, sourcetype):
    input_spl = util.get_option_from_conf(session_key, 'macros',
                                          'aws-input-summary', 'definition')
    results = search.searchAll('%s | search sourcetype="%s"' %
                               (input_spl, sourcetype),
                               sessionKey=session_key)
    return len(results) > 0
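A brief usage sketch; the 'aws:config' check mirrors how this helper is called in the upgrade code in Example #14 below:

# Only act when at least one aws:config input has already indexed data.
if _is_input_existed(session_key, 'aws:config'):
    _enable_savedsearches(service, TOPOLOGY_SAVEDSEARCHES)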
Example #4
    def execute(self):
        index_option_value = util.get_option_from_conf(self.session_key, 'macros', 'aws-description-index', 'definition')

        results = search.searchAll(SPL % index_option_value, sessionKey=self.session_key)

        recommendations = []
        for result in results:
            # each raw result is expected to render as "resourceId=<value>"
            temp_arr = str(result).split('=')
            if len(temp_arr) == 2 and temp_arr[0] == 'resourceId':
                recommendations.append({
                    'resource_id': temp_arr[1],
                    'resource_type': 'elb',
                    'ml_dimension': constants.UNUSED_ELB_DIMENSION,
                    'ml_action': constants.DELETE_ACTION,
                    'ml_priority': 1,
                    'feature': [],
                    'timestamp': int(time.time())
                })

        if len(recommendations) > 0:
            self.recommendation_kao.batch_insert_items(recommendations)

        output_message = 'Inserted %d unused ELBs into KV store.' % len(recommendations)
        logger.info(output_message)

        return output_message
Example #5
def getSampleEvents(eventtype, args, fast=True):
    results = []

    if eventtype != '':

        if eventtype.strip().startswith("|") or len(shu.getJustCommands(eventtype, None)) > 1:
            raise Exception("Eventtypes cannot contain search commands")
        
        eventtype = eventtype.replace('\\', '\\\\')
        sid = args['sid']
        if fast:
            # try to finalize jobs so that search job can be used with loadjob
            try:
                job = se.getJob(sid)
                job.finalize()
                se.waitForJob(job, MAX_JOB_WAIT) # job isn't ready immediately after finalize is called.
            except Exception:
                pass
            query = "| loadjob %s | search %s | head %s | fields | abstract maxlines=%s " % (sid, eventtype, MAX_SAMPLES, MAX_LINES)
        else:
            query = "search %s | head %s | fields | abstract maxlines=%s " % (eventtype, MAX_SAMPLES, MAX_LINES)

        maxtime = args.get('maxtime', None)
        if maxtime is not None:
            # try to use maxtime to get selected event at top
            epochmaxtime = splunk.util.dt2epoch(splunk.util.parseISO(maxtime))
            results = se.searchAll(query, latest_time=epochmaxtime, status_buckets=1,
                                   auto_finalize_ec=MAX_SAMPLES,
                                   max_out=MAX_SAMPLES,
                                   max_count=MAX_SAMPLES, max_time=MAX_JOB_WAIT,
                                   enable_lookups=0, auto_cancel=int(1.5*MAX_JOB_WAIT)
                                   )

        # if we got no results, perhaps the job expired.  rerun the search.
        if fast and len(results) == 0:
            return getSampleEvents(eventtype, args, False)
        
        # if not enough events, research without time constraint
        if len(results) < MIN_SAMPLES:
            results = se.searchAll(query, status_buckets=1,
                                   auto_finalize_ec=MAX_SAMPLES,
                                   max_out=MAX_SAMPLES,
                                   max_count=MAX_SAMPLES, max_time=MAX_JOB_WAIT,
                                   enable_lookups=0, auto_cancel=int(1.5*MAX_JOB_WAIT)
                                   )
        results = [r.raw.getRaw() for r in results]
    return results
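A hypothetical call for the fast path; 'sid' and 'maxtime' are the two keys the function reads from args, and the literal values here are placeholders:

# Placeholder values only: sid must name an existing search job, and
# maxtime is an ISO-8601 timestamp parsed by splunk.util.parseISO.
args = {'sid': '1409322412.13', 'maxtime': '2014-08-29T10:00:00.000-07:00'}
samples = getSampleEvents('failed_login', args, fast=True)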
Example #6
    def run(self, workspace=None, debug=None):
        import splunk.search as se

        # avoid a shared mutable default argument; the workspace is mutated below
        if workspace is None:
            workspace = {}

        q = self.args['search']
        q = substVars(q, workspace)
        workspace['_'] = results = se.searchAll(q, **workspace)
        if debug:
            if 'debug' not in workspace:
                workspace['debug'] = []
            workspace['debug'].append(results[:self.MAX_DEBUG_RESULTS_PER_STEP])
Example #8
def getSampleEvents(eventtype, args):
    results = []

    if eventtype != '':

        if eventtype.strip().startswith("|") or len(shu.getJustCommands(eventtype, None)) > 1:
            raise Exception("Eventtypes cannot contain search commands")
        
        eventtype = eventtype.replace('\\', '\\\\')
        query = "search %s | head %s | fields | abstract maxlines=%s " % (eventtype, MAX_SAMPLES, MAX_LINES)
        maxtime = args.get('maxtime', None)
        if maxtime is not None:
            # try to use maxtime to get selected event at top
            epochmaxtime = splunk.util.dt2epoch(splunk.util.parseISO(maxtime))
            results = se.searchAll(query, latest_time=epochmaxtime, status_buckets=1)

        # if not enough events, research without time constraint
        if len(results) < MIN_SAMPLES:
            results = se.searchAll(query, status_buckets=1)

        results = [r.raw.getRaw() for r in results]
    return results
Example #9
def getSampleEvents(sessionKey, sampleSearch, maxtime, messages):
    logger.debug( "SAMPLESEARCH: %s" % sampleSearch)
    if sampleSearch == '':
        return [],[]

    query = "search %s | head %s | abstract maxlines=%s " % (sampleSearch, MAX_SAMPLES, MAX_LINES)
    logger.debug("QUERY: %s" % query)
    logger.debug("MAXTIME: %s" % maxtime)
    results = []
    if maxtime is not None:
        # try to use maxtime to get selected event at top
        epochmaxtime = splunk.util.dt2epoch(splunk.util.parseISO(maxtime))
        results = se.searchAll(query, sessionKey=sessionKey, latest_time=epochmaxtime, status_buckets=1)
        logger.debug("RESULTS1: %s" % len(results))
    #addMessage(messages, "ONLY %s events with maxtime = %s '%s'" % (len(results), maxtime, query), CWARN)
    #print "ONLY %s events with maxtime = %s '%s'" % (len(results), maxtime, query)

    # if not enough events, research without time constraint
    if len(results) < MIN_SAMPLES:
        results = se.searchAll(query, sessionKey=sessionKey, status_buckets=1)
        logger.debug( "RESULTS2: %s" % len(results))
        
    return ([ r.raw.getRaw() for r in results ], results)
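The function returns a pair: the raw event texts and the underlying result objects. A hypothetical call, with placeholder search text:

# Placeholder arguments: any valid base search works as sampleSearch.
raws, result_objs = getSampleEvents(session_key, 'sourcetype=access_combined',
                                    None, messages=[])
for raw in raws[:5]:
    print(raw)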
Example #11

    def sync_accounts(self):
        """Sync account ids and names from the add-on and save/update them in a lookup file."""
        logger.info('syncing accounts...')

        # get the existing accounts from the lookup file
        updated_account_list = []
        existed_account_keys = {}
        # join id and name with a separator so distinct pairs cannot collide
        key_pattern = '%s|%s'

        if os.path.isfile(ACCOUNT_LOOKUP_PATH):
            with open(ACCOUNT_LOOKUP_PATH) as csv_file:
                reader = csv.DictReader(csv_file)
                for row in reader:
                    account_id = row['account_id']
                    account_name = row['name']

                    key = key_pattern % (account_id, account_name)
                    existed_account_keys[key] = True
                    updated_account_list.append({
                        'account_id': account_id,
                        'name': account_name
                    })

        # fetch the latest accounts from the summary index and append any new ones
        accounts_spl = util.get_option_from_conf(self.session_key, 'macros', 'aws-account-summary', 'definition')
        account_list = splunk_search.searchAll('%s | dedup name, account_id | table name, account_id' % accounts_spl, sessionKey=self.session_key)
        logger.info('%s account(s) in total' % len(account_list))

        is_accounts_changed = False

        for account in account_list:
            account_id = account.get('account_id')[0].value
            account_name = account.get('name')[0].value

            if key_pattern % (account_id, account_name) not in existed_account_keys:
                updated_account_list.append({
                    'account_id': account_id,
                    'name': account_name
                })
                is_accounts_changed = True

        # update lookup file
        if is_accounts_changed:
            util.update_lookup_file(self.session_key, ACCOUNT_LOOKUP_NAME, ACCOUNT_HEADER, updated_account_list)

        return 'Accounts Synchronization Complete.'
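The merge logic above can be exercised in isolation. A minimal standalone sketch under the same key scheme, with hypothetical names:

# Standalone sketch of the dedup scheme: known (account_id, name) pairs
# are keyed once, and only unseen pairs get appended.
def merge_accounts(existing_rows, fetched_pairs):
    key_pattern = '%s|%s'
    seen = set(key_pattern % (row['account_id'], row['name'])
               for row in existing_rows)
    merged = list(existing_rows)
    changed = False
    for account_id, name in fetched_pairs:
        if key_pattern % (account_id, name) not in seen:
            merged.append({'account_id': account_id, 'name': name})
            changed = True
    return merged, changed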
Example #12
def getPastSearches(user, sessionKey, namespace):

    bootstrapSearches = []

    try:
        bootsearchlog = make_splunkhome_path(['etc','system','static','bootstrapsearches.txt'])
        lines = utils.readText(bootsearchlog).split('\n')
        bootstrapSearches.extend(lines)
    except Exception:
        logger.warn("Unable to get bootstrap search history")

    userHistory = []
    try:
        # get user's history of searches, ignoring those that didn't return any results
        q = "|history | head %s | search event_count>0 OR result_count>0 | dedup search | table search" % MAX_HISTORY
        results = se.searchAll(q, sessionKey=sessionKey, namespace=namespace, owner=user, spawn_process=False)
        userHistory = [str(r['search']) for r in results]
        if q in userHistory:
            userHistory.remove(q)
    except Exception as e:
        logger.warn("Unable to get search history: %s" % e)
Example #13
    def get_cloudwatch_kpis(self, *metric_names, **time_params):
        """
        Get Cloudwatch data of some metric.
        :param metric_name: Cloudwatch metric name
        :param time_params: a dict, has "earliest_time" and "latest_time" key
        :return: an array of "splunk.search.Result" object
        """
        index_option_value = util.get_option_from_conf(self.session_key, 'macros', 'aws-cloudwatch-index', 'definition')
        spl = constants.CLOUDWATCH_SPL

        metric_name_list = []
        for metric_name in metric_names:
            metric_name_list.append('metric_name="%s"' % metric_name)

        # time_params arrives as **kwargs, so it is always a dict; only key
        # presence needs checking before applying defaults
        if 'earliest_time' not in time_params:
            time_params['earliest_time'] = 0

        if 'latest_time' not in time_params:
            time_params['latest_time'] = int(time.time())

        results = search.searchAll(
            spl.format(index=index_option_value,
                       metric_name_conditions=' OR '.join(metric_name_list)),
            sessionKey=self.session_key,
            earliestTime=time_params['earliest_time'],
            latestTime=time_params['latest_time'])
        return results
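A usage sketch, assuming helper is an instance of the enclosing class; CPUUtilization and NetworkIn are standard CloudWatch metric names:

# Hypothetical usage: fetch two CloudWatch KPIs for the last 24 hours.
import time

now = int(time.time())
kpis = helper.get_cloudwatch_kpis('CPUUtilization', 'NetworkIn',
                                  earliest_time=now - 86400,
                                  latest_time=now)
for result in kpis:
    print(str(result))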
Example #14

def _upgrade_topology(service, session_key, results):
    # get data from topology summary index
    topology_results = search.searchAll(
        'search index=aws_topology_history | head 1', sessionKey=session_key)

    # check Config inputs
    is_input_existed = _is_input_existed(session_key, 'aws:config')

    description = ''

    if len(topology_results) == 0:
        _migrate_topology(service)
        _generate_topology_snapshot(service)
        description += 'Migrated existing topology to summary index. Generated topology snapshot. '

    if is_input_existed:
        _enable_savedsearches(service, TOPOLOGY_SAVEDSEARCHES)
        description += 'Enabled corresponding savedsearches.'

    if description:
        results.append({'name': 'Topology', 'description': description})

    return
Example #15

try:
    results, dummyresults, settings = intersplunk.getOrganizedResults()
    session_key = settings['sessionKey']

    # generate local service
    service = LocalServiceManager(app=util.APP_NAME,
                                  session_key=session_key).get_local_service()

    if len(sys.argv) == 2:
        lookup_name = sys.argv[1]

        spl = '| inputlookup %s' % lookup_name

        # get search restrictions of current user
        search_restriction_arr = util.get_search_restrictions(session_key)

        if len(search_restriction_arr) > 0:
            search_restrictions = ' OR '.join(search_restriction_arr)
            spl = '%s | search %s' % (spl, search_restrictions)

        util.get_logger().info(spl)

        results = search.searchAll(spl, sessionKey=session_key)

except Exception:
    import traceback
    stack = traceback.format_exc()
    results = intersplunk.generateErrorResults("Error : Traceback: " +
                                               str(stack))
    util.get_logger().error(str(stack))

intersplunk.outputResults(results)