def _setup_kv_store(self, handler):
    """Initialize the entity and collector KV store managers.

    :param handler: REST handler supplying the current session key
    """
    logger.info('Setting up the Entity KV Store...')
    # Both stores share the same connection parameters
    shared_kwargs = dict(
        server_uri=EMCommon.get_server_uri(),
        session_key=handler.getSessionKey(),
        app=em_constants.APP_NAME,
    )
    self.entity_store = EMKVStoreManager(
        collection=em_constants.STORE_ENTITIES, **shared_kwargs)
    self.collector_store = EMKVStoreManager(
        collection=em_constants.STORE_COLLECTORS, **shared_kwargs)
def _setupKVStore(self, handler):
    """Create the KV store managers for groups and collectors.

    :param handler: REST handler supplying the current session key
    """
    logger.info('Setting up the Groups KV Store...')
    # Connection parameters are identical for both collections
    common = dict(
        server_uri=EMCommon.get_server_uri(),
        session_key=handler.getSessionKey(),
        app=EMConstants.APP_NAME,
    )
    self.groups_store = EMKVStoreManager(
        collection=EMConstants.STORE_GROUPS, **common)
    self.collector_store = EMKVStoreManager(
        collection=EMConstants.STORE_COLLECTORS, **common)
def _setup_kv_store(self, handler):
    """Create the KV store manager backing collector configurations."""
    log.info('Setting up the Collector Configuration KV Store...')
    session_key = handler.getSessionKey()
    self.collector_store = EMKVStoreManager(
        collection=em_constants.STORE_COLLECTORS,
        server_uri=EMCommon.get_server_uri(),
        session_key=session_key,
        app=em_constants.APP_NAME)
def __init__(self, logger, session_key):
    """Prepare managers and caches used to migrate static entities to dynamic ones.

    :param logger: logger instance forwarded to the parent handler
    :param session_key: Splunk session key used for all REST / KV store access
    """
    super(StaticEntityToDynamicEntity, self).__init__(logger, session_key)
    server_uri = em_common.get_server_uri()
    self.search_manager = EMSearchManager(
        server_uri=server_uri,
        session_key=self.session_key,
        app=em_constants.APP_NAME,
    )
    self.saved_search_manager = SavedSearchManager(
        session_key=self.session_key,
        server_uri=server_uri,
        app=em_constants.APP_NAME,
    )
    # KV store collection holding the legacy (static) entity records
    self.static_entity_store = KVStoreManager(
        collection='em_entities',
        server_uri=server_uri,
        session_key=self.session_key,
        app=em_constants.APP_NAME,
    )
    # Map entity-class key -> EntityClass for quick lookup during migration
    self.entity_classes = {ec.key: ec for ec in EntityClass.load()}
    # Accumulators filled in as the migration progresses
    self.new_entities = []
    self.updated_alerts = {}
    self.entity_key_mapping = {}
    # Keys of dynamic entities that already exist, so duplicates are skipped
    self.existing_dynamic_entities = set(
        [e.key for e in EmEntity.load(0, 0, '', 'asc')])
def _setupSearchManager(self, handler):
    """Instantiate the EM search manager bound to this request's session."""
    logger.info('Setting up the EM Search Manager...')
    session_key = handler.getSessionKey()
    self.search_manager = EMSearchManager(
        server_uri=EMCommon.get_server_uri(),
        session_key=session_key,
        app=EMConstants.APP_NAME)
def handleMetricName(self, handler, confInfo):
    """
    Return metric names of the entities

    Writes a JSON list of {metric_name: {'min': ..., 'max': ...}} entries
    into confInfo['metric_names']['metric_names'].
    """
    self._setup_kv_store(handler)
    # 'query' arrives as a single-element list of dimension filters
    search_query = handler.callerArgs.data.get('query', '')
    if search_query:
        search_query = self._load_valid_metric_names_query_param(
            search_query[0])
    count = handler.callerArgs.get('count', 0)
    search_manager = EMSearchManager(EMCommon.get_server_uri(),
                                     handler.getSessionKey(),
                                     em_constants.APP_NAME)
    search_results_list = search_manager.get_metric_names_by_dim_names(
        dimensions=search_query, count=count)
    metrics_list = []
    if search_results_list:
        for result in search_results_list:
            # One entry per metric, carrying its observed min/max values
            single_metric = {
                result.get('metric_name'): {
                    'min': result.get('min'),
                    'max': result.get('max')
                }
            }
            metrics_list.append(single_metric)
    # Availability should always be the first metric
    metrics_list.insert(
        0, {
            em_constants.DEFAULT_METRIC_FOR_COLOR_BY: {
                'min': '0.00',
                'max': '1.00'
            }
        })
    confInfo['metric_names']['metric_names'] = json.dumps(metrics_list)
def _setup_savedsearch_manager(self):
    """Build the saved search manager from the instance's session keys."""
    logger.info('Setting up Savedsearch manager...')
    manager = SavedSearchManager(
        session_key=self.session_key,
        server_uri=em_common.get_server_uri(),
        app=em_constants.APP_NAME,
        system_session_key=self.system_session_key,
    )
    self.savedsearch_manager = manager
def __init__(self, session_key=''):
    """
    Return EMEntityDiscoveryController object
    """
    self.session_key = session_key
    server_uri = em_common.get_server_uri()
    self.collector_store = EMKVStoreManager(em_constants.STORE_COLLECTORS,
                                            server_uri,
                                            self.session_key,
                                            app=em_constants.APP_NAME)
    self.entity_store = EMKVStoreManager(em_constants.STORE_ENTITIES,
                                         server_uri,
                                         self.session_key,
                                         app=em_constants.APP_NAME)
    # Prefetch collectors and entities once at construction time
    self.all_collectors = self._get_all_collectors()
    self.all_entities = self._get_all_entities()
def _build_url(self, name='', query_params=None):
    """Build the saved-search REST endpoint URL.

    :param name: saved search name, URL-quoted into the path
    :param query_params: optional dict of query-string parameters;
        the caller's dict is NOT mutated (a copy is taken before
        'output_mode' is forced to 'json')
    :return: fully formatted endpoint URL string
    """
    # Copy so the in-place update below never leaks back to the caller.
    query_params = dict(query_params) if query_params else {}
    query_params.update({'output_mode': 'json'})
    url = SAVEDSEARCH_ENDPOINT_TEMPLATE.format(
        server_uri=em_common.get_server_uri(),
        app_name=em_constants.APP_NAME,
        name=urllib.quote(name),
        query_params=urllib.urlencode(query_params))
    return url
def do_additional_setup(self):
    """Configure the log level and the groups KV store for this job."""
    self.session_key = session['authtoken']
    # Log level comes from the job input; default to WARNING when absent
    configured_level = self.inputs.get('job').get('log_level', 'WARNING')
    logger.setLevel(log.parse_log_level(configured_level))
    self.group_store = KVStoreManager(em_constants.STORE_GROUPS,
                                      em_common.get_server_uri(),
                                      self.session_key,
                                      app=em_constants.APP_NAME)
def before_handle(self, request):
    """Fetch the current user's roles from splunkd and cache them in the session."""
    endpoint = '%s/services/authentication/current-context' % em_common.get_server_uri(
    )
    _resp, raw = rest.simpleRequest(endpoint,
                                    method='GET',
                                    getargs={'output_mode': 'json'},
                                    sessionKey=session['authtoken'])
    parsed = json.loads(raw)
    # current-context returns a single entry whose content lists the roles
    session.save(roles=parsed['entry'][0]['content']['roles'])
def __init__(self, logger, session_key):
    """Set up the managers needed to migrate alert savedsearches."""
    super(AlertsMigrationHandler, self).__init__(logger, session_key)
    server_uri = em_common.get_server_uri()
    self.saved_search_manager = SavedSearchManager(
        session_key=self.session_key,
        server_uri=server_uri,
        app=em_constants.APP_NAME,
    )
    # ACL manager talks to splunkd directly, hence the explicit port
    self.ACL_manager = ACLManager(
        self.session_key,
        app=em_constants.APP_NAME,
        port=getDefault('port'),
    )
def discover_entities(self):
    """
    Discover entities from identifier dimensions
    :return: list of entities
    """
    search_manager = EMSearchManager(em_common.get_server_uri(),
                                     self.session_key,
                                     em_constants.APP_NAME)
    # Search window: [now - window - lag, now - lag]
    earliest = '-%ss' % (self.monitoring_calculation_window +
                         self.monitoring_lag)
    latest = '-%ss' % self.monitoring_lag
    dims_list = search_manager.get_dimension_names_by_id_dims(
        predicate=self.source_predicate,
        id_dims_name=self.identifier_dimensions,
        earliest=earliest,
        latest=latest,
        count=0)
    dimension_names = []
    for dims in dims_list:
        dimension_names += dims.get('dims', [])
    # Remove duplicates
    dimension_names = list(set(dimension_names))
    # Filter out black_listed dimensions. A list comprehension (instead of
    # filter()) keeps this a real list on both Python 2 and 3 -- the code
    # below concatenates to it.
    dimension_names = [
        d for d in dimension_names if d not in self.blacklisted_dimensions
    ]
    # | mcatalog values(_dims) doesn't return native splunk dimensions
    # There are 3 native dimensions: host, source, sourcetype
    # If user wants to identify entity by those host then this search
    # won't work.
    # Hence, we need to add host to the list as dimension.
    if 'host' not in dimension_names:
        dimension_names.append('host')
    # Get dimension name-value pairs for all entities
    entities_dimensions_list = search_manager.get_all_dims_from_dims_name(
        predicate=self.source_predicate,
        id_dims_name=self.identifier_dimensions,
        dims_name=dimension_names,
        earliest=earliest,
        latest=latest)
    # One entity object per dimension name-value mapping
    return [
        self.get_entity(entity_dimensions)
        for entity_dimensions in entities_dimensions_list
    ]
def load_filter_by_entity_ids(cls, entity_ids):
    '''
    load groups that contain entities corresponding to the input entity ids
    :type entity_ids: list of str
    :param entity_ids: list of entity ids
    :rtype list
    :return list of EMGroup objects
    '''
    search_manager = EMSearchManager(em_common.get_server_uri(),
                                     session['authtoken'],
                                     em_constants.APP_NAME)
    groups_with_count = search_manager.filter_groups_by_entity_ids(
        entity_ids)
    group_keys = list(groups_with_count)
    # An empty key list would match nothing -- skip the KV store query
    if group_keys:
        return EMGroup.load(0, 0, '', 'asc', query={'_key': group_keys})
    return []
def handleMetricData(self, handler, confInfo):
    """
    Return metric metadata by entity name

    Expects a single-element 'query' arg carrying JSON with at least
    'metric_name', plus optional 'dimensions' and 'executeSearch' keys.
    Writes the per-entity average metric values (JSON) into
    confInfo['metric_data']['metric_data'].

    :raises ArgValidationException: when 'query' is missing
    """
    count = handler.callerArgs.get('count', 0)
    query_params = handler.callerArgs.data.get('query', '')
    if not query_params:
        raise ArgValidationException('Missing required key: query')
    query_params = self._load_valid_metric_metadata_query(query_params[0])
    self._setup_kv_store(handler)
    dimensions = query_params.get('dimensions', {})
    execute_search = normalizeBoolean(
        query_params.get('executeSearch', True))
    reformated_dimensions = dimensions
    if dimensions:
        # KV store stores dimensions under a 'dimensions.' key prefix
        reformated_dimensions = {
            'dimensions.{}'.format(key): value
            for key, value in dimensions.iteritems()
        }
    kvstore_query = EMCommon.get_query_from_request_args(
        json.dumps(reformated_dimensions))
    # Entities matching the dimension filter, limited to the fields needed
    filtered_entities = self._handleListAll(
        confInfo,
        fields='_key,dimensions,collectors.name',
        query_params={'query': kvstore_query})
    collectors = self._handleListAllConfigs(confInfo,
                                            fields='name,title_dimension')
    # Map collector name -> its title dimension for the search below
    collector_config = {
        collector.get('name'): collector.get('title_dimension')
        for collector in collectors
    }
    search_manager = EMSearchManager(EMCommon.get_server_uri(),
                                     handler.getSessionKey(),
                                     em_constants.APP_NAME)
    search_res = search_manager.get_avg_metric_val_by_entity(
        execute_search=execute_search,
        metric_name=query_params['metric_name'],
        entities=filtered_entities,
        collector_config=collector_config,
        count=count)
    # A list result is reshaped to {entity_key: value}; anything else
    # (e.g. a raw search handle when execute_search is off) passes through.
    confInfo['metric_data']['metric_data'] = \
        json.dumps({
            ret.get('key'): ret.get('value')
            for ret in search_res
        }) if isinstance(search_res, list) else search_res
def process_payload(self, payload):
    """
    Fetches the search results by using the sid from the payload

    :param payload: alert-action payload containing 'sid', 'search_name'
        and 'session_key'
    :return: parsed JSON search results
    """
    sid = payload.get('sid')
    search_name = payload.get('search_name')
    session_key = payload.get('session_key')
    # This is an informational trace (the message is even prefixed 'INFO'),
    # so log at INFO level instead of ERROR -- matches the sibling handler.
    logging.info(
        'INFO custom alert action em_write_alerts triggered, search_name = %s'
        % search_name)
    endpoint = EMConstants.SEARCH_RESULTS_ENDPOINT % (
        EMCommon.get_server_uri(), EMConstants.APP_NAME, sid)
    getargs = {'output_mode': 'json', 'count': 0}
    _, content = rest.simpleRequest(endpoint,
                                    session_key,
                                    method='GET',
                                    getargs=getargs)
    return json.loads(content)
def process_payload(self, payload):
    """
    Fetches the search results by using the sid from the payload
    """
    # Stash the payload fields on the instance for later steps
    self.sid = payload.get('sid')
    self.search_name = payload.get('search_name')
    self.session_key = payload.get('session_key')
    self.namespace = payload.get('namespace', 'splunk_app_infrastructure')
    logger.info('custom alert action triggered, search_name = %s' %
                self.search_name)
    endpoint = em_constants.SEARCH_RESULTS_ENDPOINT % (
        EMCommon.get_server_uri(), em_constants.APP_NAME, self.sid)
    _, raw_results = rest.simpleRequest(
        endpoint,
        self.session_key,
        method='GET',
        getargs={'output_mode': 'json', 'count': 0})
    return json.loads(raw_results)
def do_execute(self):
    """
    Implements the `do_execute` method of parent class. It transforms
    entity classes into savedsearches
    """
    logger.info('Start initializing entity class savedsearches...')
    entity_classes = self.load_entity_classes()
    cur_entity_class_keys = set(ec.key for ec in entity_classes)
    # Only entity classes whose collector has an active subscription get
    # (or keep) a savedsearch.
    subscribed_entity_class_keys = []
    for entity_class_key in cur_entity_class_keys:
        if has_collector_subscription(
                server_uri=em_common.get_server_uri(),
                session_key=session['authtoken'],
                collector_name=entity_class_key):
            subscribed_entity_class_keys.append(entity_class_key)
    # Marker that tags savedsearches as managed by this app's entity classes
    ec_savedsearch_managed_by_prefix = '{}:entity_class:'.format(
        em_constants.APP_NAME)
    # clean up old savedsearches
    logger.info('Cleaning up obsolete entity class savedsearches...')
    for ss in self.service.saved_searches:
        if ss['alert.managedBy'] and ss['alert.managedBy'].startswith(
                ec_savedsearch_managed_by_prefix):
            # Entity class key is whatever follows the managedBy prefix
            ss_entity_class_key = ss['alert.managedBy'][
                len(ec_savedsearch_managed_by_prefix):]
            if ss_entity_class_key not in subscribed_entity_class_keys:
                self.service.saved_searches.delete(ss['name'])
                logger.info('Deleted savedsearch %s' % ss['name'])
    # upsert new/updated savedsearches
    logger.info('Upserting new/updated entity class savedsearches...')
    for ec in entity_classes:
        if ec.key not in subscribed_entity_class_keys:
            continue
        logger.debug('entity class: %s' % ec.__dict__)
        try:
            ec.upsert_savedsearch()
            logger.info('Upserted savedsearch for entity class %s' %
                        ec.key)
        except EntityClassInternalException:
            # A failure for one entity class shouldn't block the rest
            continue
        except UnauthenticatedException as e:
            logger.error(e)
def _generate_cloudwatch_input_request(self, method, data=None, name=None):
    """Build an HTTP request against the AWS TA cloudwatch input endpoint.

    :param method: HTTP verb to force on the request (e.g. 'GET', 'POST')
    :param data: optional dict of form fields, url-encoded into the body
    :param name: optional input name appended to the endpoint path
    :return: a Request object with get_method overridden to `method`
    """
    base_url = '%s/servicesNS/nobody/Splunk_TA_aws/splunk_ta_aws_aws_cloudwatch/%s?%s'
    headers = {
        'Authorization': 'Splunk %s' % self.session_key,
        'Content-Type': 'application/json'
    }
    # Handle the query params that are passed in
    server_uri = em_common.get_server_uri()
    query_params = {'output_mode': 'json', 'count': 1, 'offset': 0}
    # Build the URL and make the request
    url = base_url % (server_uri, name or '', urlencode(query_params))
    body = to_bytes(urlencode(data)) if data else None
    request = Request(url, body, headers=headers)
    request.get_method = lambda: method
    return request
def add_fixture(self):
    """
    Add fixture data
    :return: void
    """
    collector_store = EMKVStoreManager(em_constants.STORE_COLLECTORS,
                                       em_common.get_server_uri(),
                                       self.session_key,
                                       app=em_constants.APP_NAME)
    existing = collector_store.load()
    existing_collector_names = set(item['name'] for item in existing)
    all_collector_names = set(item['name']
                              for item in em_constants.COLLECTORS)
    # Names present both in the store and in the shipped defaults
    common = existing_collector_names & all_collector_names
    # create new collectors
    for collector in em_constants.COLLECTORS:
        if collector['name'] not in common:
            collector_store.create(key=collector['name'], data=collector)
    # delete outdated collectors
    for collector in existing:
        if collector['name'] not in all_collector_names:
            collector_store.delete(key=collector['name'])
def do_additional_setup(self):
    """Set up logging, splunkd connections and migration controllers for this job."""
    log_level = self.inputs.get('job').get('log_level', 'INFO')
    self.logger = log.getLogger(logger_name=self.name, log_level=log_level)
    # Migration must not start until KV store has finished initializing
    self.check_kvstore_readiness()
    self.session_key = session['authtoken']
    # Accept '1'/'true' (case-insensitive) as dry-run; anything else runs for real
    self.dry_run = self.inputs.get('job').get('dry_run', '').lower() in ('1', 'true')
    server_uri = em_common.get_server_uri()
    self.splunkd_service = Service(
        port=getDefault('port'),
        token=self.session_key,
        owner='nobody',
        app=em_constants.APP_NAME,
    )
    self.app_conf_manager = ConfManager(conf_file=APP_CONF_FILE,
                                        server_uri=server_uri,
                                        session_key=self.session_key,
                                        app=em_constants.APP_NAME)
    self.migration_metadata = MigrationMetadata.get()
    # Versions that decide which migration handlers need to run:
    # last migrated version vs. the version declared in app.conf [launcher]
    self.current_version = self.migration_metadata.latest_migrated_version
    self.new_version = self.app_conf_manager.get_stanza(
        'launcher')['entry'][0]['content']['version']
    self.data_inputs_controller = NonMigrationDataInputsController()
    self.savedsearch_controller = SavedsearchController()
def __init__(self, session_key):
    """Wrap an EMSavedSearchManager bound to the given session."""
    server_uri = EMCommon.get_server_uri()
    self._manager = EMSavedSearchManager(server_uri=server_uri,
                                         session_key=session_key)
def _setup_savedsearch_manager(self, handler):
    """Create the saved search manager from the handler's session."""
    session_key = handler.getSessionKey()
    self.savedsearch_manager = EMSavedSearchManager(
        session_key=session_key,
        server_uri=EMCommon.get_server_uri())
def __init__(self, session_key):
    """Keep the session key and build the search manager from it."""
    self.session_key = session_key
    manager = EMSearchManager(em_common.get_server_uri(),
                              self.session_key,
                              em_constants.APP_NAME)
    self.search_manager = manager
def get_entity(self, entity_dimensions):
    """
    Get entity from metrics idx by identifier dimensions.
    :param entity_dimensions: All dimensions (including identifier_dimensions) that
    are associated with a specific entity
    i.e.
    {
        'host': 'wyoming.sa.com', 'server': 'staging',
        'tag': ['USA', 'datagen', 'states'], 'ip': '10.10.0.49',
        'os_version': '11.0', 'location': 'north americas', 'os': 'ubuntu'
    }
    :return: EMEntity object, or None when input/session is missing
    """
    if entity_dimensions is None or self.session_key is None:
        return None
    entity_store = EMKVStoreManager(em_constants.STORE_ENTITIES,
                                    em_common.get_server_uri(),
                                    self.session_key,
                                    app=em_constants.APP_NAME)
    # dimension_names contains identifier_dimension name
    dimension_names = entity_dimensions.keys()
    id_dims = {
        id_dim: entity_dimensions.get(id_dim)
        for id_dim in self.identifier_dimensions
    }
    # This assumes that title_dimension should be an existing dimension
    entity_title = entity_dimensions.get(self.title_dimension)
    # If this entity exists then imported_date should be retained
    _key = em_common.get_key_from_dims(id_dims)
    entity = entity_store.get(key=_key)
    current_time = time.time()
    imported_date = current_time
    if entity is not None:
        imported_date = entity['imported_date']
    # Define entity dimensions
    entity_id_dims = None
    entity_info_dims = None
    # Merge identifier dimensions: either an explicit list (intersected
    # with what was observed) or the wildcard '*' meaning "all observed".
    # NOTE(review): `unicode` checks imply this module targets Python 2.
    if type(self.identifier_dimensions) is list:
        entity_id_dims = list(
            set(self.identifier_dimensions) & set(dimension_names))
    elif type(self.identifier_dimensions
              ) is unicode and self.identifier_dimensions == '*':
        entity_id_dims = dimension_names
    else:
        entity_id_dims = []
    # Merge informational dimensions; with wildcard '*' they are every
    # observed dimension that is not already an identifier dimension.
    if type(self.informational_dimensions) is list:
        entity_info_dims = list(
            set(self.informational_dimensions) & set(dimension_names))
    elif type(self.informational_dimensions
              ) is unicode and self.informational_dimensions == '*':
        entity_info_dims = list(
            set(dimension_names) - set(entity_id_dims))
    else:
        entity_info_dims = []
    # Merge collector information: refresh this collector's timestamp if
    # it already touched the entity, otherwise append a new record.
    collectors = [] if entity is None else entity.get('collectors', [])
    try:
        existing_collector_info_index = [x['name'] for x in collectors
                                         ].index(self.name)
    except ValueError:
        existing_collector_info_index = -1
    if existing_collector_info_index == -1:
        collectors.append({
            'name': self.name,
            'updated_date': current_time
        })
    else:
        collectors[existing_collector_info_index][
            'updated_date'] = current_time
    return em_model_entity.EMEntity(
        title=entity_title,
        dimensions=entity_dimensions,
        identifier_dimensions=entity_id_dims,
        informational_dimensions=entity_info_dims,
        state='active',
        imported_date=imported_date,
        updated_date=current_time,
        collectors=collectors)