def __init__(self, scriptMode, ctxInfo):
    """Resolve the current Splunk user and preload that user's existing
    blacklist records from the kvstore."""
    admin.MConfigHandler.__init__(self, scriptMode, ctxInfo)
    self.shouldAutoList = False
    session_key = self.getSessionKey()
    local_service = LocalServiceManager(
        util.APP_NAME, DEFAULT_OWNER, session_key).get_local_service()
    current_context = Entity(local_service, 'authentication/current-context')
    self.username = current_context.content['username']
    self.kao = KVStoreAccessObject(KVSTORE_NAMESPACE, session_key)
    raw_records = self.kao.query_items({'username': self.username})
    self.existed_black_list = json.loads(raw_records)
class RecommendationHandler(admin.MConfigHandler):
    """REST handler that lists ML recommendation records from the kvstore."""

    def __init__(self, scriptMode, ctxInfo):
        admin.MConfigHandler.__init__(self, scriptMode, ctxInfo)
        self.shouldAutoList = False
        self.kao = KVStoreAccessObject(recomm_consts.RECOMMENDATION_COLLECTION,
                                       self.getSessionKey())

    def setup(self):
        # This endpoint accepts no arguments.
        return

    def handleList(self, confInfo):
        """Populate confInfo with every recommendation record.

        :raises admin.ArgValidationException: when running on Splunk Light
            or when the ML library is not installed.
        """
        session_key = self.getSessionKey()
        if recomm_utils.is_splunk_light(session_key):
            logger.info('ML lib does not exist in Splunk Light.')
            raise admin.ArgValidationException('Not supported in Splunk Light')
        if not recomm_utils.is_ml_lib_included(session_key):
            logger.info('ML lib does not exist')
            raise admin.ArgValidationException('ML lib does not exist')
        results = json.loads(self.kao.query_items())
        for result in results:
            entry = confInfo[result['_key']]
            # Copy every field of the kvstore record into the conf entry;
            # .items() avoids a second lookup per key.
            for key, value in result.items():
                entry.append(key, value)
        # Lazy %-args: formatting deferred until the record is emitted.
        logger.info('list %s recommendation', len(results))
        return
def __init__(self, service, session_key,
             kvstore_name=migration_const.KVSTORE_NAMESPACE,
             conf_name=const.CONF_NAME):
    """Wire up the handles used during migration.

    :param service: used to initialize configuration handler and search jobs
    :param session_key: used to initialize kvstore handler
    :param kvstore_name: name of the kv store holding anomaly detection rule settings
    :param conf_name: name of the conf file holding job settings
    """
    self.service = service
    self.jobs = service.jobs
    # KV store access for the legacy rule records.
    self.kao = KVStoreAccessObject(kvstore_name, session_key)
    # Configuration handler for the destination conf file.
    self.conf_manager = AnomalyConfManager(self.service, conf_name)
class BaseTask(object):
    """Base class for machine-learning tasks.

    Subclasses implement :meth:`execute`; :meth:`pre_execute` and
    :meth:`post_execute` are optional hooks that run around it.
    """

    def __init__(self, session_key=None):
        self.session_key = session_key
        self.feedback_kao = KVStoreAccessObject(constants.FEEDBACK_COLLECTION, session_key)
        self.recommendation_kao = KVStoreAccessObject(constants.RECOMMENDATION_COLLECTION, session_key)

    def pre_execute(self):
        """
        Executes before the task of machine learning runs. (optional)
        :return:
        """
        return

    def post_execute(self):
        """
        Executes after the task of machine learning runs. (optional)
        :return:
        """
        return

    def execute(self):
        """
        Executes the task of machine learning. Each task class needs to implement this hook.
        :return: output message (string)
        """
        raise NotImplementedError('This method needs to be implemented in each task.')

    def read_feebacks(self, ml_dimension):
        """
        Reads feedbacks from kvstore.

        NOTE: method name keeps its historical typo for backward
        compatibility; prefer :meth:`read_feedbacks`.

        :param ml_dimension: dimension for machine learning
        :return: raw kvstore query result
        """
        return self.feedback_kao.query_items({'ml_dimension': ml_dimension})

    # Correctly spelled, backward-compatible alias for read_feebacks.
    read_feedbacks = read_feebacks

    def get_cloudwatch_kpis(self, *metric_names, **time_params):
        """
        Get Cloudwatch data of some metrics.

        :param metric_names: Cloudwatch metric names
        :param time_params: optional dict keys "earliest_time" and "latest_time"
        :return: an array of "splunk.search.Result" object
        """
        index_option_value = util.get_option_from_conf(
            self.session_key, 'macros', 'aws-cloudwatch-index', 'definition')
        conditions = ' OR '.join('metric_name="%s"' % name for name in metric_names)
        # time_params is a (possibly empty) kwargs dict — it can never be
        # None, so just default any missing bounds.
        time_params.setdefault('earliest_time', 0)
        time_params.setdefault('latest_time', int(time.time()))
        return search.searchAll(
            constants.CLOUDWATCH_SPL.format(index=index_option_value,
                                            metric_name_conditions=conditions),
            sessionKey=self.session_key,
            earliestTime=time_params['earliest_time'],
            latestTime=time_params['latest_time'])
def __init__(self, scriptMode, ctxInfo):
    """Initialize the handler with kvstore access objects for the feedback
    and recommendation collections."""
    admin.MConfigHandler.__init__(self, scriptMode, ctxInfo)
    self.shouldAutoList = False
    # Hoist the session key: the original resolved it once per collection.
    session_key = self.getSessionKey()
    # Feedback records written/read by this handler.
    self.kao = KVStoreAccessObject(recomm_consts.FEEDBACK_COLLECTION, session_key)
    # Recommendation records used to enrich feedback items.
    self.recomm_kao = KVStoreAccessObject(recomm_consts.RECOMMENDATION_COLLECTION, session_key)
class RecommActionHandler(admin.MConfigHandler):
    """REST handler for listing, creating and updating recommendation
    feedback records in the kvstore."""

    def __init__(self, scriptMode, ctxInfo):
        admin.MConfigHandler.__init__(self, scriptMode, ctxInfo)
        self.shouldAutoList = False
        self.kao = KVStoreAccessObject(recomm_consts.FEEDBACK_COLLECTION, self.getSessionKey())
        self.recomm_kao = KVStoreAccessObject(recomm_consts.RECOMMENDATION_COLLECTION, self.getSessionKey())

    def setup(self):
        for arg in []:
            self.supportedArgs.addReqArg(arg)
        for arg in [ARG_RECOMM_ID, ARG_FEEDBACK, ARG_TIMESTAMP]:
            self.supportedArgs.addOptArg(arg)

    def handleList(self, confInfo):
        """List this user's feedback records, sorted by timestamp."""
        results = json.loads(self.kao.query_items(
            {'splunk_account': self.userName}, 'timestamp'))
        for result in results:
            entry = confInfo[result['_key']]
            # .items() avoids a second dict lookup per key.
            for key, value in result.items():
                entry.append(key, value)
        logger.info('list %s recommendation feedbacks', len(results))
        return

    def handleEdit(self, confInfo):
        """Update an existing feedback record with the latest
        recommendation details; returns False when the lookup fails."""
        recomm_id = self.callerArgs[ARG_RECOMM_ID][0]
        item = self._merge_details(recomm_id)
        if not item:
            logger.info('failed to update feedback for %s', recomm_id)
            return False
        logger.info('update feedback %s to %s', item[ARG_RESOURCE_ID], item[ARG_FEEDBACK])
        return self.kao.update_item_by_key(self.callerArgs.id, item)

    def handleCreate(self, confInfo):
        """Create a feedback record merged with recommendation details;
        returns the inserted record, or False when the lookup fails."""
        recomm_id = self.callerArgs[ARG_RECOMM_ID][0]
        item = self._merge_details(recomm_id)
        if not item:
            logger.info('failed to create %s feedback for %s',
                        self.callerArgs[ARG_FEEDBACK][0], recomm_id)
            return False
        new_item = json.loads(self.kao.insert_single_item(item))
        confInfo[new_item['_key']].append('id', item)
        logger.info('create feedback for %s', item[ARG_RESOURCE_ID])
        return new_item

    def _merge_details(self, recomm_id):
        """Build a feedback item from the recommendation identified by
        recomm_id, or return False when the recommendation is missing."""
        try:
            item = json.loads(self.recomm_kao.get_item_by_key(recomm_id))
        except Exception:  # was a bare except; keep best-effort but narrow it
            logger.info('failed to find recomm_item %s', recomm_id)
            return False
        feedback_item = {}
        for key in [ARG_RESOURCE_ID, ARG_DIMENSION, ARG_FEATURE, ARG_PRIORITY, ARG_ACTION]:
            feedback_item[key] = item[key]
        feedback_item[ARG_RECOMM_ID] = recomm_id
        feedback_item['splunk_account'] = self.userName
        feedback_item[ARG_FEEDBACK] = self.callerArgs[ARG_FEEDBACK][0]
        # NOTE(review): '/ 1000' converts ms to s. Under Python 3 this yields
        # a float where Python 2 truncated to int — confirm which the kvstore
        # schema expects before changing.
        feedback_item[ARG_TIMESTAMP] = int(self.callerArgs[ARG_TIMESTAMP][0]) / 1000
        return feedback_item
def __init__(self, session_key=None):
    """Remember the session key and open kvstore accessors for the
    feedback and recommendation collections."""
    self.session_key = session_key
    self.feedback_kao = KVStoreAccessObject(
        constants.FEEDBACK_COLLECTION, session_key)
    self.recommendation_kao = KVStoreAccessObject(
        constants.RECOMMENDATION_COLLECTION, session_key)
class KVStoreToConfMigrationManager(object):
    """
    Handle anomaly detection settings migration from kvstore to configuration
    file and data from "aws_anomaly_detection" index to "main" index with
    "anomaly detection" source
    """

    def __init__(self, service, session_key,
                 kvstore_name=migration_const.KVSTORE_NAMESPACE,
                 conf_name=const.CONF_NAME):
        """
        :param service: used to initialize configuration handler and search jobs
        :param session_key: used to initialize kvstore handler
        :param kvstore_name: the name of kv store which stored anomaly detection rule settings
        :param conf_name: the name of conf file which stored job settings
        """
        self.service = service
        self.jobs = service.jobs
        self.conf_manager = AnomalyConfManager(self.service, conf_name)
        self.kao = KVStoreAccessObject(kvstore_name, session_key)

    def migrate(self):
        """Migrate every kvstore rule to the conf file and kick off its data
        migration search, deleting each kvstore record once handled.

        :return: list of stanza names created by this run
        """
        anomaly_detection_rules = json.loads(self.kao.query_items())
        if not anomaly_detection_rules:
            logger.info('Anomaly Detection Migration: no need to migrate.')
            return []  # empty kvstore
        migrate_stanza_name_list = []
        # get existed jobs' searches and names from anomaly configuration file
        searches = []
        names = []
        for config in self.conf_manager.conf.list():
            content = config.content()
            if migration_const.JOB_NAME in content and migration_const.JOB_SEARCH in content:
                searches.append(content[migration_const.JOB_SEARCH])
                names.append(content[migration_const.JOB_NAME])
        for rule in anomaly_detection_rules:
            # migrate from kvstore to configuration file
            stanza_name, settings = self._migrate_settings(rule, searches, names)
            if stanza_name is None:
                # the equivalent search already exists: nothing to create
                logger.info(
                    'Anomaly Detection Migration: KVstore object with key %s has been migrated before.',
                    rule['_key'])
            else:
                # save job and migrate data
                self.conf_manager.create_stanza(stanza_name, settings)
                names.append(settings['job_name'])
                searches.append(settings['job_search'])
                logger.info(
                    'Anomaly Detection Migration: KVstore object with key %s has been migrated to conf file with stanza name %s',
                    rule['_key'], stanza_name)
                # migrate from "aws_anomaly_detection" index to "summary" index
                job = self._migrate_data(rule, stanza_name)
                logger.info(
                    'Anomaly Detection Migration: data migration with job sid %s and isFail %s',
                    job['sid'], job['isFailed'])
            # clear kvstore
            self.kao.delete_item_by_key(rule['_key'])
            migrate_stanza_name_list.append(stanza_name)
        return [x for x in migrate_stanza_name_list if x is not None]

    def _migrate_settings(self, rule, searches, names):
        """Translate one kvstore rule into (stanza_name, settings), or
        (None, None) when an identical search is already configured."""
        is_billing = rule['category'] == 'billing'
        spl = migration_const.KVSTORE_TO_CONF_SPL[rule['category']][rule['granularity']]
        service_display_name = 'all' if rule['service'] == '*' else rule['service']
        # billing rules carry no response dimension
        response = '' if is_billing else rule['parameters'].split('=')[1]
        search = spl.format(rule['anomalyAccount'], rule['service'],
                            service_display_name, response)
        if search in searches:
            return None, None
        granularity_display_name = 'daily' if rule['granularity'] == 'd' else 'hourly'
        response_display_name = 'response=all' if response == '*' else 'response=' + response
        if is_billing:
            response_display_name = ''
        job_name = '{0} {1} {2} {3} {4}'.format(rule['category'],
                                                rule['anomalyAccount'],
                                                service_display_name,
                                                granularity_display_name,
                                                response_display_name)
        train = '10' + rule['granularity']
        schedule = 'Hourly' if rule['granularity'] == 'h' else 'Daily'
        # stanza names are opaque UUIDs; display info lives in job_name
        return str(uuid.uuid1()), {
            'job_name': job_name,
            'job_train': train,
            'job_schedule': schedule,
            'job_priority': migration_const.DEFAULT_PRIORIY,
            'job_search': search,
            'job_mode': migration_const.DEFAULT_MODE
        }

    def _migrate_data(self, rule, job_id):
        """Start the search that copies the rule's historical results into
        the new index; returns the created search job."""
        service = 'all' if rule['service'] == '*' else rule['service']
        # raw string: the regex literally matches "response=*"
        parameters_regex = r'response=\*' if '*' in rule['parameters'] else rule['parameters']
        migrate_data_spl = migration_const.DATA_MIGRATE_SPL.format(
            '{0}_{1}_{2}'.format(rule['anomalyAccount'], rule['category'],
                                 rule['service']), rule['granularity'],
            parameters_regex, job_id, service, const.INDEX, const.SOURCE_TYPE)
        job = self.jobs.create(migrate_data_spl)
        return job
class WarningMessageHandler(admin.MConfigHandler):
    """REST handler managing a per-user blacklist of warning-message page
    roots stored in the kvstore."""

    def __init__(self, scriptMode, ctxInfo):
        admin.MConfigHandler.__init__(self, scriptMode, ctxInfo)
        session_key = self.getSessionKey()
        service = LocalServiceManager(util.APP_NAME, DEFAULT_OWNER,
                                      session_key).get_local_service()
        self.username = Entity(
            service, 'authentication/current-context').content['username']
        self.kao = KVStoreAccessObject(KVSTORE_NAMESPACE, session_key)
        # Cached blacklist records for this user (list of kvstore items).
        self.existed_black_list = json.loads(
            self.kao.query_items({'username': self.username}))
        self.shouldAutoList = False

    def setup(self):
        for arg in [ARG_PAGE_ROOTS]:
            self.supportedArgs.addOptArg(arg)

    def handleRemove(self, confInfo):
        """Blacklist the page root that contains the removed page."""
        page_remove = self.callerArgs.id
        page_root_remove = None
        for root, pages in PAGE_ROOT_MAP.items():
            if page_remove in pages:
                page_root_remove = root
                break
        if page_root_remove is None:
            return
        if not self.existed_black_list:
            # no record for this user yet: insert directly
            self._save_blacklist([page_root_remove])
        else:
            black_list = self.existed_black_list[0]['blacklist']
            # only write when the root is not already blacklisted
            if page_root_remove not in black_list:
                self._save_blacklist(black_list + [page_root_remove])
        return

    def handleList(self, confInfo):
        """Report the user's blacklisted page roots and the pages they cover."""
        results = confInfo['user_pages']
        page_roots = []
        # re-query so the listing reflects the latest kvstore state
        self.existed_black_list = json.loads(
            self.kao.query_items({'username': self.username}))
        if self.existed_black_list:
            page_roots = self.existed_black_list[0]['blacklist']
        results.append('page_roots', page_roots)
        pages = []
        for root in page_roots:
            pages += PAGE_ROOT_MAP[root]
        results.append('pages', pages)
        return

    def handleCreate(self, confInfo):
        """Replace the user's blacklist with the given comma-separated roots."""
        if self.callerArgs[ARG_PAGE_ROOTS][0] is None:
            page_roots = []
        else:
            page_roots = self.callerArgs[ARG_PAGE_ROOTS][0].split(',')
        self._save_blacklist(page_roots)
        return

    def _save_blacklist(self, blacklist):
        # Insert a record when the user has none, otherwise overwrite the
        # existing record's blacklist (shared by handleRemove/handleCreate).
        if not self.existed_black_list:
            self.kao.insert_single_item({
                'username': self.username,
                'blacklist': blacklist
            })
        else:
            self.kao.update_item_by_key(self.existed_black_list[0]['_key'], {
                'username': self.username,
                'blacklist': blacklist
            })