Example #1
0
    def prepare_dynamic_entities_from_static_entities(self, static_entities):
        """Convert legacy static entities into dynamic-entity (DE) records.

        For each static entity: derive a deterministic key by hashing its
        identifier-dimension values plus the collector name, build an
        EmEntity, queue its raw form for a later bulk save, and record the
        mapping from the old kvstore ``_key`` to the new entity key.

        :param static_entities: list of static entity dicts from the kvstore
        """
        self.logger.info(
            'Preparing DE entities from static entities, number of entities to migrate: %d'
            % len(static_entities))
        for static_entity in static_entities:
            collector = static_entity['collectors'][0]['name']
            klass = self.entity_classes[collector]
            # Key material is "<id-dim values>:<collector>" joined by ':'.
            key_parts = [
                static_entity['dimensions'][dim]
                for dim in klass.identifier_dimensions
            ]
            key_parts.append(collector)
            hashed_key = sha256(to_bytes(':'.join(key_parts))).hexdigest()

            timestamp = int(time.time())

            dynamic_entity = EmEntity(
                key=hashed_key,
                title=static_entity['title'],
                entity_class=collector,
                mod_time=timestamp,
                expiry_time=timestamp + klass.monitoring_window,
                identifier_dimension_names=klass.identifier_dimensions,
                dimensions=static_entity['dimensions'],
            )
            self.new_entities.append(dynamic_entity._raw())
            self.entity_key_mapping[static_entity['_key']] = dynamic_entity.key
Example #2
0
    def execute(self, results, payload):
        """Retire (bulk-delete) the entities identified by the search results.

        :param results: iterable of search result dicts, each carrying the
                        entity ``key`` to delete
        :param payload: modular-alert payload (unused here)
        """
        entity_to_retire_keys = [res['key'] for res in results]

        # Idiomatic truthiness check instead of `if len(...)`.
        if entity_to_retire_keys:
            try:
                EmEntity.bulk_delete({'_key': entity_to_retire_keys})
                # Surface a Splunk UI message so admins can see the cleanup ran.
                self.service.messages.create(
                    'successful-retirement-%s' % time.time(),
                    severity='info',
                    value='Successfully retired %s inactive entities.' %
                    len(entity_to_retire_keys))
            except Exception as e:
                # Best-effort cleanup: log and continue rather than fail the action.
                logger.error('failed to retire entities - error: %s' % e)
Example #3
0
    def __init__(self, logger, session_key):
        """Set up search/saved-search/kvstore managers and migration caches.

        :param logger: logger used for progress and error reporting
        :param session_key: Splunk session key for authenticated REST access
        """
        super(StaticEntityToDynamicEntity, self).__init__(logger, session_key)

        server_uri = em_common.get_server_uri()
        self.search_manager = EMSearchManager(
            server_uri=server_uri,
            session_key=self.session_key,
            app=em_constants.APP_NAME,
        )
        self.saved_search_manager = SavedSearchManager(
            session_key=self.session_key,
            server_uri=server_uri,
            app=em_constants.APP_NAME,
        )
        self.static_entity_store = KVStoreManager(
            collection='em_entities',
            server_uri=server_uri,
            session_key=self.session_key,
            app=em_constants.APP_NAME,
        )

        # Entity class definitions keyed by class name.
        self.entity_classes = {ec.key: ec for ec in EntityClass.load()}
        self.new_entities = []
        self.updated_alerts = {}
        self.entity_key_mapping = {}
        # Set comprehension instead of set([...]) — avoids the throwaway list.
        self.existing_dynamic_entities = {
            e.key for e in EmEntity.load(0, 0, '', 'asc')}
    def stream(self, records):
        """
        Generator function that processes and yields event records to the Splunk stream pipeline.

        Each record is optionally passed through unmodified (retain_input_record),
        then tagged with the id/title of every group whose membership filter it
        matches; records are yielded once per matching group.

        :param records: splunk event records
        :return: generator of (possibly group-tagged) records
        """
        try:
            # save authtoken into session so that subsequent routines can directly use it
            session.save(authtoken=self._metadata.searchinfo.session_key)

            for record in records:
                if self.retain_input_record:
                    record['group_id'] = 'N/A'
                    record['group_title'] = 'N/A'
                    yield record
                if self.groups:
                    # Build the entity once per record — it does not depend on
                    # which group we match against, so hoist it out of the loop.
                    # TODO: maybe should mark _from_raw a public method?
                    entity = EmEntity._from_raw(record)
                    for group in self.groups:
                        # check matching groups
                        if group.check_entity_membership(entity):
                            record['group_id'] = group.key
                            record['group_title'] = group.title
                            yield record
                else:
                    yield record
        finally:
            # clear session at the end so it doesn't get persisted across calls otherwise it might
            # accidentally enable unauthorized access.
            session.clear()
Example #5
0
    def handle_get(self, request, key):
        """Return one entity (plus its correlation filter) as a JSON response.

        :param request: incoming REST request (unused beyond routing)
        :param key: entity ``_key`` to look up
        :raises EntityNotFoundException: when no entity has the given key
        """
        entity = EmEntity.get(key)
        if not entity:
            # Bug fix: the %(key)s placeholder was never substituted, so the
            # error message literally contained "%(key)s".
            raise EntityNotFoundException(
                _('Entity with id %(key)s not found.') % {'key': key})

        correlation_filter = entity.get_correlation_filter()
        response = self.extract_entity_json_response(entity)
        response.update({'correlation_filter': serialize(correlation_filter)})
        return response
Example #6
0
 def get_entities(self):
     '''
     Get a list of entities that are members of this group
     '''
     membership_query = self.filter.to_dict(key_prefix='dimensions.')
     # count=0/offset=0 loads every match, sorted by title ascending.
     return EmEntity.load(0, 0, 'title', 'asc', query=membership_query)
Example #7
0
 def handle_load(self, request):
     """Return a page of entities as JSON, honoring paging/sort query params."""
     params = request.query
     count = params.get('count', 0)
     offset = params.get('offset', 0)
     sort_key = params.get('sort_key', '')
     sort_dir = params.get('sort_dir', 'asc')
     query_dict = self._get_entity_filter_query(request)
     loaded = EmEntity.load(count, offset, sort_key, sort_dir, query_dict)
     return [self.extract_entity_json_response(e) for e in loaded]
Example #8
0
    def handle_metric_data(self, request):
        """Return average metric values per entity for a metric-data query.

        :param request: REST request whose 'query' parameter carries a JSON
                        metric-data query (metric_name, dimensions, ...)
        :raises EntityArgValidationException: when 'query' is missing
        """
        count = request.query.get('count', 0)
        query = request.query.get('query', '')
        if not query:
            raise EntityArgValidationException(
                _('Missing required query parameter: query'))
        query_params = self._load_valid_metric_data_query(query)

        # Prefix dimension filters so they address the stored field names.
        raw_dims = query_params.get('dimensions', {})
        dim_filter = {
            'dimensions.{}'.format(name): val
            for name, val in raw_dims.items()
        }

        # retrieve filtered entities and transform it for get_avg_metric_val_by_entity()
        matched = EmEntity.load(count, 0, '', 'asc', dim_filter)
        filtered_entities = []
        for ent in matched:
            # Flatten multi-valued dimensions; the last value wins per key,
            # matching the original dict-comprehension semantics.
            flattened = {}
            for dim_name, dim_values in ent.dimensions.items():
                for value in dim_values:
                    flattened[dim_name] = value
            filtered_entities.append({
                "key": ent.key,
                "collectors": [{"name": ent.entity_class}],
                "dimensions": flattened,
            })

        # get entity class map of key to title dimension
        collectors_map = {}
        for ec in EntityClass.load():
            collectors_map[ec.key] = {
                'title_dim': ec.title_dimension,
                'id_dims': ec.identifier_dimensions,
            }

        # run search
        should_execute_search = normalizeBoolean(
            query_params.get('executeSearch', True))
        search_res = self.search_manager.get_avg_metric_val_by_entity(
            execute_search=should_execute_search,
            metric_name=query_params['metric_name'],
            entities=filtered_entities,
            collector_config=collectors_map,
            count=count,
            collection=em_constants.STORE_ENTITY_CACHE)
        # A list result is reshaped into {key: value}; any other result
        # (e.g. an unexecuted search) is returned as-is.
        if isinstance(search_res, list):
            return {res.get('key'): res.get('value') for res in search_res}
        return search_res
Example #9
0
    def prepare_itoa_entities(self):
        """
        Check if kvstore is ready, if not simply exit

        If ready, load all SAI entities and convert them into ITOA entities
        :return: itoa_entities
        """
        try:
            em_common.check_kvstore_readiness(session['authtoken'])
        except em_common.KVStoreNotReadyException as e:
            logger.error(
                'Migrate SAI entities to ITSI failed because KVStore is not ready - Error: %s'
                % e)
            sys.exit(1)
        else:
            # KVStore is healthy: pull every SAI entity and convert.
            sai_entities = EmEntity.load(0, 0, '', 'asc')
            return self._convert_to_itoa(sai_entities)
Example #10
0
    def _add_data_from_kvstore_to_alert(self, alert_data):
        """
        Adds data from the kvstore to the alert
        """
        managed_by_type = alert_data['managed_by_type']
        source_id = alert_data['managed_by_id']
        # Read unconditionally: a missing 'entity_title' is an error for
        # group alerts too (KeyError), matching the original contract.
        entity_title = alert_data['entity_title']
        if managed_by_type == ALERT_TYPE_ENTITY:
            # Entity-scoped alert: title from the alert, dims from the entity.
            entity = EmEntity.get(source_id)
            alert_data['managed_by_value'] = entity_title
            alert_data['dimensions'] = entity.dimensions
        else:
            # Group-scoped alert: title and filter dims come from the group.
            group = EMGroup.get(source_id)
            alert_data['managed_by_value'] = group.title
            alert_data['dimensions'] = group.filter.to_dict()
        return alert_data
    def update_group_membership(self):
        """Recompute per-group entity counts and persist them in one batch."""
        logger.info('Starting group membership update...')
        # reload all the entities after discovery are done
        groups = EMGroup.load(0, 0, '', 'asc')
        entities = EmEntity.load(0, 0, '', 'asc')
        updated_groups = []
        # go through each group and update entities count
        for group in groups:
            members = [e for e in entities if group.check_entity_membership(e)]
            status_counts = Counter(member.status for member in members)
            group.entities_count = len(members)
            group.active_entities_count = status_counts.get(EmEntity.ACTIVE, 0)
            group.inactive_entities_count = status_counts.get(EmEntity.INACTIVE, 0)
            logger.info('group "%s" metadata - active: %s, inactive: %s, count: %s' % (
                group.title, group.active_entities_count, group.inactive_entities_count, len(members)
            ))
            updated_groups.append(group.get_raw_data())

        self.group_store.batch_save(updated_groups)
        logger.info('Finished group membership update...')
Example #12
0
 def _apply_changes(self):
     """Persist queued entity records and push pending saved-search updates."""
     if self.new_entities:
         EmEntity.storage_bulk_save(self.new_entities)
     for search_name, search_props in self.updated_alerts.items():
         self.saved_search_manager.update(search_name, search_props)
Example #13
0
 def handle_dimension_summary(self, request):
     """Summarize dimensions across the entities matching the request filter."""
     entity_filter = self._get_entity_filter_query(request)
     return {'dimensions': EmEntity.get_dimension_summary(entity_filter)}
Example #14
0
 def handle_metadata(self, request):
     """Return entity metadata for the filtered entity set."""
     entity_filter = self._get_entity_filter_query(request)
     return EmEntity.get_metadata(entity_filter)
Example #15
0
 def handle_bulk_delete(self, request):
     """Bulk-delete filtered entities, skipping any keys in exclusion_list."""
     entity_filter = self._get_entity_filter_query(request)
     excluded_keys = json.loads(request.query.get('exclusion_list', '[]'))
     EmEntity.bulk_delete(entity_filter, exclusion_list=excluded_keys)
Example #16
0
 def handle_delete(self, request, key):
     """Delete a single entity by key via the bulk-delete API."""
     EmEntity.bulk_delete({'_key': [key]})
Example #17
0
    def make_incident_from_alert(self, result, session_key):
        """Build a VictorOps incident payload from a triggered alert result.

        Collects metric info (name, state, value), then group- or
        entity-specific context (title, dimensions, workspace link), and
        finally the VictorOps routing fields (message_type, entity_id,
        state_message).

        :param result: alert search result dict (Splunk result row)
        :param session_key: Splunk session key (unused in this method body)
        :return: dict of incident fields to POST to VictorOps
        """

        incident = {}
        # name of alert triggered
        alert_name = result['ss_id']
        incident['Alert Name'] = alert_name
        # metric being alerted on
        metric_name = result['metric_name']
        incident['Metric Name'] = metric_name
        # State of metric at time of alert. If metric is info, warn, or critical. This is same as 'message_type'
        alert_state_and_incident_level = SPLUNK_ALERT_CODE_TO_VICTOROPS_INCIDENT_LEVEL[result['current_state']]
        incident['Metric State'] = alert_state_and_incident_level
        # if metric improved or degraded
        state_change = result['state_change']
        incident['Metric State Change'] = state_change

        # value of metric at time of alert, rounded to one decimal place
        metric_value = str(round(float(result['current_value']), 1))
        incident['Metric Value'] = metric_value

        # Now setting entity and group specific information
        # Fetching some variables which are necessary in multiple places later
        managed_by_id = result['managed_by_id']
        managed_by_type = result.get('managed_by_type', '')
        entity_title = result.get('entity_title', '')
        aggregation = result.get('aggregation_method', '').lower()
        metric_filters_incl = result.get('metric_filters_incl', '')
        metric_filters_excl = result.get('metric_filters_excl', '')
        split_by = result.get('split_by', '')
        # The split-by value lives in a result field named after split_by itself.
        split_by_value = result.get(split_by, '')
        # Split-by identifier dimensions gives no split_by_value but adds entity_title
        if (split_by and not split_by_value):
            split_by_value = entity_title

        if (metric_filters_incl):
            incident['Metric Filters (Inclusive)'] = metric_filters_incl
        if (metric_filters_excl):
            incident['Metric Filters (Exclusive)'] = metric_filters_excl

        # If alert is coming from GROUP...
        if result['managed_by_type'] == ALERT_TYPE_GROUP:
            group = EMGroup.get(managed_by_id)
            filter_dimensions_dict = group.filter.to_dict()
            title = group.title

            filter_dimensions_formatted = EMSendVictorOpsAlertAction._format_filter_dimensions(filter_dimensions_dict)
            workspace_link = self._make_workspace_url(ALERT_TYPE_GROUP, managed_by_id, alert_name)
            incident['Group Triggering Alert'] = title
            incident['Dimensions on Originating Group'] = filter_dimensions_formatted
            incident['Link to Alert Workspace'] = workspace_link
        else:
            # If alert is coming from ENTITY...
            entity = EmEntity.get(managed_by_id)
            title = entity_title

            filter_dimensions_formatted = EMSendVictorOpsAlertAction._format_filter_dimensions(entity.dimensions)
            workspace_link = self._make_workspace_url(ALERT_TYPE_ENTITY, managed_by_id, alert_name)
            incident['Host Triggering Alert'] = entity_title
            incident['Dimensions on Originating Host'] = filter_dimensions_formatted
            incident['Link to Alert Workspace'] = workspace_link

        # Lastly, setting victorops-specific info
        # message_type tells VO whether incident is info, warn, or critical
        incident['message_type'] = alert_state_and_incident_level
        # entity_id is incident's uuid. It lets you update the incident. It has nothing to do with SII entity concept.
        incident['entity_id'] = '%s_%s' % (managed_by_id, metric_name)
        # VO uses message to populate emails, service now tickets, slack etc
        # Group (or entity) split-by alert
        # NOTE(review): this compares against the literal string 'None' —
        # presumably the search emits 'None' as text when no split-by is set.
        # Confirm: a missing split_by (default '') also takes this branch and
        # would render an empty "{split_by}:" clause.
        if (split_by != 'None'):
            split_by_clause = (
                ' ({aggregation}) on {managed_by_type}: {title}, {split_by}: '
                '{split_by_value}'
                ).format(
                    managed_by_type=managed_by_type,
                    title=title,
                    split_by=split_by,
                    split_by_value=split_by_value,
                    aggregation=aggregation
                )
        # Entity or group aggregation alert
        else:
            split_by_clause = (
                ' ({aggregation}) on {managed_by_type}: {title}'
                ).format(
                    managed_by_type=managed_by_type,
                    title=title,
                    aggregation=aggregation
                )

        message = '{metric_name} {state_change}s to {metric_value}{split_by_clause}'.format(
            metric_name=metric_name,
            state_change=state_change,
            metric_value=metric_value,
            split_by_clause=split_by_clause
        )
        incident['state_message'] = message
        incident['entity_display_name'] = message
        return incident