Exemplo n.º 1
0
def extract_special_event_types(requests):
    """
    Build the list of (aggregate_event_type, matching_requests) pairs.

    Two aggregates are produced, in this order:

    * ``EventType.EVENT_OWS`` — requests whose event type name starts with
      ``OWS:`` (excluding the aggregate type itself);
    * ``EventType.EVENT_OTHER`` — requests with any remaining non-null
      event type (excluding the aggregate type itself).
    """
    non_null = requests.exclude(event_type__isnull=True)

    # Distinct OWS:* event-type ids, rolled up under the EVENT_OWS aggregate.
    ows_ids = non_null.filter(event_type__name__startswith='OWS:') \
                      .exclude(event_type__name=EventType.EVENT_OWS) \
                      .distinct('event_type') \
                      .values_list('event_type', flat=True)

    # Everything else, rolled up under the EVENT_OTHER aggregate.
    other_ids = non_null.exclude(event_type__name__startswith='OWS:') \
                        .exclude(event_type__name=EventType.EVENT_OTHER) \
                        .distinct('event_type') \
                        .values_list('event_type', flat=True)

    return [
        (EventType.get(EventType.EVENT_OWS),
         requests.filter(event_type__in=ows_ids)),
        (EventType.get(EventType.EVENT_OTHER),
         requests.filter(event_type__in=other_ids)),
    ]
Exemplo n.º 2
0
    def get_metrics_data(self, metric_name,
                         valid_from,
                         valid_to,
                         interval,
                         service=None,
                         label=None,
                         user=None,
                         resource=None,
                         resource_type=None,
                         event_type=None,
                         service_type=None,
                         group_by=None):
        """
        Return metric values for ``metric_name`` within the given time span.

        Assembles a raw SQL query over ``monitoring_metricvalue`` (joined with
        the service-type-metric, metric and metric-label tables) from fragment
        lists (``q_select``/``q_from``/``q_where``/``q_group``/``q_order_by``),
        executes it via ``raw_sql`` and returns the rows as dicts, optionally
        folding the grouping columns into a nested dict per row (see
        ``postproc`` below).

        :param metric_name: metric name, matched against ``m.name``
        :param valid_from: window start (datetime; formatted as UTC timestamp)
        :param valid_to: window end (datetime; formatted as UTC timestamp)
        :param interval: requested interval; not referenced in this method —
            presumably consumed by callers or kept for API symmetry (TODO confirm)
        :param service: restrict to one service (mutually exclusive with
            ``service_type``)
        :param label: restrict to one metric label
        :param user: restrict to one label user (``ml.user``)
        :param resource: restrict to one monitored resource (mutually
            exclusive with resource-oriented ``group_by`` modes)
        :param resource_type: restrict to resources of this type (only used
            when ``resource`` is not given)
        :param event_type: restrict to one event type; defaults to the
            EVENT_ALL aggregate unless grouping by event type
        :param service_type: restrict to services of this type
        :param group_by: one of the keys of ``group_by_map`` below, or None
        :return: list of dict rows (post-processed by ``postproc``)
        :raises ValueError: when ``service`` and ``service_type`` are both
            given, or ``resource`` is combined with a resource ``group_by``
        """
        # SQL parameters are always passed separately (never interpolated),
        # so the query stays parameterized.
        params = {}
        col = 'mv.value_num'
        agg_f = self.get_aggregate_function(col, metric_name, service)
        # has_agg: True when the metric is actually aggregated (the aggregate
        # function differs from the bare column).
        has_agg = agg_f != col
        # Per-group_by-mode query fragments:
        #   select       -> columns appended to the default select list
        #   select_only  -> columns that REPLACE the default select list
        #   from/where   -> extra joins / filters
        #   group_by     -> replaces the default GROUP BY (None keeps default)
        #   grouper      -> [target_key, source_col...] used by postproc to
        #                   fold the listed row columns into row[target_key]
        group_by_map = {'resource': {'select': ['mr.id', 'mr.type', 'mr.name', 'mr.resource_id'],
                                     'from': ['join monitoring_monitoredresource mr on (mv.resource_id = mr.id)'],
                                     'where': ['and mv.resource_id is not NULL'],
                                     'order_by': None,
                                     'grouper': ['resource', 'name', 'type', 'id', 'resource_id'],
                                     },
                        # for each resource get the number of unique labels
                        'resource_on_label': {'select_only': ['mr.id', 'mr.type', 'mr.name', 'mr.resource_id',
                                                              'count(distinct(ml.name)) as val',
                                                              'count(1) as metric_count',
                                                              'sum(samples_count) as samples_count',
                                                              'sum(mv.value_num), min(mv.value_num)',
                                                              'max(mv.value_num)', ],
                                              'from': [('join monitoring_monitoredresource mr '
                                                        'on (mv.resource_id = mr.id)')],
                                              'where': ['and mv.resource_id is not NULL'],
                                              'order_by': ['val desc'],
                                              'group_by': ['mr.id', 'mr.type', 'mr.name'],
                                              'grouper': ['resource', 'name', 'type', 'id', 'resource_id'],
                                              },
                        # for each resource get the number of unique users
                        'resource_on_user': {'select_only': ['mr.id', 'mr.type', 'mr.name', 'mr.resource_id',
                                                             'count(distinct(ml.user)) as val',
                                                             'count(1) as metric_count',
                                                             'sum(samples_count) as samples_count',
                                                             'sum(mv.value_num), min(mv.value_num)',
                                                             'max(mv.value_num)', ],
                                             'from': [('join monitoring_monitoredresource mr '
                                                       'on (mv.resource_id = mr.id)')],
                                             'where': ['and mv.resource_id is not NULL'],
                                             'order_by': ['val desc'],
                                             'group_by': ['mr.id', 'mr.type', 'mr.name'],
                                             'grouper': ['resource', 'name', 'type', 'id', 'resource_id'],
                                             },
                        'event_type': {'select_only': ['ev.name as event_type', 'count(1) as val',
                                                       'count(1) as metric_count',
                                                       'sum(samples_count) as samples_count',
                                                       'sum(mv.value_num), min(mv.value_num)',
                                                       'max(mv.value_num)', ],
                                       'from': ['join monitoring_eventtype ev on (ev.id = mv.event_type_id)',
                                                ('join monitoring_monitoredresource mr '
                                                 'on (mv.resource_id = mr.id)')],
                                       'where': [],
                                       'order_by': ['val desc'],
                                       'group_by': ['ev.name'],
                                       'grouper': [],
                                       },
                        'event_type_on_label': {'select_only': ['ev.name as event_type',
                                                                'count(distinct(ml.name)) as val',
                                                                'count(1) as metric_count',
                                                                'sum(samples_count) as samples_count',
                                                                'sum(mv.value_num), min(mv.value_num)',
                                                                'max(mv.value_num)', ],
                                                'from': ['join monitoring_eventtype ev on (ev.id = mv.event_type_id)',
                                                         ('join monitoring_monitoredresource mr '
                                                          'on (mv.resource_id = mr.id)')],
                                                'where': [],
                                                'order_by': ['val desc'],
                                                'group_by': ['ev.name'],
                                                'grouper': [],
                                                },
                        # group by user: number of unique user
                        'user': {'select_only': [('count(distinct(ml.user)) as val, '
                                                  'count(1) as metric_count, sum(samples_count) as samples_count, '
                                                  'sum(mv.value_num), min(mv.value_num), max(mv.value_num)')],
                                 'from': [('join monitoring_monitoredresource mr '
                                           'on (mv.resource_id = mr.id)')],
                                 # 'from': [], do we want to retrieve also events not related to a monitored resource?
                                 'where': ['and ml.user is not NULL'],
                                 'order_by': ['val desc'],
                                 'group_by': [],
                                 'grouper': [],
                                 },
                        # number of labels for each user
                        'user_on_label': {'select_only': ['ml.user as user, count(distinct(ml.name)) as val, '
                                                          'count(1) as metric_count',
                                                          'sum(samples_count) as samples_count',
                                                          'sum(mv.value_num), min(mv.value_num)',
                                                          'max(mv.value_num)', ],
                                          'from': [('join monitoring_monitoredresource mr '
                                                    'on (mv.resource_id = mr.id)')],
                                          'where': ['and ml.user is not NULL'],
                                          'order_by': ['val desc'],
                                          'group_by': ['ml.user'],
                                          'grouper': [],
                                          },
                        # group by label
                        'label': {'select_only': [('count(distinct(ml.name)) as val, '
                                                   'count(1) as metric_count, sum(samples_count) as samples_count, '
                                                   'sum(mv.value_num), min(mv.value_num), max(mv.value_num)')],
                                  'from': [('join monitoring_monitoredresource mr '
                                            'on (mv.resource_id = mr.id)')],
                                  'where': [],  # ["and mv.resource_id is NULL or (mr.type = '')"],
                                  'order_by': ['val desc'],
                                  'group_by': [],
                                  'grouper': [],
                                  },
                        }

        # Base query: metric values joined with their metric definition and
        # label; further joins are appended below depending on the filters.
        q_from = ['from monitoring_metricvalue mv',
                  'join monitoring_servicetypemetric mt on (mv.service_metric_id = mt.id)',
                  'join monitoring_metric m on (m.id = mt.metric_id)',
                  'join monitoring_metriclabel ml on (mv.label_id = ml.id) ']
        # Time-window condition: a value row qualifies if its validity period
        # overlaps the requested [valid_from, valid_to) window (two half-open
        # variants OR-ed together).
        q_where = ['where', " ((mv.valid_from >= TIMESTAMP %(valid_from)s AT TIME ZONE 'UTC' ",
                   "and mv.valid_to < TIMESTAMP %(valid_to)s AT TIME ZONE 'UTC') ",
                   "or (mv.valid_from > TIMESTAMP %(valid_from)s AT TIME ZONE 'UTC' ",
                   "and mv.valid_to <= TIMESTAMP %(valid_to)s AT TIME ZONE 'UTC')) ",
                   'and m.name = %(metric_name)s']
        # Default grouping is per label.
        q_group = ['ml.name']
        params.update({'metric_name': metric_name,
                       'valid_from': valid_from.strftime('%Y-%m-%d %H:%M:%S'),
                       'valid_to': valid_to.strftime('%Y-%m-%d %H:%M:%S')})

        q_order_by = ['val desc']

        # Default select list; may be replaced entirely by a 'select_only'
        # group_by configuration below.
        q_select = [('select ml.name as label, {} as val, '
                     'count(1) as metric_count, sum(samples_count) as samples_count, '
                     'sum(mv.value_num), min(mv.value_num), max(mv.value_num)').format(agg_f)]
        if service and service_type:
            raise ValueError(
                "Cannot use service and service type in the same query")
        if service:
            q_where.append('and mv.service_id = %(service_id)s')
            params['service_id'] = service.id
        elif service_type:
            q_from.append('join monitoring_service ms on '
                          '(ms.id = mv.service_id and ms.service_type_id = %(service_type_id)s ) ')
            params['service_type_id'] = service_type.id

        # Unless the caller groups by event type (or gave an explicit one),
        # fall back to the EVENT_ALL aggregate.
        if group_by not in ('event_type', 'event_type_on_label',) and event_type is None:
            event_type = EventType.get(EventType.EVENT_ALL)

        # 'uptime' is exempt from event-type filtering — presumably it is
        # recorded without a meaningful event type (TODO confirm).
        if event_type and metric_name not in ['uptime', ]:
            q_where.append(' and mv.event_type_id = %(event_type)s ')
            params['event_type'] = event_type.id

        if label:
            q_where.append(' and ml.id = %(label)s')
            params['label'] = label.id
        # if not group_by and not resource:
        #     resource = MonitoredResource.get('', '', or_create=True)

        if resource and has_agg:
            q_group.append('mr.name')
            # group returned columns into a dict
            # config in grouping map: target_column = {source_column1: val, ...}

        # NOTE(review): 'ml.name' is already in q_group here, so this appends
        # a duplicate GROUP BY column — harmless in SQL, but looks redundant.
        if label and has_agg:
            q_group.extend(['ml.name'])

        grouper = None
        if group_by:
            group_by_cfg = group_by_map[group_by]
            g_sel = group_by_cfg.get('select')
            if g_sel:
                q_select.append(', {}'.format(', '.join(g_sel)))

            # 'select_only' replaces the default select list entirely.
            g_sel = group_by_cfg.get('select_only')
            if g_sel:
                q_select = ['select {}'.format(', '.join(g_sel))]

            q_from.extend(group_by_cfg['from'])
            q_where.extend(group_by_cfg['where'])
            if group_by_cfg.get('group_by') is not None:
                q_group = group_by_cfg['group_by']
            else:
                q_group.extend(group_by_cfg['select'])
            grouper = group_by_cfg['grouper']

        if resource_type and not resource:
            # Join the resource table only if no earlier fragment did.
            if not [mr for mr in q_from if 'monitoring_monitoredresource' in mr]:
                q_from.append('join monitoring_monitoredresource mr on mv.resource_id = mr.id ')
            q_where.append(' and mr.type = %(resource_type)s ')
            params['resource_type'] = resource_type

        if resource and group_by in ('resource', 'resource_on_label', 'resource_on_user', ):
            raise ValueError(
                "Cannot use resource and group by resource at the same time")
        elif resource:
            if not [mr for mr in q_from if 'monitoring_monitoredresource' in mr]:
                q_from.append('join monitoring_monitoredresource mr on mv.resource_id = mr.id ')
            q_where.append(' and mr.id = %(resource_id)s ')
            params['resource_id'] = resource.id

        # When grouping per label, also expose one user per label row.
        if 'ml.name' in q_group:
            q_select.append(', max(ml.user) as user')
            # q_group.extend(['ml.user']) not needed

        if user:
            q_where.append(' and ml.user = %(user)s ')
            params['user'] = user

        if q_group:
            q_group = [' group by ', ','.join(q_group)]
        if q_order_by:
            q_order_by = 'order by {}'.format(','.join(q_order_by))

        # Final query: concatenate all fragment lists in SQL clause order.
        q = ' '.join(chain(q_select, q_from, q_where, q_group, [q_order_by]))

        def postproc(row):
            # Fold grouper source columns (grouper[1:]) out of the flat row
            # into a nested dict stored under grouper[0].
            if grouper:
                t = {}
                tcol = grouper[0]
                for scol in grouper[1:]:
                    if scol == 'resource_id':
                        if scol in row:
                            r_id = row.pop(scol)
                            # Non-URL resources get a detail link resolved
                            # from ResourceBase; failures degrade to "".
                            if 'type' in t and t['type'] != MonitoredResource.TYPE_URL:
                                try:
                                    rb = ResourceBase.objects.get(id=r_id)
                                    t['href'] = rb.detail_url
                                except BaseException:
                                    t['href'] = ""
                    else:
                        t[scol] = row.pop(scol)
                        # URL-type resources link to their own name, but only
                        # if it resolves as a known URL pattern.
                        if scol == 'type' and scol in t and t[scol] == MonitoredResource.TYPE_URL:
                            try:
                                resolve(t['name'])
                                t['href'] = t['name']
                            except Resolver404:
                                t['href'] = ""
                row[tcol] = t
            return row

        return [postproc(row) for row in raw_sql(q, params)]
Exemplo n.º 3
0
 def _get_message(self, data_type):
     """
     Retrieve data by querying the MetricValue model (via the collector).

     :param data_type: configuration dict with keys ``name`` (payload key for
         list data), ``metrics`` (list of metric configs, each with ``name``,
         ``hooks`` and optional ``params``) and optional ``extra`` (list of
         suffixes resolved to ``self._get_<extra>()`` methods)
     :return: data dictionary, or ``None`` when no metric produced any data
     """
     has_data = False
     # Name of the object read by logstash filter (not used in case of "overview")
     data_name = data_type["name"]
     # Define data HEADER
     data = {
         "format_version": "1.0",
         "data_type": data_name,
         "instance": {
             "name": settings.HOSTNAME,
             "ip": self.client_ip
         },
         "time": {
             "startTime": self._valid_from.isoformat(),
             "endTime": self._valid_to.isoformat()
         }
     }
     # List data container (not used in case of "overview")
     list_data = []
     # For each metric we want to execute a query
     for metric in data_type["metrics"]:
         # Name omitted in hooks when retrieving no-list data (es. "overview")
         is_list = "name" in metric["hooks"]
         # Optional per-metric query parameters from the config.
         group_by = metric["params"]["group_by"] \
             if "params" in metric and "group_by" in metric["params"] \
             else None
         event_type = EventType.get(metric["params"]["event_type"]) \
             if "params" in metric and "event_type" in metric["params"] \
             else None
         # Retrieving data through the CollectorAPI object
         metrics_data = self._collector.get_metrics_data(
             metric_name=metric["name"],
             valid_from=self._valid_from,
             valid_to=self._valid_to,
             interval=self._interval,
             event_type=event_type,
             group_by=group_by)
         if metrics_data:
             # data dictionary updating
             for item in metrics_data:
                 if is_list:
                     name_value = self._build_data(item,
                                                   metric["hooks"]["name"])
                 # Apply every hook to the row; hooks map output keys to
                 # extraction specs consumed by _build_data.
                 item_value = {
                     k: self._build_data(item, v)
                     for k, v in metric["hooks"].items()
                 }
                 # NOTE(review): name_value is only bound when is_list is
                 # True; this branch assumes "countries" metrics always
                 # define a "name" hook — confirm against the config.
                 if "countries" == data_name:
                     try:
                         country_iso_3 = pycountry.countries.get(
                             alpha_3=name_value).alpha_3
                         center = self._get_country_center(country_iso_3)
                         item_value['center'] = center or ''
                     except Exception as e:
                         log.error(str(e))
                 if is_list:
                     # Merge into the existing list entry with the same
                     # name; if none exists (IndexError) append a new one.
                     try:
                         list_item = filter(
                             lambda l_item: l_item["name"] == name_value,
                             list_data)
                         _idx = list(list_item)[0]
                         i = list_data.index(_idx)
                         list_data[i].update(item_value)
                     except (IndexError, ValueError) as e:
                         list_data.append(item_value)
                 else:
                     # Non-list metrics merge straight into the payload root.
                     data.update(item_value)
                     has_data = True
     if list_data:
         data.update({data_name: list_data})
         has_data = True
     if "extra" in data_type:
         for extra in data_type["extra"]:
             # For each "extra" entry we have to define a "_get_{extra}" method
             data.update({extra: getattr(self, '_get_{}'.format(extra))()})
             has_data = True
     return data if has_data else None