Example 1
    def process_frontends(self, data_frame):
        """Process statistics for frontends.

        Arguments:
            data_frame (obj): A pandas data_frame ready for processing.
        """
        # Filtering for Pandas
        cnt_metrics = 1
        log.debug('processing statistics for frontends')
        is_frontend = data_frame['svname_'] == 'FRONTEND'
        excluded_frontends = []
        metrics = self.config.get('process', 'frontend-metrics', fallback=None)

        if metrics is not None:
            metrics = metrics.split(' ')
        else:
            metrics = FRONTEND_METRICS
        log.debug('metric names for frontends %s', metrics)

        exclude_frontends_file = self.config.get('process',
                                                 'exclude-frontends',
                                                 fallback=None)
        if exclude_frontends_file is not None:
            excluded_frontends = load_file_content(exclude_frontends_file)
            log.info('excluding frontends %s', excluded_frontends)
            # replace dots in frontend names
            excluded_frontends[:] = [x.replace('.', '_')
                                     for x in excluded_frontends]
        filter_frontend = (~data_frame['pxname_']
                           .isin(excluded_frontends))

        frontend_stats = (data_frame[is_frontend & filter_frontend]
                          .loc[:, ['pxname_'] + metrics])

        # Group by frontend name and sum values for each column
        frontend_aggr_stats = frontend_stats.groupby(['pxname_']).sum()
        cnt_metrics += frontend_aggr_stats.size
        for index, row in frontend_aggr_stats.iterrows():
            paths = self.get_metric_paths('frontend', index)
            for metric, value in row.items():
                datapoints = [
                    "{p}.frontend.{f}.{m} {v} {t}\n"
                    .format(p=path,
                            f=index,
                            m=metric,
                            v=value,
                            t=self.timestamp) for path in paths
                ]
                for datapoint in datapoints:
                    dispatcher.signal('send', data=datapoint)

        data = ("{p}.haproxystats.MetricsFrontend {v} {t}\n"
                .format(p=self.graphite_path,
                        v=cnt_metrics,
                        t=self.timestamp))
        dispatcher.signal('send', data=data)
        log.info('number of frontend metrics %s', cnt_metrics)

        log.debug('finished processing statistics for frontends')
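
This variant's output is easiest to see on a toy input. Below is a minimal, self-contained sketch of its filter-and-aggregate pattern; the frontend names, metric list, Graphite prefix, and timestamp are all invented for illustration.

    import pandas

    # Hypothetical miniature of the sanitized 'show stat' data; the column
    # names match the method above, the rows and values are made up.
    data_frame = pandas.DataFrame({
        'pxname_': ['www_example_com', 'www_example_com', 'api'],
        'svname_': ['FRONTEND', 'FRONTEND', 'FRONTEND'],
        'scur': [3, 5, 2],
        'stot': [100, 200, 50],
    })

    is_frontend = data_frame['svname_'] == 'FRONTEND'
    filter_frontend = ~data_frame['pxname_'].isin(['api'])  # 'api' excluded
    metrics = ['scur', 'stot']

    frontend_stats = (data_frame[is_frontend & filter_frontend]
                      .loc[:, ['pxname_'] + metrics])
    frontend_aggr_stats = frontend_stats.groupby(['pxname_']).sum()
    for index, row in frontend_aggr_stats.iterrows():
        for metric, value in row.items():
            # 'graphite.prefix' and 1500000000 stand in for the real
            # metric path and timestamp
            print('graphite.prefix.frontend.{f}.{m} {v} {t}'.format(
                f=index, m=metric, v=value, t=1500000000))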
Example 2
    def process_frontends(self, data_frame):
        """
        Process statistics for frontends.

        Arguments:
            data_frame (obj): A pandas data_frame ready for processing.
        """
        # Filtering for Pandas
        cnt_metrics = 1
        log.debug('processing statistics for frontends')
        is_frontend = data_frame['svname'] == 'FRONTEND'
        filter_frontend = None
        metrics = self.config.get('process', 'frontend-metrics', fallback=None)

        if metrics is not None:
            metrics = metrics.split(' ')
        else:
            metrics = FRONTEND_METRICS
        log.debug('metric names for frontends %s', metrics)

        exclude_frontends_file = self.config.get('process',
                                                 'exclude-frontends',
                                                 fallback=None)
        if exclude_frontends_file is not None:
            excluded_frontends = load_file_content(exclude_frontends_file)
            if excluded_frontends:  # in case the file is empty
                log.info('excluding frontends %s', excluded_frontends)
                filter_frontend = (~data_frame['pxname']
                                   .isin(excluded_frontends))
        if filter_frontend is not None:
            frontend_stats = (data_frame[is_frontend & filter_frontend]
                              .loc[:, ['pxname'] + metrics])
        else:
            frontend_stats = (data_frame[is_frontend]
                              .loc[:, ['pxname'] + metrics])

        # Group by frontend name and sum values for each column
        frontend_aggr_stats = frontend_stats.groupby(['pxname']).sum()
        cnt_metrics += frontend_aggr_stats.size
        for index, row in frontend_aggr_stats.iterrows():
            name = index.replace('.', '_')
            for metric, value in row.items():
                data = ("{p}.frontend.{f}.{m} {v} {t}\n"
                        .format(p=self.graphite_path,
                                f=name,
                                m=metric,
                                v=value,
                                t=self.timestamp))
                dispatcher.signal('send', data=data)

        data = ("{p}.haproxystats.MetricsFrontend {v} {t}\n"
                .format(p=self.graphite_path,
                        v=cnt_metrics,
                        t=self.timestamp))
        dispatcher.signal('send', data=data)
        log.info('number of frontend metrics %s', cnt_metrics)

        log.debug('finished processing statistics for frontends')
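
This variant differs from Example 1 mainly in where dots are sanitized: Example 1 expects pre-sanitized pxname_/svname_ columns (prepared by the sites_stats variant in Example 4), while this one replaces dots per frontend name at format time (name = index.replace('.', '_')). Both read the same options from the [process] section. A small sketch of that configuration handling, with invented option values and an invented exclude-frontends path:

    import configparser

    # The option names come from the code above; the values and the
    # exclude-frontends path are illustrative only.
    config = configparser.ConfigParser()
    config.read_string(
        "[process]\n"
        "frontend-metrics = scur stot rate\n"
        "exclude-frontends = /etc/haproxystats/exclude-frontends.txt\n"
    )

    metrics = config.get('process', 'frontend-metrics', fallback=None)
    if metrics is not None:
        metrics = metrics.split(' ')
    print(metrics)  # ['scur', 'stot', 'rate']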
Example 3
    def sites_stats(self, files):
        """
        Process statistics for frontends/backends/servers.

        Arguments:
            files (list): A list of files which contain the output of the
            'show stat' command issued on the stats socket of HAProxy.
        """
        log.info('processing statistics for sites')
        log.debug('processing files %s', ' '.join(files))
        log.debug('merging multiple csv files to one Pandas data frame')
        data_frame = concat_csv(files)
        filter_backend = None
        if data_frame is not None:
            # Perform some sanitization on the raw data
            if '# pxname' in data_frame.columns:
                log.debug('replace "# pxname" column with "pxname"')
                data_frame.rename(columns={'# pxname': 'pxname'}, inplace=True)
            if 'Unnamed: 62' in data_frame.columns:
                log.debug('remove "Unnamed: 62" column')
                try:
                    data_frame.drop(labels=['Unnamed: 62'],
                                    axis=1,
                                    inplace=True)
                except ValueError as error:
                    log.warning("failed to drop 'Unnamed: 62' column with: %s",
                                error)

            if not isinstance(data_frame, pandas.DataFrame):
                log.warning('Pandas data frame was not created')
                return
            if len(data_frame.index) == 0:
                log.error('Pandas data frame is empty')
                return

            # For some metrics HAProxy returns nothing, so we replace them
            # with zeros
            data_frame.fillna(0, inplace=True)

            self.process_frontends(data_frame)

            exclude_backends_file = self.config.get('process',
                                                    'exclude-backends',
                                                    fallback=None)
            if exclude_backends_file is not None:
                excluded_backends = load_file_content(exclude_backends_file)
                if excluded_backends:
                    log.info('excluding backends %s', excluded_backends)
                    filter_backend = (~data_frame['pxname']
                                      .isin(excluded_backends))

            self.process_backends(data_frame, filter_backend=filter_backend)
            self.process_servers(data_frame, filter_backend=filter_backend)
            log.info('finished processing statistics for sites')
        else:
            log.error('failed to process statistics for sites')
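
Both sites_stats variants depend on concat_csv(files) to merge the per-socket CSV files into a single frame and to signal failure with None. The helper comes from the surrounding project; here is a plausible minimal sketch, assuming it simply reads and concatenates each file while skipping unreadable ones; the real helper may differ:

    import pandas

    def concat_csv(files):
        """Return one data frame built from several CSV files, or None."""
        frames = []
        for name in files:
            try:
                frames.append(pandas.read_csv(name))
            except (OSError, pandas.errors.ParserError):
                continue  # skip files that cannot be read or parsed
        if not frames:
            return None
        return pandas.concat(frames, ignore_index=True)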
Example 4
    def sites_stats(self, files):
        """Process statistics for frontends/backends/servers.

        Arguments:
            files (list): A list of files which contain the output of the
            'show stat' command issued on the stats socket of HAProxy.
        """
        log.info('processing statistics for sites')
        log.debug('processing files %s', ' '.join(files))
        log.debug('merging multiple csv files to one Pandas data frame')
        data_frame = concat_csv(files)
        excluded_backends = []

        if data_frame is not None:
            # Perform some sanitization on the raw data
            if '# pxname' in data_frame.columns:
                log.debug('replace "# pxname" column with "pxname"')
                data_frame.rename(columns={'# pxname': 'pxname'}, inplace=True)
            if 'Unnamed: 62' in data_frame.columns:
                log.debug('remove "Unnamed: 62" column')
                try:
                    data_frame.drop(labels=['Unnamed: 62'],
                                    axis=1,
                                    inplace=True)
                except ValueError as error:
                    log.warning("failed to drop 'Unnamed: 62' column with: %s",
                                error)
            # Sanitize the values of the pxname (frontend/backend names) and
            # svname (server names) columns by replacing dots with
            # underscores, because Graphite uses the dot as its namespace
            # separator.
            data_frame['pxname_'] = (
                data_frame.pxname.apply(lambda value: value.replace('.', '_')))
            data_frame['svname_'] = (
                data_frame.svname.apply(lambda value: value.replace('.', '_')))

            data_frame.drop('pxname', axis=1, inplace=True)
            data_frame.drop('svname', axis=1, inplace=True)

            if not isinstance(data_frame, pandas.DataFrame):
                log.warning('Pandas data frame was not created')
                return
            if len(data_frame.index) == 0:
                log.error('Pandas data frame is empty')
                return

            # For some metrics HAProxy returns nothing, so we replace them
            # with zeros
            data_frame.fillna(0, inplace=True)

            self.process_frontends(data_frame)

            exclude_backends_file = self.config.get('process',
                                                    'exclude-backends',
                                                    fallback=None)
            if exclude_backends_file is not None:
                excluded_backends = load_file_content(exclude_backends_file)
                log.info('excluding backends %s', excluded_backends)
                # replace dots in backend names
                excluded_backends[:] = [
                    x.replace('.', '_') for x in excluded_backends
                ]

            filter_backend = ~data_frame['pxname_'].isin(excluded_backends)

            self.process_backends(data_frame, filter_backend)
            self.process_servers(data_frame, filter_backend)
            log.info('finished processing statistics for sites')
        else:
            log.error('failed to process statistics for sites')
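
The sanitization block above is what lets the process_frontends variant in Example 1 rely on ready-made pxname_/svname_ columns. A standalone sketch of those steps on made-up rows:

    import pandas

    # Invented rows exercising the sanitization performed by the method above.
    data_frame = pandas.DataFrame({
        '# pxname': ['www.example.com', 'app.backend'],
        'svname': ['FRONTEND', 'srv1.local'],
        'Unnamed: 62': [None, None],
    })

    data_frame.rename(columns={'# pxname': 'pxname'}, inplace=True)
    data_frame.drop(labels=['Unnamed: 62'], axis=1, inplace=True)

    # Graphite uses the dot as its namespace separator, so dots inside
    # frontend/backend/server names become underscores.
    data_frame['pxname_'] = (
        data_frame.pxname.apply(lambda value: value.replace('.', '_')))
    data_frame['svname_'] = (
        data_frame.svname.apply(lambda value: value.replace('.', '_')))
    data_frame.drop('pxname', axis=1, inplace=True)
    data_frame.drop('svname', axis=1, inplace=True)
    print(data_frame)  # columns are now pxname_ and svname_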