Ejemplo n.º 1
0
    def dump_metric_data(self):
        """Collect per-component/severity state metrics and write them out."""
        self.logger.debug("PRINTING CONTENTS OF METRIC DATA")

        # Gather the metrics recorded for every (component, severity)
        # combination and accumulate them onto metric_data.
        self.metric_data.extend(
            self.state[name + sev]['metrics']
            for sev in severities
            for name in self.component_names())

        self.logger.debug(self.metric_data)
        dump_swiftlm_uptime_data(self.metric_data, self.cache_file_path,
                                 self.logger)
Ejemplo n.º 2
0
 def test_file_truncated(self):
     """Rewriting the cache file with shorter data must truncate it.

     If the file were reopened without truncation, trailing bytes from
     the longer first payload would corrupt the JSON produced by the
     second, shorter write.
     """
     import shutil
     testdir = tempfile.mkdtemp()
     # BUG FIX: the temporary directory was never removed, leaking one
     # directory per test run.
     self.addCleanup(shutil.rmtree, testdir, ignore_errors=True)
     fake_logger = mock.MagicMock()
     data = json.dumps([{"metric": "foo", "dimensions": {"hostname": "example.com", "fifth": "dimension"}}])
     path = os.path.join(testdir, "junk")
     utility.dump_swiftlm_uptime_data(data, path, logger=fake_logger)
     with open(path) as f:
         self.assertEqual(data, json.load(f))
     # now write a shorter data string to the same file; no stale bytes
     # from the first write may survive
     data = json.dumps([{"metric": "foo", "dimensions": {"hostname": "timy.com"}}])
     utility.dump_swiftlm_uptime_data(data, path, logger=fake_logger)
     with open(path) as f:
         self.assertEqual(data, json.load(f))
Ejemplo n.º 3
0
    def dump_metric_data(self):
        """Refresh timestamps on component state metrics and write them out."""
        self.logger.debug("PRINTING CONTENTS OF METRIC DATA")

        for name in self.component_names():
            # Work on a copy so the stored state metrics are not mutated
            # when we stamp the measurement with the current time.
            measurement = dict(self.state[name]['metrics'])
            # Time is now rather than the time we moved to this state
            measurement['timestamp'] = timestamp()
            self.metric_data.append(measurement)

        self.logger.debug(self.metric_data)
        dump_swiftlm_uptime_data(self.metric_data,
                                 self.cache_file_path,
                                 self.logger)
Ejemplo n.º 4
0
 def test_file_truncated(self):
     """Rewriting the cache file with shorter data must truncate it.

     If the file were reopened without truncation, trailing bytes from
     the longer first payload would corrupt the JSON produced by the
     second, shorter write.
     """
     import shutil
     testdir = tempfile.mkdtemp()
     # BUG FIX: the temporary directory was never removed, leaking one
     # directory per test run.
     self.addCleanup(shutil.rmtree, testdir, ignore_errors=True)
     fake_logger = mock.MagicMock()
     data = json.dumps([{
         'metric': 'foo',
         'dimensions': {
             'hostname': 'example.com',
             'fifth': 'dimension'
         }
     }])
     path = os.path.join(testdir, 'junk')
     utility.dump_swiftlm_uptime_data(data, path, logger=fake_logger)
     with open(path) as f:
         self.assertEqual(data, json.load(f))
     # now write a shorter data string to the same file; no stale bytes
     # from the first write may survive
     data = json.dumps([{
         'metric': 'foo',
         'dimensions': {
             'hostname': 'timy.com',
         }
     }])
     utility.dump_swiftlm_uptime_data(data, path, logger=fake_logger)
     with open(path) as f:
         self.assertEqual(data, json.load(f))
Ejemplo n.º 5
0
def run_forever(log_file_name, interval, metric_file, reseller_prefixes,
                logger, monasca_agent_interval):
    """
    The main cycle loop

    Tails the proxy access log, aggregates operation statistics once per
    interval and dumps the resulting measurements to metric_file.  Never
    returns.

    :param log_file_name: name of file we are tailing
    :param interval: how often we report metrics
    :param metric_file: file to dump metrics into
    :param reseller_prefixes: list of account prefixes to process
    :param logger: a logger
    :param monasca_agent_interval: reporting interval of the monasca agent;
        passed to purge_old_measurements() to drop stale measurements
    """

    logger.info('Starting. Reading from: %s' % log_file_name)
    # Get into sync with wake up interval
    WAKE_UP_TIME = 60
    time.sleep(sleep_interval(interval, time.time(), WAKE_UP_TIME))

    # Wait until the log file exists before we can tail it.
    while True:
        try:
            log_tail = LogTailer(log_file_name)
            break
        except IOError as err:
            if err.errno == 2:
                # errno 2 == ENOENT: log file does not yet exist,
                # sleep and retry
                time.sleep(sleep_interval(interval, time.time(), WAKE_UP_TIME))
            else:
                raise err

    # Start at the logging threshold so totals are logged on the first cycle.
    cycle = 10
    metric_data = []
    while True:
        try:
            # Sleep until next wake up
            time.sleep(sleep_interval(interval, time.time(), WAKE_UP_TIME))

            # Timestamp means the metrics are measurements of the data
            # gathered in the *last* time interval (i.e., timestamp is the
            # end of the cycle)
            timestamp = time.time()

            # Read lines written to the log since we last read the file
            # and process lines to extract stats.
            stats = AccessStatsRecorder()
            for line in log_tail.lines():
                result = parse_proxy_log_message(
                    line, reseller_prefixes=reseller_prefixes)
                if isinstance(result, dict):
                    stats.record_op(result.get('verb'),
                                    result.get('http_status'),
                                    result.get('bytes_transferred'),
                                    project=result.get('project'),
                                    container=result.get('container'),
                                    obj=result.get('obj'))

            # Convert stats into metric measurements
            total_metrics = make_measurements('swiftlm.access.host.operation.',
                                              stats.get_stats(), timestamp)
            for measurement in total_metrics:
                metric_data.append(measurement)

            # Occasionally (every 10th cycle), log the totals
            cycle += 1
            if cycle >= 10:
                for metric in total_metrics:
                    logger.info('Metric: %s' % json.dumps(metric))
                cycle = 0

            # Per-project measurements, dimensioned by tenant id
            for project in stats.get_projects():
                project_metrics = make_measurements(
                    'swiftlm.access.host.operation.project.',
                    project.get_stats(),
                    timestamp,
                    dimensions={'tenant_id': project.get_stats().get('name')})
                for measurement in project_metrics:
                    metric_data.append(measurement)

            # Record that we processed data without error
            metric_data.append({
                'metric': 'swiftlm.access.host.operation.status',
                'value': 0,
                'timestamp': timestamp,
                'dimensions': {
                    'service': 'object-storage'
                },
                'value_meta': {
                    'msg': 'OK'
                }
            })
        except Exception as err:  # noqa
            # Log the failure (previously it was swallowed silently) and
            # replace this cycle's data with an error-status measurement.
            logger.exception('Error processing access log data')
            metric_data = []
            metric_data.append({
                'metric': 'swiftlm.access.host.operation.status',
                'value': 2,
                'timestamp': time.time(),
                'dimensions': {
                    'service': 'object-storage'
                },
                'value_meta': {
                    # BUG FIX: was the exception object itself, which is not
                    # JSON-serializable when the metric data is dumped; the
                    # success path uses a plain string, so mirror that here.
                    'msg': str(err)
                }
            })

        purge_old_measurements(metric_data, interval, monasca_agent_interval)
        dump_swiftlm_uptime_data(metric_data,
                                 metric_file,
                                 logger,
                                 lock_timeout=2)