Example #1
    def test_filtered_cluster_processes_stats(self, mock_get_current,
                                              mock_fetch, mock_ips_getter,
                                              mock_get_private_ip,
                                              mock_options):
        # Mock appscale_info functions for getting IPs
        mock_get_private_ip.return_value = '192.168.33.10'
        mock_ips_getter.return_value = ['192.168.33.10', '192.168.33.11']
        # Mock secret
        mock_options.secret = 'secret'
        # Read test data from json file
        raw_test_data, stats_test_data = get_stats_from_file(
            'processes-stats.json', process_stats.ProcessesStatsSnapshot)
        # Mock local source
        mock_get_current.return_value = stats_test_data['192.168.33.10']
        # Mock AsyncHTTPClient.fetch using raw stats dictionaries from test data
        response = MagicMock(body=json.dumps(raw_test_data['192.168.33.11']),
                             code=200,
                             reason='OK')
        future_response = gen.Future()
        future_response.set_result(response)
        mock_fetch.return_value = future_response
        # Prepare raw dict with include lists
        raw_include_lists = {
            'process': [
                'monit_name', 'unified_service_name', 'application_id', 'port',
                'cpu', 'memory', 'children_stats_sum'
            ],
            'process.cpu': ['user', 'system', 'percent'],
            'process.memory': ['resident', 'virtual', 'unique'],
            'process.children_stats_sum': ['cpu', 'memory'],
        }

        # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
        # Call method under test to get stats with filtered set of fields
        include_lists = IncludeLists(raw_include_lists)
        stats, failures = yield cluster_stats.cluster_processes_stats.get_current(
            max_age=15, include_lists=include_lists)
        self.assertEqual(failures, {})

        # ASSERTING EXPECTATIONS
        request_to_slave = mock_fetch.call_args[0][0]
        self.assertEqual(json.loads(request_to_slave.body), {
            'max_age': 15,
            'include_lists': raw_include_lists,
        })
        self.assertEqual(request_to_slave.url,
                         'http://192.168.33.11:4378/stats/local/processes')
        self.assertDictContainsSubset({'Appscale-Secret': 'secret'},
                                      request_to_slave.headers)

        local_stats = stats['192.168.33.10']
        slave_stats = stats['192.168.33.11']
        self.assertIsInstance(local_stats,
                              process_stats.ProcessesStatsSnapshot)
        self.assertEqual(len(local_stats.processes_stats), 24)
        self.assertEqual(local_stats.utc_timestamp, 1494248000.0)
        self.assertIsInstance(slave_stats,
                              process_stats.ProcessesStatsSnapshot)
        self.assertEqual(len(slave_stats.processes_stats), 10)
        self.assertEqual(slave_stats.utc_timestamp, 1494248091.0)
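The tests in these examples rely on a get_stats_from_file helper that is not shown here. Below is a minimal sketch of what such a helper could look like, assuming that the test JSON files map node IPs to raw stats dicts and that a converter.stats_from_dict function is available for building snapshot objects from those dicts (both are assumptions, not confirmed by the examples):

import json
import os

from appscale.hermes.stats import converter  # assumed to provide stats_from_dict

# Hypothetical location of the JSON test fixtures
TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), 'test-data')


def get_stats_from_file(json_file_name, stats_snapshot_cls):
  """ Loads raw per-node stats from a JSON test file and builds snapshot
  objects from them.

  Returns:
    A tuple (raw_dict, snapshots_dict), both keyed by node IP.
  """
  with open(os.path.join(TEST_DATA_PATH, json_file_name)) as json_file:
    raw_dict = json.load(json_file)
  snapshots_dict = {
    node_ip: converter.stats_from_dict(stats_snapshot_cls, raw_node_stats)
    for node_ip, raw_node_stats in raw_dict.items()
  }
  return raw_dict, snapshots_dict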
Example #2
    def get(self):
        if self.request.headers.get(SECRET_HEADER) != options.secret:
            logging.warn("Received bad secret from {client}".format(
                client=self.request.remote_ip))
            self.set_status(HTTP_Codes.HTTP_DENIED, "Bad secret")
            return
        if self.request.body:
            payload = json.loads(self.request.body)
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        newer_than = payload.get('newer_than')

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logging.warn("Bad request from {client} ({error})".format(
                    client=self.request.remote_ip, error=err))
                json.dump({'error': str(err)}, self)
                self.set_status(HTTP_Codes.HTTP_BAD_REQUEST,
                                'Wrong include_lists')
                return
        else:
            include_lists = self._default_include_lists

        if not newer_than:
            newer_than = (time.mktime(datetime.now().timetuple()) -
                          ACCEPTABLE_STATS_AGE)

        if not self._snapshot or self._snapshot.utc_timestamp <= newer_than:
            self._snapshot = self._stats_source.get_current()

        json.dump(stats_to_dict(self._snapshot, include_lists), self)
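For context, here is a sketch of how a client could call the local-stats handler above. The URL, port, and the Appscale-Secret header match what the tests assert; the GET-with-JSON-body pattern is an assumption based on those same tests, and the helper itself is hypothetical:

import json

from tornado import gen, httpclient


@gen.coroutine
def fetch_local_processes_stats(node_ip, secret, include_lists=None,
                                newer_than=None):
  """ Illustrative client for the local stats handler (not part of Hermes). """
  request = httpclient.HTTPRequest(
    url='http://{ip}:4378/stats/local/processes'.format(ip=node_ip),
    method='GET',
    body=json.dumps({'include_lists': include_lists, 'newer_than': newer_than}),
    headers={'Appscale-Secret': secret},
    # Tornado needs this flag to send a GET request with a body
    allow_nonstandard_methods=True
  )
  response = yield httpclient.AsyncHTTPClient().fetch(request)
  raise gen.Return(json.loads(response.body))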
Example #3
    def get(self):
        if self.request.headers.get(SECRET_HEADER) != options.secret:
            logging.warn("Received bad secret from {client}".format(
                client=self.request.remote_ip))
            self.set_status(HTTP_Codes.HTTP_DENIED, "Bad secret")
            return
        if self.request.body:
            payload = json.loads(self.request.body)
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        newer_than = payload.get('newer_than')

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logging.warn("Bad request from {client} ({error})".format(
                    client=self.request.remote_ip, error=err))
                json.dump({'error': str(err)}, self)
                self.set_status(HTTP_Codes.HTTP_BAD_REQUEST,
                                'Wrong include_lists')
                return
        else:
            include_lists = self._default_include_lists

        if not newer_than:
            newer_than = (time.mktime(datetime.now().timetuple()) -
                          ACCEPTABLE_STATS_AGE)

        if (not self._default_include_lists
                or include_lists.is_subset_of(self._default_include_lists)):
            # If the user didn't specify any non-default fields,
            # we can use the local cache
            fresh_local_snapshots = {
                node_ip: snapshot
                for node_ip, snapshot in self._snapshots.iteritems()
                if snapshot.utc_timestamp > newer_than
            }
        else:
            fresh_local_snapshots = {}

        new_snapshots_dict, failures = (
            yield self._current_cluster_stats_source.get_current_async(
                newer_than=newer_than,
                include_lists=include_lists,
                exclude_nodes=fresh_local_snapshots.keys()))

        # Put new snapshots to local cache
        self._snapshots.update(new_snapshots_dict)

        # Extend fetched snapshots dict with fresh local snapshots
        new_snapshots_dict.update(fresh_local_snapshots)

        rendered_snapshots = {
            node_ip: stats_to_dict(snapshot, include_lists)
            for node_ip, snapshot in new_snapshots_dict.iteritems()
        }

        json.dump({"stats": rendered_snapshots, "failures": failures}, self)
Example #4
  def test_filtered_cluster_node_stats(self, mock_get_current, mock_fetch,
                                       mock_ips_getter, mock_get_private_ip,
                                       mock_options):
    # Mock appscale_info functions for getting IPs
    mock_get_private_ip.return_value = '192.168.33.10'
    mock_ips_getter.return_value = ['192.168.33.10', '192.168.33.11']
    # Mock secret
    mock_options.secret = 'secret'
    # Read test data from json file
    raw_test_data, stats_test_data = get_stats_from_file(
      'node-stats.json', node_stats.NodeStatsSnapshot
    )
    # Mock local source
    mock_get_current.return_value = stats_test_data['192.168.33.10']
    # Mock AsyncHTTPClient.fetch using raw stats dictionaries from test data
    response = MagicMock(body=json.dumps(raw_test_data['192.168.33.11']),
                         code=200, reason='OK')
    future_response = gen.Future()
    future_response.set_result(response)
    mock_fetch.return_value = future_response
    # Prepare raw dict with include lists
    raw_include_lists = {
      'node': ['cpu', 'memory'],
      'node.cpu': ['percent', 'count'],
      'node.memory': ['available']
    }

    # Initialize cluster stats source with include lists
    cluster_stats_source = cluster_stats.ClusterNodesStatsSource()

    # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
    # Call method under test to get stats with filtered set of fields
    include_lists = IncludeLists(raw_include_lists)
    stats, failures = yield cluster_stats_source.get_current_async(
      max_age=10, include_lists=include_lists
    )

    # ASSERTING EXPECTATIONS
    request_to_slave = mock_fetch.call_args[0][0]
    self.assertEqual(
      json.loads(request_to_slave.body),
      {
        'max_age': 10,
        'include_lists': raw_include_lists,
      })
    self.assertEqual(
      request_to_slave.url, 'http://192.168.33.11:4378/stats/local/node'
    )
    self.assertDictContainsSubset(
      {'Appscale-Secret': 'secret'}, request_to_slave.headers
    )
    self.assertEqual(failures, {})

    local_stats = stats['192.168.33.10']
    slave_stats = stats['192.168.33.11']
    self.assertIsInstance(local_stats, node_stats.NodeStatsSnapshot)
    self.assertEqual(local_stats.utc_timestamp, 1494248091.0)
    self.assertIsInstance(slave_stats, node_stats.NodeStatsSnapshot)
    self.assertEqual(slave_stats.utc_timestamp, 1494248082.0)
Example #5
  def test_filtered_cluster_proxies_stats(self, mock_fetch, mock_ips_getter,
                                          mock_get_private_ip, mock_options):
    # Mock appscale_info functions for getting IPs
    mock_get_private_ip.return_value = '192.168.33.10'
    mock_ips_getter.return_value = ['192.168.33.11']
    # Mock secret
    mock_options.secret = 'secret'
    # Read test data from json file
    raw_test_data = get_stats_from_file(
      'proxies-stats.json', proxy_stats.ProxiesStatsSnapshot
    )[0]
    # Prepare raw dict with include lists
    raw_include_lists = {
      'proxy': ['name', 'unified_service_name', 'application_id',
                'frontend', 'backend'],
      'proxy.frontend': ['scur', 'smax', 'rate', 'req_rate', 'req_tot'],
      'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
    }
    # Mock AsyncHTTPClient.fetch using raw stats dictionaries from test data
    response = MagicMock(body=json.dumps(raw_test_data['192.168.33.11']),
                         code=200, reason='OK')
    future_response = gen.Future()
    future_response.set_result(response)
    mock_fetch.return_value = future_response

    # Initialize stats source
    cluster_stats_source = cluster_stats.ClusterProxiesStatsSource()

    # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
    # Call method under test to get stats with filtered set of fields
    include_lists = IncludeLists(raw_include_lists)
    stats, failures = yield cluster_stats_source.get_current_async(
      max_age=18, include_lists=include_lists
    )

    # ASSERTING EXPECTATIONS
    request_to_lb = mock_fetch.call_args[0][0]
    self.assertEqual(
      json.loads(request_to_lb.body),
      {
        'max_age': 18,
        'include_lists': raw_include_lists,
      })
    self.assertEqual(
      request_to_lb.url, 'http://192.168.33.11:4378/stats/local/proxies'
    )
    self.assertDictContainsSubset(
      {'Appscale-Secret': 'secret'}, request_to_lb.headers
    )
    self.assertEqual(failures, {})

    lb_stats = stats['192.168.33.11']
    self.assertIsInstance(lb_stats, proxy_stats.ProxiesStatsSnapshot)
    self.assertEqual(len(lb_stats.proxies_stats), 5)
    self.assertEqual(lb_stats.utc_timestamp, 1494248097.0)
Example #6
from appscale.hermes.stats.producers.rabbitmq_stats import PushQueueStatsSource
from appscale.hermes.stats.producers.rabbitmq_stats import RabbitMQStatsSource


DEFAULT_INCLUDE_LISTS = IncludeLists({
  # Node stats
  'node': ['utc_timestamp', 'cpu', 'memory',
           'partitions_dict', 'loadavg'],
  'node.cpu': ['percent', 'count'],
  'node.memory': ['available', 'total'],
  'node.partition': ['free', 'used'],
  'node.loadavg': ['last_5min'],
  # Processes stats
  'process': ['monit_name', 'unified_service_name', 'application_id',
              'port', 'cpu', 'memory', 'children_stats_sum'],
  'process.cpu': ['user', 'system', 'percent'],
  'process.memory': ['resident', 'virtual', 'unique'],
  'process.children_stats_sum': ['cpu', 'memory'],
  # Proxies stats
  'proxy': ['name', 'unified_service_name', 'application_id',
            'frontend', 'backend', 'servers_count'],
  'proxy.frontend': ['bin', 'bout', 'scur', 'smax', 'rate',
                     'req_rate', 'req_tot', 'hrsp_4xx', 'hrsp_5xx'],
  'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
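  # RabbitMQ stats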
  'rabbitmq': ['utc_timestamp', 'disk_free_alarm', 'mem_alarm', 'name'],
  'queue': ['name', 'messages'],
})
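

# A minimal illustration (not part of the original module) of how these
# defaults are applied when rendering a snapshot: stats_to_dict is the
# converter call used by the handlers above (assumed to be imported from the
# converter module), and node_snapshot is a hypothetical NodeStatsSnapshot.
def _render_with_default_lists(node_snapshot):
  # Only whitelisted fields survive, e.g. the rendered 'cpu' dict
  # keeps just 'percent' and 'count'.
  return stats_to_dict(node_snapshot, DEFAULT_INCLUDE_LISTS)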


@attr.s
class HandlerInfo(object):