Example #1
    async def test_filtered_cluster_nodes_stats():
        # Read test data from json file
        raw_test_data, stats_test_data = get_stats_from_file(
            'node-stats.json', node_stats.NodeStatsSnapshot)

        private_ip_patcher = patch(
            'appscale.common.appscale_info.get_private_ip',
            return_value='192.168.33.10')
        ips_getter_patcher = patch(
            'appscale.hermes.producers.cluster_stats.cluster_nodes_stats.ips_getter',
            return_value=['192.168.33.10', '192.168.33.11'])
        secret_patcher = patch('appscale.common.appscale_info.get_secret',
                               return_value='secret')
        local_stats_patcher = patch(
            'appscale.hermes.producers.node_stats.NodeStatsSource.get_current',
            return_value=stats_test_data['192.168.33.10'])
        json_method = MagicMock(
            return_value=future(raw_test_data['192.168.33.11']))
        response = MagicMock(json=json_method, status=200)
        get_remote_patcher = patch(
            'aiohttp.ClientSession.get',
            return_value=AsyncContextMock(aenter=response))

        # Prepare raw dict with include lists
        raw_include_lists = {
            'node': ['cpu', 'memory'],
            'node.cpu': ['percent', 'count'],
            'node.memory': ['available']
        }

        # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
        with contextlib.ExitStack() as stack:
            # Start patchers
            stack.enter_context(private_ip_patcher)
            stack.enter_context(ips_getter_patcher)
            stack.enter_context(secret_patcher)
            stack.enter_context(local_stats_patcher)
            session_get_mock = stack.enter_context(get_remote_patcher)
            # Call method under test to get stats with filtered set of fields
            include_lists = IncludeLists(raw_include_lists)
            stats, failures = await cluster_stats.cluster_nodes_stats.get_current(
                max_age=10, include_lists=include_lists)

        # ASSERTING EXPECTATIONS
        session_get_mock.assert_called_once_with(
            'http://192.168.33.11:4378/stats/local/node',
            headers={'Appscale-Secret': 'secret'},
            json={
                'max_age': 10,
                'include_lists': raw_include_lists,
            },
            timeout=constants.REMOTE_REQUEST_TIMEOUT)
        assert failures == {}

        local_stats = stats['192.168.33.10']
        slave_stats = stats['192.168.33.11']
        assert isinstance(local_stats, node_stats.NodeStatsSnapshot)
        assert local_stats.utc_timestamp == 1494248091.0
        assert isinstance(slave_stats, node_stats.NodeStatsSnapshot)
        assert slave_stats.utc_timestamp == 1494248082.0
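
The future() and AsyncContextMock helpers used above (and again in Example #4) are not shown on this page. A minimal sketch of what they might look like, inferred from how the tests use them and assuming asyncio and unittest.mock; the project's real test utilities may differ:

    import asyncio
    from unittest.mock import MagicMock

    def future(value=None):
        """Return an already-resolved future wrapping value."""
        f = asyncio.Future()
        f.set_result(value)
        return f

    class AsyncContextMock(MagicMock):
        """Mock usable as an `async with` target: __aenter__ returns the
        aenter attribute (here, the mocked HTTP response)."""
        async def __aenter__(self):
            return self.aenter

        async def __aexit__(self, exc_type, exc, tb):
            return None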
Example #2
    def test_filtered_cluster_processes_stats(self, mock_get_current,
                                              mock_fetch, mock_ips_getter,
                                              mock_get_private_ip,
                                              mock_options):
        # Mock appscale_info functions for getting IPs
        mock_get_private_ip.return_value = '192.168.33.10'
        mock_ips_getter.return_value = ['192.168.33.10', '192.168.33.11']
        # Mock secret
        mock_options.secret = 'secret'
        # Read test data from json file
        raw_test_data, stats_test_data = get_stats_from_file(
            'processes-stats.json', process_stats.ProcessesStatsSnapshot)
        # Mock local source
        mock_get_current.return_value = stats_test_data['192.168.33.10']
        # Mock AsyncHTTPClient.fetch using raw stats dictionaries from test data
        response = MagicMock(body=json.dumps(raw_test_data['192.168.33.11']),
                             code=200,
                             reason='OK')
        future_response = gen.Future()
        future_response.set_result(response)
        mock_fetch.return_value = future_response
        # Prepare raw dict with include lists
        raw_include_lists = {
            'process': [
                'monit_name', 'unified_service_name', 'application_id', 'port',
                'cpu', 'memory', 'children_stats_sum'
            ],
            'process.cpu': ['user', 'system', 'percent'],
            'process.memory': ['resident', 'virtual', 'unique'],
            'process.children_stats_sum': ['cpu', 'memory'],
        }

        # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
        # Call method under test to get stats with filtered set of fields
        include_lists = IncludeLists(raw_include_lists)
        stats, failures = yield cluster_stats.cluster_processes_stats.get_current(
            max_age=15, include_lists=include_lists)
        self.assertEqual(failures, {})

        # ASSERTING EXPECTATIONS
        request_to_slave = mock_fetch.call_args[0][0]
        self.assertEqual(json.loads(request_to_slave.body), {
            'max_age': 15,
            'include_lists': raw_include_lists,
        })
        self.assertEqual(request_to_slave.url,
                         'http://192.168.33.11:4378/stats/local/processes')
        # The secret header must be present among the request headers
        # (assertDictContainsSubset expects the subset as the first argument)
        self.assertDictContainsSubset({'Appscale-Secret': 'secret'},
                                      request_to_slave.headers)

        local_stats = stats['192.168.33.10']
        slave_stats = stats['192.168.33.11']
        self.assertIsInstance(local_stats,
                              process_stats.ProcessesStatsSnapshot)
        self.assertEqual(len(local_stats.processes_stats), 24)
        self.assertEqual(local_stats.utc_timestamp, 1494248000.0)
        self.assertIsInstance(slave_stats,
                              process_stats.ProcessesStatsSnapshot)
        self.assertEqual(len(slave_stats.processes_stats), 10)
        self.assertEqual(slave_stats.utc_timestamp, 1494248091.0)
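
This test body yields a future, so it only runs correctly under Tornado's coroutine test machinery; presumably the original class derives from tornado.testing.AsyncTestCase, the method is wrapped with gen_test, and the mock_* arguments are injected by @patch decorators. A small self-contained illustration of that mechanism:

    from tornado import gen, testing

    class ExampleAsyncTest(testing.AsyncTestCase):
        @testing.gen_test
        def test_yielding_a_future(self):
            # gen_test runs this generator on the IOLoop and resolves
            # each yielded future before resuming the test body.
            future = gen.Future()
            future.set_result(42)
            result = yield future
            self.assertEqual(result, 42)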
Example #3
def get_default_include_lists():
    """ Creates an instance of IncludeLists with default values.
  It is not a constant because all model classes should be imported before
  creating an instance of IncludeLists.
  We're planning to get rid of complicated include lists logic
  by splitting composite models into smaller.
  """
    return IncludeLists({
        # Node stats
        'node':
        ['utc_timestamp', 'cpu', 'memory', 'partitions_dict', 'loadavg'],
        'node.cpu': ['percent', 'count'],
        'node.memory': ['available', 'total'],
        'node.partition': ['free', 'used'],
        'node.loadavg': ['last_5min'],
        # Processes stats
        'process': [
            'monit_name', 'unified_service_name', 'application_id', 'port',
            'cpu', 'memory', 'children_stats_sum'
        ],
        'process.cpu': ['user', 'system', 'percent'],
        'process.memory': ['resident', 'virtual', 'unique'],
        'process.children_stats_sum': ['cpu', 'memory'],
        # Proxies stats
        'proxy': [
            'name', 'unified_service_name', 'application_id', 'frontend',
            'backend', 'servers_count'
        ],
        'proxy.frontend': [
            'bin', 'bout', 'scur', 'smax', 'rate', 'req_rate', 'req_tot',
            'hrsp_4xx', 'hrsp_5xx'
        ],
        'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
        # Taskqueue service stats
        'taskqueue': [
            'utc_timestamp', 'current_requests', 'cumulative', 'recent',
            'instances_count', 'failures'
        ],
        'taskqueue.instance':
        ['start_timestamp_ms', 'current_requests', 'cumulative', 'recent'],
        'taskqueue.cumulative': ['total', 'failed', 'pb_reqs', 'rest_reqs'],
        'taskqueue.recent':
        ['total', 'failed', 'avg_latency', 'pb_reqs', 'rest_reqs'],
        # RabbitMQ stats
        'rabbitmq': [
            'utc_timestamp', 'disk_free_alarm', 'mem_alarm', 'name',
            'partitions'
        ],
        # Push queue stats
        'queue': ['name', 'messages'],
        # Cassandra stats
        'cassandra':
        ['utc_timestamp', 'nodes', 'missing_nodes', 'unknown_nodes'],
        # Cassandra node stats
        'cassandra.node':
        ['address', 'status', 'state', 'load', 'owns_pct', 'tokens_num'],
    })
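
These lists are keyed by model name (and dotted nested-model name); each value enumerates the fields to keep when a snapshot is rendered. A hypothetical illustration of the effect, assuming stats_to_dict from the same package (used by the handler examples below) renders a snapshot to a dict; the exact call shape of get_current is taken from the patch targets in the tests above:

    include_lists = get_default_include_lists()
    snapshot = node_stats.NodeStatsSource.get_current()
    rendered = stats_to_dict(snapshot, include_lists)
    # Per 'node.cpu', rendered['cpu'] keeps only 'percent' and 'count';
    # per 'node.memory', rendered['memory'] keeps 'available' and 'total'.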
Example #4
    async def test_filtered_cluster_proxies_stats():
        # Read test data from json file
        raw_test_data = get_stats_from_file(
            'proxies-stats.json', proxy_stats.ProxiesStatsSnapshot)[0]

        private_ip_patcher = patch(
            'appscale.common.appscale_info.get_private_ip',
            return_value='192.168.33.10')
        ips_getter_patcher = patch(
            'appscale.hermes.producers.cluster_stats.cluster_proxies_stats.ips_getter',
            return_value=['192.168.33.11'])
        secret_patcher = patch('appscale.common.appscale_info.get_secret',
                               return_value='secret')
        json_method = MagicMock(
            return_value=future(raw_test_data['192.168.33.11']))
        response = MagicMock(json=json_method, status=200)
        get_remote_patcher = patch(
            'aiohttp.ClientSession.get',
            return_value=AsyncContextMock(aenter=response))
        # Prepare raw dict with include lists
        raw_include_lists = {
            'proxy': [
                'name', 'unified_service_name', 'application_id', 'frontend',
                'backend'
            ],
            'proxy.frontend': ['scur', 'smax', 'rate', 'req_rate', 'req_tot'],
            'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
        }

        # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
        with contextlib.ExitStack() as stack:
            # Start patchers
            stack.enter_context(private_ip_patcher)
            stack.enter_context(ips_getter_patcher)
            stack.enter_context(secret_patcher)
            session_get_mock = stack.enter_context(get_remote_patcher)
            # Call method under test to get stats with filtered set of fields
            include_lists = IncludeLists(raw_include_lists)
            stats, failures = await cluster_stats.cluster_proxies_stats.get_current(
                max_age=18, include_lists=include_lists)

        # ASSERTING EXPECTATIONS
        session_get_mock.assert_called_once_with(
            'http://192.168.33.11:4378/stats/local/proxies',
            headers={'Appscale-Secret': 'secret'},
            json={
                'max_age': 18,
                'include_lists': raw_include_lists,
            },
            timeout=constants.REMOTE_REQUEST_TIMEOUT)
        assert failures == {}

        lb_stats = stats['192.168.33.11']
        assert isinstance(lb_stats, proxy_stats.ProxiesStatsSnapshot)
        assert len(lb_stats.proxies_stats) == 5
        assert lb_stats.utc_timestamp == 1494248097.0
Example #5
    def test_filtered_cluster_proxies_stats(self, mock_fetch, mock_ips_getter,
                                            mock_get_private_ip, mock_options):
        # Mock appscale_info functions for getting IPs
        mock_get_private_ip.return_value = '192.168.33.10'
        mock_ips_getter.return_value = ['192.168.33.11']
        # Mock secret
        mock_options.secret = 'secret'
        # Read test data from json file
        raw_test_data = get_stats_from_file(
            'proxies-stats.json', proxy_stats.ProxiesStatsSnapshot)[0]
        # Prepare raw dict with include lists
        raw_include_lists = {
            'proxy': [
                'name', 'unified_service_name', 'application_id', 'frontend',
                'backend'
            ],
            'proxy.frontend': ['scur', 'smax', 'rate', 'req_rate', 'req_tot'],
            'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
        }
        # Mock AsyncHTTPClient.fetch using raw stats dictionaries from test data
        response = MagicMock(body=json.dumps(raw_test_data['192.168.33.11']),
                             code=200,
                             reason='OK')
        future_response = gen.Future()
        future_response.set_result(response)
        mock_fetch.return_value = future_response

        # ^^^ ALL INPUTS ARE SPECIFIED (or mocked) ^^^
        # Call method under test to get stats with filtered set of fields
        include_lists = IncludeLists(raw_include_lists)
        stats, failures = yield cluster_stats.cluster_proxies_stats.get_current(
            max_age=18, include_lists=include_lists)

        # ASSERTING EXPECTATIONS
        request_to_lb = mock_fetch.call_args[0][0]
        self.assertEqual(json.loads(request_to_lb.body), {
            'max_age': 18,
            'include_lists': raw_include_lists,
        })
        self.assertEqual(request_to_lb.url,
                         'http://192.168.33.11:4378/stats/local/proxies')
        # The secret header must be present among the request headers
        # (assertDictContainsSubset expects the subset as the first argument)
        self.assertDictContainsSubset({'Appscale-Secret': 'secret'},
                                      request_to_lb.headers)
        self.assertEqual(failures, {})

        lb_stats = stats['192.168.33.11']
        self.assertIsInstance(lb_stats, proxy_stats.ProxiesStatsSnapshot)
        self.assertEqual(len(lb_stats.proxies_stats), 5)
        self.assertEqual(lb_stats.utc_timestamp, 1494248097.0)
Example #6
    async def __call__(self, request):
        """ Handles HTTP request.

    Args:
      request: an instance of Request.
    Returns:
      An instance of Resposne.
    """
        if request.has_body:
            payload = await request.json()
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        max_age = payload.get('max_age', ACCEPTABLE_STATS_AGE)

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logger.warn("Bad request from {client} ({error})".format(
                    client=request.remote, error=err))
                return web.Response(status=http.HTTPStatus.BAD_REQUEST,
                                    reason='Wrong include_lists',
                                    text=str(err))
        else:
            include_lists = self.default_include_lists

        snapshot = None

        # Try to use cached snapshot
        if self.cached_snapshot:
            now = time.time()
            acceptable_time = now - max_age
            if self.cached_snapshot.utc_timestamp >= acceptable_time:
                snapshot = self.cached_snapshot
                logger.info(
                    "Returning cached snapshot with age {:.2f}s".format(
                        now - self.cached_snapshot.utc_timestamp))

        if not snapshot:
            snapshot = self.stats_source.get_current()
            if inspect.isawaitable(snapshot):
                snapshot = await snapshot
            self.cached_snapshot = snapshot

        return web.json_response(stats_to_dict(snapshot, include_lists))
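
A hedged sketch of how such a callable handler could be wired into an aiohttp application. The class name LocalStatsHandler and its constructor are assumptions; the port and path come from the request URLs asserted in the tests above:

    from aiohttp import web

    # Hypothetical construction; the real factory may differ.
    handler = LocalStatsHandler(
        stats_source=node_stats.NodeStatsSource,
        default_include_lists=get_default_include_lists())

    async def local_node_stats(request):
        # aiohttp expects a coroutine function; delegate to the instance.
        return await handler(request)

    app = web.Application()
    app.router.add_get('/stats/local/node', local_node_stats)
    web.run_app(app, port=4378)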
Example #7
    def get(self):
        if self.request.headers.get(SECRET_HEADER) != options.secret:
            logging.warn("Received bad secret from {client}".format(
                client=self.request.remote_ip))
            self.set_status(HTTP_Codes.HTTP_DENIED, "Bad secret")
            return
        if self.request.body:
            payload = json.loads(self.request.body)
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        max_age = payload.get('max_age', ACCEPTABLE_STATS_AGE)

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logging.warn("Bad request from {client} ({error})".format(
                    client=self.request.remote_ip, error=err))
                json.dump({'error': str(err)}, self)
                self.set_status(HTTP_Codes.HTTP_BAD_REQUEST,
                                'Wrong include_lists')
                return
        else:
            include_lists = self._default_include_lists

        snapshot = None

        # Try to use cached snapshot
        if self._cached_snapshot:
            now = time.time()
            acceptable_time = now - max_age
            if self._cached_snapshot.utc_timestamp >= acceptable_time:
                snapshot = self._cached_snapshot
                logging.info(
                    "Returning cached snapshot with age {:.2f}s".format(
                        now - self._cached_snapshot.utc_timestamp))

        if not snapshot:
            snapshot = self._stats_source.get_current()
            if isinstance(snapshot, gen.Future):
                snapshot = yield snapshot
            self._cached_snapshot = snapshot

        json.dump(stats_to_dict(snapshot, include_lists), self)
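
As with Example #2, this get method yields futures, so in the original module it is presumably wrapped with Tornado's coroutine machinery (e.g. @gen.coroutine); the same applies to the two cluster-stats handlers in Examples #8 and #9. A minimal sketch of that pattern, with the handler class being an illustration, not the original:

    from tornado import gen, web

    class ExampleHandler(web.RequestHandler):
        @gen.coroutine
        def get(self):
            # Without @gen.coroutine, a yielding get() would just return a
            # generator object and Tornado would never execute its body.
            future = gen.Future()
            future.set_result('hello')
            result = yield future
            self.write(result)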
Example #8
  def get(self):
    if self.request.headers.get(SECRET_HEADER) != options.secret:
      logger.warn("Received bad secret from {client}"
                   .format(client=self.request.remote_ip))
      self.set_status(HTTP_Codes.HTTP_DENIED, "Bad secret")
      return
    if self.request.body:
      payload = json.loads(self.request.body)
    else:
      payload = {}
    include_lists = payload.get('include_lists')
    max_age = payload.get('max_age', ACCEPTABLE_STATS_AGE)

    if include_lists is not None:
      try:
        include_lists = IncludeLists(include_lists)
      except WrongIncludeLists as err:
        logger.warn("Bad request from {client} ({error})"
                     .format(client=self.request.remote_ip, error=err))
        json.dump({'error': str(err)}, self)
        self.set_status(HTTP_Codes.HTTP_BAD_REQUEST, 'Wrong include_lists')
        return
    else:
      include_lists = self._default_include_lists

    newer_than = time.mktime(datetime.now().timetuple()) - max_age

    if (not self._default_include_lists or
        include_lists.is_subset_of(self._default_include_lists)):
      # If user didn't specify any non-default fields we can use local cache
      fresh_local_snapshots = {
        node_ip: snapshot
        for node_ip, snapshot in self._cached_snapshots.iteritems()
        if max_age and snapshot.utc_timestamp > newer_than
      }
      if fresh_local_snapshots:
        logger.debug("Returning cluster stats with {} cached snapshots"
                      .format(len(fresh_local_snapshots)))
    else:
      fresh_local_snapshots = {}

    new_snapshots_dict, failures = (
      yield self._current_cluster_stats_source.get_current(
        max_age=max_age, include_lists=include_lists,
        exclude_nodes=fresh_local_snapshots.keys()
      )
    )

    # Put new snapshots to local cache
    self._cached_snapshots.update(new_snapshots_dict)

    # Extend fetched snapshots dict with fresh local snapshots
    new_snapshots_dict.update(fresh_local_snapshots)

    rendered_snapshots = {
      node_ip: stats_to_dict(snapshot, include_lists)
      for node_ip, snapshot in new_snapshots_dict.iteritems()
    }

    json.dump({
      "stats": rendered_snapshots,
      "failures": failures
    }, self)
Example #9
    def get(self):
        if self.request.headers.get(SECRET_HEADER) != options.secret:
            logging.warn("Received bad secret from {client}".format(
                client=self.request.remote_ip))
            self.set_status(HTTP_Codes.HTTP_DENIED, "Bad secret")
            return
        if self.request.body:
            payload = json.loads(self.request.body)
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        max_age = payload.get('max_age', ACCEPTABLE_STATS_AGE)

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logging.warn("Bad request from {client} ({error})".format(
                    client=self.request.remote_ip, error=err))
                json.dump({'error': str(err)}, self)
                self.set_status(HTTP_Codes.HTTP_BAD_REQUEST,
                                'Wrong include_lists')
                return
        else:
            include_lists = self._default_include_lists

        newer_than = time.mktime(datetime.now().timetuple()) - max_age

        if (not self._default_include_lists
                or include_lists.is_subset_of(self._default_include_lists)):
            # If user didn't specify any non-default fields we can use local cache
            fresh_local_snapshots = {
                node_ip: snapshot
                for node_ip, snapshot in self._cached_snapshots.iteritems()
                if max_age and snapshot.utc_timestamp > newer_than
            }
            if fresh_local_snapshots:
                logging.debug(
                    "Returning cluster stats with {} cached snapshots".format(
                        len(fresh_local_snapshots)))
        else:
            fresh_local_snapshots = {}

        new_snapshots_dict, failures = (
            yield self._current_cluster_stats_source.get_current(
                max_age=max_age,
                include_lists=include_lists,
                exclude_nodes=fresh_local_snapshots.keys()))

        # Put new snapshots to local cache
        self._cached_snapshots.update(new_snapshots_dict)

        # Extend fetched snapshots dict with fresh local snapshots
        new_snapshots_dict.update(fresh_local_snapshots)

        rendered_snapshots = {
            node_ip: stats_to_dict(snapshot, include_lists)
            for node_ip, snapshot in new_snapshots_dict.iteritems()
        }

        json.dump({"stats": rendered_snapshots, "failures": failures}, self)
Example #10
DEFAULT_INCLUDE_LISTS = IncludeLists({
    # Node stats
    'node': ['utc_timestamp', 'cpu', 'memory', 'partitions_dict', 'loadavg'],
    'node.cpu': ['percent', 'count'],
    'node.memory': ['available', 'total'],
    'node.partition': ['free', 'used'],
    'node.loadavg': ['last_5min'],
    # Processes stats
    'process': [
        'monit_name', 'unified_service_name', 'application_id', 'port', 'cpu',
        'memory', 'children_stats_sum'
    ],
    'process.cpu': ['user', 'system', 'percent'],
    'process.memory': ['resident', 'virtual', 'unique'],
    'process.children_stats_sum': ['cpu', 'memory'],
    # Proxies stats
    'proxy': [
        'name', 'unified_service_name', 'application_id', 'frontend',
        'backend', 'servers_count'
    ],
    'proxy.frontend': [
        'bin', 'bout', 'scur', 'smax', 'rate', 'req_rate', 'req_tot',
        'hrsp_4xx', 'hrsp_5xx'
    ],
    'proxy.backend': ['qcur', 'scur', 'hrsp_5xx', 'qtime', 'rtime'],
    # Taskqueue service stats
    'taskqueue': [
        'utc_timestamp', 'current_requests', 'cumulative', 'recent',
        'instances_count', 'failures'
    ],
    'taskqueue.instance':
    ['start_timestamp_ms', 'current_requests', 'cumulative', 'recent'],
    'taskqueue.cumulative': ['total', 'failed', 'pb_reqs', 'rest_reqs'],
    'taskqueue.recent':
    ['total', 'failed', 'avg_latency', 'pb_reqs', 'rest_reqs'],
    # RabbitMQ stats
    'rabbitmq':
    ['utc_timestamp', 'disk_free_alarm', 'mem_alarm', 'name', 'partitions'],
    # Push queue stats
    'queue': ['name', 'messages'],
    # Cassandra stats
    'cassandra': ['utc_timestamp', 'nodes', 'missing_nodes', 'unknown_nodes'],
    # Cassandra node stats
    'cassandra.node':
    ['address', 'status', 'state', 'load', 'owns_pct', 'tokens_num'],
})
Example #11
    async def __call__(self, request):
        """ Handles HTTP request.

    Args:
      request: an instance of Request.
    Returns:
      An instance of Response.
    """
        if request.has_body:
            payload = await request.json()
        else:
            payload = {}
        include_lists = payload.get('include_lists')
        max_age = payload.get('max_age', ACCEPTABLE_STATS_AGE)

        if include_lists is not None:
            try:
                include_lists = IncludeLists(include_lists)
            except WrongIncludeLists as err:
                logger.warn("Bad request from {client} ({error})".format(
                    client=request.remote, error=err))
                return web.Response(status=http.HTTPStatus.BAD_REQUEST,
                                    reason='Wrong include_lists',
                                    text=str(err))
        else:
            include_lists = self.default_include_lists

        newer_than = time.mktime(datetime.now().timetuple()) - max_age

        if (not self.default_include_lists
                or include_lists.is_subset_of(self.default_include_lists)):
            # If user didn't specify any non-default fields we can use local cache
            fresh_local_snapshots = {
                node_ip: snapshot
                for node_ip, snapshot in self.cached_snapshots.items()
                if max_age and snapshot.utc_timestamp > newer_than
            }
            if fresh_local_snapshots:
                logger.debug(
                    "Returning cluster stats with {} cached snapshots".format(
                        len(fresh_local_snapshots)))
        else:
            fresh_local_snapshots = {}

        new_snapshots_dict, failures = (await self.stats_source.get_current(
            max_age=max_age,
            include_lists=include_lists,
            exclude_nodes=list(fresh_local_snapshots.keys())))

        # Put new snapshots to local cache
        self.cached_snapshots.update(new_snapshots_dict)

        # Extend fetched snapshots dict with fresh local snapshots
        new_snapshots_dict.update(fresh_local_snapshots)

        rendered_snapshots = {
            node_ip: stats_to_dict(snapshot, include_lists)
            for node_ip, snapshot in new_snapshots_dict.items()
        }

        return web.json_response({
            "stats": rendered_snapshots,
            "failures": failures
        })
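
For completeness, a hypothetical client-side call mirroring what the tests above assert the cluster producers send to each node; the URL, secret header, and JSON payload on a GET are taken directly from the assertions in Example #1:

    import asyncio
    import aiohttp

    async def fetch_remote_node_stats():
        payload = {
            'max_age': 10,
            'include_lists': {
                'node': ['cpu', 'memory'],
                'node.cpu': ['percent', 'count'],
                'node.memory': ['available'],
            },
        }
        async with aiohttp.ClientSession() as session:
            async with session.get(
                    'http://192.168.33.11:4378/stats/local/node',
                    headers={'Appscale-Secret': 'secret'},
                    json=payload) as resp:
                return await resp.json()

    # asyncio.run(fetch_remote_node_stats())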