Example No. 1
def _parse_args(args):
    parser = argparse.ArgumentParser(
            description='Start up a simple heartbeat server on localhost.')
    parser.add_argument(
            '--port', default=8080,
            help='The port on which to start the heartbeat server.')
    parser.add_argument(
            '--db_host',
            default=config.get_config_value('AUTOTEST_WEB', 'host'),
            help='DB server IP address.')
    parser.add_argument(
            '--db_name',
            default=config.get_config_value('AUTOTEST_WEB', 'database'),
            help='Name of the database.')
    parser.add_argument(
            '--db_user',
            default=config.get_config_value('AUTOTEST_WEB', 'user'),
            help='User for the db server.')
    parser.add_argument(
            '--db_password',
            default=config.get_config_value('AUTOTEST_WEB', 'password'),
            help='Password for the db server.')
    parser.add_argument(
            '--db_port',
            default=config.get_config_value('AUTOTEST_WEB', 'port', default=''),
            help='Port of the db server.')

    return parser.parse_args(args)
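
A minimal sketch of how this parser is typically driven; the main() wrapper below is an assumption for illustration, not part of the original module:

import sys

def main():
    # Parse everything after the program name, as _parse_args expects.
    options = _parse_args(sys.argv[1:])
    print('Heartbeat server will listen on port %s' % options.port)

if __name__ == '__main__':
    main()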
Example No. 2
def use_server_db():
    """Check if use_server_db is enabled in configuration.

    @return: True if use_server_db is set to True in global config.
    """
    return global_config.get_config_value('SERVER',
                                          'use_server_db',
                                          default=False,
                                          type=bool)
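
A hypothetical call site showing the usual guard pattern for such a boolean flag; the function name and log messages are illustrative assumptions:

import logging

def maybe_sync_servers():
    # Skip DB-backed bookkeeping entirely when the server DB is disabled.
    if not use_server_db():
        logging.debug('Server database disabled; skipping sync.')
        return
    logging.info('Server database enabled; syncing server records.')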
Example No. 3
def get_logging_handler():
    """Return a logging handler.

    Configure a RPC logging handler based on global_config and return
    the handler.
    """
    max_log_size = config.get_config_value('SERVER',
                                           'rpc_max_log_size_mb',
                                           type=int)
    number_of_old_logs = config.get_config_value('SERVER',
                                                 'rpc_num_old_logs',
                                                 type=int)
    log_path = config.get_config_value('SERVER', 'rpc_log_path')

    formatter = logging.Formatter(fmt=LOGGING_FORMAT, datefmt='%m/%d %H:%M:%S')
    handler = logging.handlers.RotatingFileHandler(
        log_path,
        maxBytes=max_log_size * MEGABYTE,
        backupCount=number_of_old_logs)
    handler.setFormatter(formatter)
    return handler
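
A short usage sketch; it assumes the LOGGING_FORMAT and MEGABYTE module constants referenced above are defined elsewhere in the module, and the logger name is illustrative:

import logging

rpc_logger = logging.getLogger('rpc_server')  # hypothetical logger name
rpc_logger.setLevel(logging.INFO)
rpc_logger.addHandler(get_logging_handler())
rpc_logger.info('RPC logging initialized.')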
Example No. 4
    def initialize(self,
                   test_env=TestEnv.sandbox,
                   account='*****@*****.**'):
        """
        Initialize the test.

        @param test_env: The test environment to use.
        @param account: The account (username) used to log in.
        """
        super(webstore_test, self).initialize()

        self.username = account
        self.password = global_config.get_config_value(
            'CLIENT', 'webstore_test_password', type=str)

        self.test_env = test_env
        self._chrome_flags = _get_chrome_flags(test_env)
        self.webstore_url = {
            TestEnv.staging: 'https://webstore-staging.corp.google.com',
            TestEnv.sandbox:
            'https://webstore-staging.sandbox.google.com/webstore',
            TestEnv.pnl:
            'https://webstore-prod-not-live.corp.google.com/webstore',
            TestEnv.prod: 'https://chrome.google.com/webstore'
        }[test_env]
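
A hedged sketch of how a control file might select the environment; the run_test call follows the usual autotest control-file pattern, but the exact arguments here are assumptions:

job.run_test('webstore_test', test_env=TestEnv.staging)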
Example No. 5

class RDBHostCacheManager(object):
    """RDB Cache manager."""

    key = collections.namedtuple('key', ['deps', 'acls'])
    use_cache = global_config.get_config_value('RDB',
                                               'use_cache',
                                               type=bool,
                                               default=True)

    def __init__(self):
        self._cache_backend = (InMemoryCacheBackend()
                               if self.use_cache else DummyCacheBackend())
        self.hits = 0
        self.misses = 0
        self.stale_entries = []

    def mean_staleness(self):
        """Compute the average stale entries per line.

        @return: A floating point representing the mean staleness.
        """
        return (reduce(lambda x, y: float(x + y), self.stale_entries) /
                len(self.stale_entries)) if self.stale_entries else 0

    def hit_ratio(self):
        """Compute the hit ratio of this cache.

        @return: A floating point percentage of the hit ratio.
        """
        if not self.hits and not self.misses:
            return 0
        requests = float(self.hits + self.misses)
        return (self.hits / requests) * 100

    def record_stats(self):
        """Record stats about the cache managed by this instance."""
        hit_ratio = self.hit_ratio()
        staleness = self.mean_staleness()
        logging.debug(
            'Cache stats: hit ratio: %.2f%%, '
            'avg staleness per line: %.2f%%.', hit_ratio, staleness)
        metrics.Float('chromeos/autotest/scheduler/rdb/cache/hit_ratio').set(
            hit_ratio)
        metrics.Float('chromeos/autotest/scheduler/rdb/cache/mean_staleness'
                      ).set(staleness)

    @classmethod
    def get_key(cls, deps, acls):
        """Return a key for the given deps, acls.

        @param deps: A list of deps, as taken by the AcquireHostRequest.
        @param acls: A list of acls, as taken by the AcquireHostRequest.
        @return: A cache key for the given deps/acls.
        """
        # All requests with the same deps, acls should hit the same cache line.
        # TODO: Do something smarter with acls, only one needs to match.
        return cls.key(deps=frozenset(deps), acls=frozenset(acls))

    def get_line(self, key):
        """Clear and return the cache line matching the key.

        @param key: The key the desired cache_line is stored under.
        @return: A list of rdb hosts matching the key, or None.

        @raises rdb_utils.CacheMiss: If the key isn't in the cache.
        """
        try:
            cache_line = self._cache_backend.get(key)
        except KeyError:
            self.misses += 1
            raise rdb_utils.CacheMiss('Key %s not in cache' % (key, ))
        self.hits += 1
        self._cache_backend.delete(key)
        return list(cache_line)

    def _check_line(self, line, key):
        """Sanity check a cache line.

        This method assumes that a cache line is made up of RDBHost objects,
        and checks to see if they all match each other/the key passed in.
        Checking is done in terms of host labels and acls; note that the hosts
        in the line can have different deps/acls, as long as they all have the
        deps required by the key, and at least one matching acl of the key.

        @param line: The cache line value.
        @param key: The key the line will be stored under.
        @raises rdb_utils.RDBException:
            If one of the hosts in the cache line is already leased.
            The cache already has a different line under the given key.
            The given key doesn't match the hosts in the line.
        """
        # Note that this doesn't mean that all hosts in the cache are unleased.
        if any(host.leased for host in line):
            raise rdb_utils.RDBException('Cannot cache leased hosts %s' % line)

        # Confirm that the given line can be used to service the key by checking
        # that all hosts have the deps mentioned in the key, and at least one
        # matching acl.
        h_keys = set([self.get_key(host.labels, host.acls) for host in line])
        for h_key in h_keys:
            if (not h_key.deps.issuperset(key.deps)
                    or not key.acls.intersection(h_key.acls)):
                raise rdb_utils.RDBException(
                    'Given key: %s does not match key '
                    'computed from hosts in line: %s' % (key, h_keys))
        if self._cache_backend.has_key(key):
            raise rdb_utils.RDBException(
                'Cannot override a cache line. It '
                'must be cleared before setting. Key: %s, hosts %s' %
                (key, line))

    def set_line(self, key, hosts):
        """Cache a list of similar hosts.

        set_line will no-op if:
            The hosts aren't all unleased.
            The hosts don't have deps/acls matching the key.
            A cache line under the same key already exists.
        The first 2 cases will lead to a cache miss in the corresponding get.

        @param hosts: A list of unleased hosts with the same deps/acls.
        @raises RDBException: If hosts is None, since None is reserved for
            key expiration.
        """
        if hosts is None:
            raise rdb_utils.RDBException('Cannot set None in the cache.')

        # An empty list means no hosts matching the request are available.
        # This can happen if a previous request leased all matching hosts.
        if not hosts or not self.use_cache:
            self._cache_backend.set(key, [])
            return
        try:
            self._check_line(hosts, key)
        except rdb_utils.RDBException as e:
            logging.error(e)
        else:
            self._cache_backend.set(key, set(hosts))
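
A hedged sketch of the intended get/set flow; the deps/acls values are illustrative stand-ins for real AcquireHostRequest fields:

manager = RDBHostCacheManager()
key = RDBHostCacheManager.get_key(deps=['board:link'], acls=['acl_group'])
try:
    # get_line clears the line, so matching hosts are only handed out once.
    hosts = manager.get_line(key)
except rdb_utils.CacheMiss:
    hosts = []  # in reality, fall back to a database query here
    manager.set_line(key, hosts)
manager.record_stats()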
Example No. 6
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os

import common
from autotest_lib.client.bin import utils as common_utils
from autotest_lib.client.common_lib.global_config import global_config

# Name of the base container.
BASE = global_config.get_config_value('AUTOSERV', 'container_base_name')

# Path to folder that contains autotest code inside container.
CONTAINER_AUTOTEST_DIR = '/usr/local/autotest'

# Naming convention of the result directory in the test container.
RESULT_DIR_FMT = os.path.join(CONTAINER_AUTOTEST_DIR, 'results', '%s')
# Attributes to retrieve about containers.
ATTRIBUTES = ['name', 'state']

SSP_ENABLED = global_config.get_config_value('AUTOSERV',
                                             'enable_ssp_container',
                                             type=bool,
                                             default=True)
# URL of the folder that stores the base container.
CONTAINER_BASE_FOLDER_URL = global_config.get_config_value(
    'AUTOSERV', 'container_base_folder_url')
CONTAINER_BASE_URL_FMT = '%s/%%s.tar.xz' % CONTAINER_BASE_FOLDER_URL
CONTAINER_BASE_URL = CONTAINER_BASE_URL_FMT % BASE
# Default directory used to store LXC containers.
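
For illustration, assuming container_base_folder_url is 'gs://chromeos-containers' and container_base_name is 'base_05' (both values are made up), the two-step formatting above produces:

CONTAINER_BASE_URL_FMT  # 'gs://chromeos-containers/%s.tar.xz'
CONTAINER_BASE_URL      # 'gs://chromeos-containers/base_05.tar.xz'

Note that the %%s in the format string survives the first substitution as %s, which is what lets BASE be filled in afterwards.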
Example No. 7
"""
Library for autotest-remote usage.
"""

import sys, os, re, traceback, signal, time, logging, getpass

try:
    import autotest.common as common
except ImportError:
    import common

from autotest_lib.client.common_lib.global_config import global_config
require_atfork = global_config.get_config_value('AUTOSERV',
                                                'require_atfork_module',
                                                type=bool,
                                                default=True)

try:
    import atfork
    atfork.monkeypatch_os_fork_functions()
    import atfork.stdlib_fixer
    # Fix the Python standard library for threading+fork safety with its
    # internal locks.  http://code.google.com/p/python-atfork/
    import warnings
    warnings.filterwarnings('ignore', 'logging module already imported')
    atfork.stdlib_fixer.fix_logging_module()
except ImportError, e:
    from autotest_lib.client.common_lib import global_config
    if global_config.global_config.get_config_value('AUTOSERV',
                                                    'require_atfork_module',
                                                    type=bool,
                                                    default=False):
        print >>sys.stderr, 'Please run utils/build_externals.py'
        print e
        sys.exit(1)
Example No. 8

from autotest_lib.site_utils.lxc.container import Container
from autotest_lib.site_utils.lxc.container_factory import ContainerFactory

try:
    from chromite.lib import metrics
    from infra_libs import ts_mon
except ImportError:
    import mock
    metrics = utils.metrics_mock
    ts_mon = mock.Mock()


# Timeout (in seconds) for container pool operations.
_CONTAINER_POOL_TIMEOUT = 3

_USE_LXC_POOL = global_config.get_config_value('LXC_POOL', 'use_lxc_pool',
                                               type=bool)

class ContainerBucket(object):
    """A wrapper class to interact with containers in a specific container path.
    """

    def __init__(self, container_path=constants.DEFAULT_CONTAINER_PATH,
                 container_factory=None):
        """Initialize a ContainerBucket.

        @param container_path: Path to the directory used to store containers.
                               Default is set to AUTOSERV/container_path in
                               global config.
        @param container_factory: A factory for creating Containers.
        """
        self.container_path = os.path.realpath(container_path)
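
A minimal usage sketch; it assumes the global-config defaults referenced above are present:

bucket = ContainerBucket()  # uses constants.DEFAULT_CONTAINER_PATH
logging.debug('Containers are stored under %s', bucket.container_path)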
Example No. 9
"""
Library for autotest-remote usage.
"""

import sys, os, re, traceback, signal, time, logging, getpass

import common

from autotest_lib.client.common_lib.global_config import global_config
require_atfork = global_config.get_config_value(
        'AUTOSERV', 'require_atfork_module', type=bool, default=True)

try:
    import atfork
    atfork.monkeypatch_os_fork_functions()
    import atfork.stdlib_fixer
    # Fix the Python standard library for threading+fork safety with its
    # internal locks.  http://code.google.com/p/python-atfork/
    import warnings
    warnings.filterwarnings('ignore', 'logging module already imported')
    atfork.stdlib_fixer.fix_logging_module()
except ImportError, e:
    from autotest_lib.client.common_lib import global_config
    if global_config.global_config.get_config_value(
            'AUTOSERV', 'require_atfork_module', type=bool, default=False):
        print >>sys.stderr, 'Please run utils/build_externals.py'
        print e
        sys.exit(1)

from autotest_lib.server import server_logging_config
from autotest_lib.server import server_job, utils, autoserv_parser, autotest