Code example #1
File: models.py Project: AOSP8146/external
class ServerRole(dbmodels.Model, model_logic.ModelExtensions):
    """Role associated with hosts."""
    # Valid roles for a server.
    ROLE_LIST = [
        'afe', 'scheduler', 'host_scheduler', 'drone', 'devserver', 'database',
        'database_slave', 'suite_scheduler', 'crash_server', 'shard',
        'golo_proxy', 'sentinel', 'reserve'
    ]
    ROLE = enum.Enum(*ROLE_LIST, string_values=True)
    # When deleting any of the following roles from a primary server, a working
    # backup must be available if user_server_db is enabled in the global config.
    ROLES_REQUIRE_BACKUP = [
        ROLE.SCHEDULER, ROLE.HOST_SCHEDULER, ROLE.DATABASE,
        ROLE.SUITE_SCHEDULER, ROLE.DRONE
    ]
    # Roles that must be assigned to a single primary server in an Autotest
    # instance
    ROLES_REQUIRE_UNIQUE_INSTANCE = [
        ROLE.SCHEDULER, ROLE.HOST_SCHEDULER, ROLE.DATABASE,
        ROLE.SUITE_SCHEDULER
    ]

    server = dbmodels.ForeignKey(Server, related_name='roles')
    role = dbmodels.CharField(max_length=128, choices=ROLE.choices())

    objects = model_logic.ExtendedManager()

    class Meta:
        """Metadata for the ServerRole class."""
        db_table = 'server_roles'
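
Since ROLE is created with string_values=True, each member evaluates to the original string from ROLE_LIST, which is exactly what the role CharField stores. A minimal usage sketch building on the class above (illustrative only, not part of the original file):

# Illustrative sketch: checking a role against the policy lists above.
role = ServerRole.ROLE.SCHEDULER                        # == 'scheduler'
assert role in ServerRole.ROLES_REQUIRE_BACKUP
assert role in ServerRole.ROLES_REQUIRE_UNIQUE_INSTANCE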
Code example #2
class Test(dbmodels.Model, model_logic.ModelExtensions):
    """\
    Required:
    author: author name
    description: description of the test
    name: test name
    time: short, medium, long
    test_class: This describes the class the test belongs in.
    test_category: This describes the category for your tests
    test_type: Client or Server
    path: path to pass to run_test()
    sync_count:  is a number >=1 (1 being the default). If it's 1, then it's an
                 async job. If it's >1 it's a sync job for that number of machines
                 i.e. if sync_count = 2 it is a sync job that requires two
                 machines.
    Optional:
    dependencies: What the test requires to run. Comma-delimited list.
    dependency_labels: many-to-many relationship with labels corresponding to
                       test dependencies.
    experimental: If this is set to True, production servers will ignore the test.
    run_verify: Whether or not the scheduler should run the verify stage
    """
    TestTime = enum.Enum('SHORT', 'MEDIUM', 'LONG', start_value=1)
    TestTypes = model_attributes.TestTypes
    # TODO(showard) - this should be merged with Job.ControlType (but right
    # now they use opposite values)

    name = dbmodels.CharField(max_length=255, unique=True)
    author = dbmodels.CharField(max_length=255)
    test_class = dbmodels.CharField(max_length=255)
    test_category = dbmodels.CharField(max_length=255)
    dependencies = dbmodels.CharField(max_length=255, blank=True)
    description = dbmodels.TextField(blank=True)
    experimental = dbmodels.BooleanField(default=True)
    run_verify = dbmodels.BooleanField(default=True)
    test_time = dbmodels.SmallIntegerField(choices=TestTime.choices(),
                                           default=TestTime.MEDIUM)
    test_type = dbmodels.SmallIntegerField(choices=TestTypes.choices())
    sync_count = dbmodels.IntegerField(default=1)
    path = dbmodels.CharField(max_length=255, unique=True)

    dependency_labels = (
        dbmodels.ManyToManyField(Label, blank=True,
                                 db_table='afe_autotests_dependency_labels'))
    name_field = 'name'
    objects = model_logic.ExtendedManager()


    def admin_description(self):
        escaped_description = saxutils.escape(self.description)
        return '<span style="white-space:pre">%s</span>' % escaped_description
    admin_description.allow_tags = True
    admin_description.short_description = 'Description'


    class Meta:
        db_table = 'afe_autotests'

    def __unicode__(self):
        return unicode(self.name)
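
With start_value=1, TestTime numbers SHORT/MEDIUM/LONG as 1/2/3, which is what the test_time SmallIntegerField stores. A brief sketch (values inferred from the enum arguments above):

# Illustrative sketch of the TestTime mapping.
assert Test.TestTime.SHORT == 1
assert Test.TestTime.MEDIUM == 2     # the model's default
assert Test.TestTime.LONG == 3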
Code example #3
class ControlData(object):
    # Available TIME settings in control file; the list must be in lower case
    # and in ascending order, with faster-running tests first.
    TEST_TIME_LIST = ['fast', 'short', 'medium', 'long', 'lengthy']
    TEST_TIME = enum.Enum(*TEST_TIME_LIST, string_values=False)

    @staticmethod
    def get_test_time_index(time):
        """
        Get the order of estimated test time, based on the TIME setting in
        the control file. A faster test gets a lower index number.
        """
        try:
            return ControlData.TEST_TIME.get_value(time.lower())
        except AttributeError:
            # Raise exception if time value is not a valid TIME setting.
            error_msg = '%s is not a valid TIME.' % time
            logging.error(error_msg)
            raise ControlVariableException(error_msg)


    def __init__(self, vars, path, raise_warnings=False):
        # Defaults
        self.path = path
        self.dependencies = set()
        # TODO(jrbarnette): This should be removed once outside
        # code that uses it can be changed.
        self.experimental = False
        self.run_verify = True
        self.sync_count = 1
        self.test_parameters = set()
        self.test_category = ''
        self.test_class = ''
        self.retries = 0
        self.job_retries = 0
        # Default to require server-side package. Unless require_ssp is
        # explicitly set to False, server-side package will be used for the
        # job. This can be overridden by global config
        # AUTOSERV/enable_ssp_container
        self.require_ssp = None
        self.attributes = set()
        self.max_result_size_KB = DEFAULT_MAX_RESULT_SIZE_KB
        self.priority = priorities.Priority.DEFAULT
        self.fast = False

        _validate_control_file_fields(self.path, vars, raise_warnings)

        for key, val in vars.iteritems():
            try:
                self.set_attr(key, val, raise_warnings)
            except Exception, e:
                if raise_warnings:
                    raise
                print 'WARNING: %s; skipping' % e

        self._patch_up_suites_from_attributes()
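
TEST_TIME is built with string_values=False, so (with the enum's default start value of 0) the value returned by get_test_time_index() is simply the position in TEST_TIME_LIST. A sketch of the expected behaviour; the 'bogus' value is made up:

# Illustrative sketch.
ControlData.get_test_time_index('FAST')     # -> 0
ControlData.get_test_time_index('short')    # -> 1 (lookup is case-insensitive)
ControlData.get_test_time_index('lengthy')  # -> 4
ControlData.get_test_time_index('bogus')    # raises ControlVariableException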
Code example #4
class PowerUnitInfo(object):
    """A class that wraps rpm/poe information of a device."""

    POWERUNIT_TYPES = enum.Enum('POE', 'RPM', string_values=True)

    def __init__(self,
                 device_hostname,
                 powerunit_type,
                 powerunit_hostname,
                 outlet,
                 hydra_hostname=None):
        self.device_hostname = device_hostname
        self.powerunit_type = powerunit_type
        self.powerunit_hostname = powerunit_hostname
        self.outlet = outlet
        self.hydra_hostname = hydra_hostname

    @staticmethod
    def get_powerunit_info(afe_host):
        """Constructe a PowerUnitInfo instance from an afe host.

        @param afe_host: A host object.

        @returns: A PowerUnitInfo object populated with the power management
                  unit information of the host.
        """
        if (not POWERUNIT_HOSTNAME_KEY in afe_host.attributes
                or not POWERUNIT_OUTLET_KEY in afe_host.attributes):
            raise rpm_infrastructure_exception.RPMInfrastructureException(
                'Cannot retrieve complete rpm information '
                'from AFE for %s, please make sure %s and %s are'
                ' in the host\'s attributes.' %
                (afe_host.hostname, POWERUNIT_HOSTNAME_KEY,
                 POWERUNIT_OUTLET_KEY))

        hydra_hostname = (afe_host.attributes[HYDRA_HOSTNAME_KEY] if
                          HYDRA_HOSTNAME_KEY in afe_host.attributes else None)
        return PowerUnitInfo(
            device_hostname=afe_host.hostname,
            powerunit_type=PowerUnitInfo.POWERUNIT_TYPES.RPM,
            powerunit_hostname=afe_host.attributes[POWERUNIT_HOSTNAME_KEY],
            outlet=afe_host.attributes[POWERUNIT_OUTLET_KEY],
            hydra_hostname=hydra_hostname)
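
A sketch of building and inspecting a PowerUnitInfo directly; every hostname and outlet below is invented for illustration:

# Illustrative sketch.
info = PowerUnitInfo(device_hostname='chromeos1-rack1-host1',
                     powerunit_type=PowerUnitInfo.POWERUNIT_TYPES.RPM,
                     powerunit_hostname='chromeos1-rack1-rpm1',
                     outlet='.A1')
assert info.powerunit_type == PowerUnitInfo.POWERUNIT_TYPES.RPM
assert info.hydra_hostname is None    # default when no hydra is involved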
Code example #5
File: models.py Project: AOSP8146/external
class Server(dbmodels.Model, model_logic.ModelExtensions):
    """Models a server."""
    DETAIL_FMT = ('Hostname     : %(hostname)s\n'
                  'Status       : %(status)s\n'
                  'Roles        : %(roles)s\n'
                  'Attributes   : %(attributes)s\n'
                  'Date Created : %(date_created)s\n'
                  'Date Modified: %(date_modified)s\n'
                  'Note         : %(note)s\n')

    STATUS_LIST = ['primary', 'backup', 'repair_required']
    STATUS = enum.Enum(*STATUS_LIST, string_values=True)

    hostname = dbmodels.CharField(unique=True, max_length=128)
    cname = dbmodels.CharField(null=True,
                               blank=True,
                               default=None,
                               max_length=128)
    status = dbmodels.CharField(unique=False,
                                max_length=128,
                                choices=STATUS.choices())
    date_created = dbmodels.DateTimeField(null=True, blank=True)
    date_modified = dbmodels.DateTimeField(null=True, blank=True)
    note = dbmodels.TextField(null=True, blank=True)

    objects = model_logic.ExtendedManager()

    class Meta:
        """Metadata for class Server."""
        db_table = 'servers'

    def __unicode__(self):
        """A string representation of the Server object.
        """
        roles = ','.join([r.role for r in self.roles.all()])
        attributes = dict([(a.attribute, a.value)
                           for a in self.attributes.all()])
        return self.DETAIL_FMT % {
            'hostname': self.hostname,
            'status': self.status,
            'roles': roles,
            'attributes': attributes,
            'date_created': self.date_created,
            'date_modified': self.date_modified,
            'note': self.note
        }

    def get_role_names(self):
        """Get a list of role names of the server.

        @return: A list of role names of the server.
        """
        return [r.role for r in self.roles.all()]

    def get_details(self):
        """Get a dictionary with all server details.

        For example:
        {
            'hostname': 'server1',
            'status': 'backup',
            'roles': ['drone', 'scheduler'],
            'attributes': {'max_processes': 300}
        }

        @return: A dictionary with all server details.
        """
        details = {}
        details['hostname'] = self.hostname
        details['status'] = self.status
        details['roles'] = self.get_role_names()
        attributes = dict([(a.attribute, a.value)
                           for a in self.attributes.all()])
        details['attributes'] = attributes
        details['date_created'] = self.date_created
        details['date_modified'] = self.date_modified
        details['note'] = self.note
        return details
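
STATUS uses string_values=True, so the status column stores the literal strings from STATUS_LIST. A short sketch of reading a row back (the hostname mirrors the docstring example; roles and attributes come from the related models shown elsewhere on this page):

# Illustrative sketch.
server = Server.objects.get(hostname='server1')
if server.status == Server.STATUS.PRIMARY:       # == 'primary'
    print server.get_details()['roles']          # e.g. ['drone', 'scheduler']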
Code example #6
File: rpc_utils.py Project: renormalist/autotest
    additional_wrap_arguments = site_additional_wrap_arguments(plan, hostname)

    verify_params = get_wrap_arguments(
        plan, hostname, model_attributes.AdditionalParameterType.VERIFY)

    return control_file.wrap_control_file(
        control_file=test_config.control_file.contents,
        is_server=test_config.is_server,
        skip_verify=(not run_verify),
        verify_params=verify_params,
        **additional_wrap_arguments)


ComputeTestConfigStatusResult = enum.Enum('Pass',
                                          'Fail',
                                          'Scheduled',
                                          'Running',
                                          string_values=True)


def compute_test_config_status(host, test_config=None):
    """
    Returns a value of ComputeTestConfigStatusResult:
        Pass: This host passed the test config
        Fail: This host failed the test config
        Scheduled: This host has not yet run this test config
        Running: This host is currently running this test config

    A 'pass' means that, for every test configuration in the plan, the machine
    had at least one AFE job with no failed tests. 'passed' could also be None,
    meaning that this host is still running tests.
Code example #7
class ControlData(object):
    # Available TIME settings in control file; the list must be in lower case
    # and in ascending order, with faster-running tests first.
    TEST_TIME_LIST = ['fast', 'short', 'medium', 'long', 'lengthy']
    TEST_TIME = enum.Enum(*TEST_TIME_LIST, string_values=False)

    @staticmethod
    def get_test_time_index(time):
        """
        Get the order of estimated test time, based on the TIME setting in
        the control file. A faster test gets a lower index number.
        """
        try:
            return ControlData.TEST_TIME.get_value(time.lower())
        except AttributeError:
            # Raise exception if time value is not a valid TIME setting.
            error_msg = '%s is not a valid TIME.' % time
            logging.error(error_msg)
            raise ControlVariableException(error_msg)

    def __init__(self, vars, path, raise_warnings=False):
        # Defaults
        self.path = path
        self.dependencies = set()
        # TODO(jrbarnette): This should be removed once outside
        # code that uses it can be changed.
        self.experimental = False
        self.run_verify = True
        self.sync_count = 1
        self.test_parameters = set()
        self.test_category = ''
        self.test_class = ''
        self.retries = 0
        self.job_retries = 0
        # Default to require server-side package. Unless require_ssp is
        # explicitly set to False, server-side package will be used for the
        # job. This can be overridden by global config
        # AUTOSERV/enable_ssp_container
        self.require_ssp = None
        self.attributes = set()

        diff = REQUIRED_VARS - set(vars)
        if diff:
            warning = ('WARNING: Not all required control '
                       'variables were specified in %s.  Please define '
                       '%s.') % (self.path, ', '.join(diff))
            if raise_warnings:
                raise ControlVariableException(warning)
            print textwrap.wrap(warning, 80)

        obsolete = OBSOLETE_VARS & set(vars)
        if obsolete:
            warning = ('WARNING: Obsolete variables were '
                       'specified in %s.  Please remove '
                       '%s.') % (self.path, ', '.join(obsolete))
            if raise_warnings:
                raise ControlVariableException(warning)
            print textwrap.wrap(warning, 80)

        for key, val in vars.iteritems():
            try:
                self.set_attr(key, val, raise_warnings)
            except Exception, e:
                if raise_warnings:
                    raise
                print 'WARNING: %s; skipping' % e
Code example #8
import common
from autotest_lib.client.common_lib import enum


# common enums for Job attributes
RebootBefore = enum.Enum('Never', 'If dirty', 'Always')
RebootAfter = enum.Enum('Never', 'If all tests passed', 'Always')
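
Without string_values these enums hold small integers (0, 1, 2 with the default start value), which is why Job stores them in SmallIntegerFields; multi-word names become underscored attributes. A sketch of the assumed mapping:

# Illustrative sketch; values follow from the default start_value of 0.
assert RebootBefore.NEVER == 0
assert RebootBefore.IF_DIRTY == 1              # 'If dirty' -> IF_DIRTY
assert RebootAfter.IF_ALL_TESTS_PASSED == 1
assert RebootAfter.ALWAYS == 2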
Code example #9
class Job(dbmodels.Model, model_logic.ModelExtensions):
    """\
    owner: username of job owner
    name: job name (does not have to be unique)
    priority: Low, Medium, High, Urgent (or 0-3)
    control_file: contents of control file
    control_type: Client or Server
    created_on: date of job creation
    submitted_on: date of job submission
    synch_count: how many hosts should be used per autoserv execution
    run_verify: Whether or not to run the verify phase
    timeout: hours from queuing time until job times out
    max_runtime_hrs: hours from job starting time until job times out
    email_list: list of people to email on completion delimited by any of:
                white space, ',', ':', ';'
    dependency_labels: many-to-many relationship with labels corresponding to
                       job dependencies
    reboot_before: Never, If dirty, or Always
    reboot_after: Never, If all tests passed, or Always
    parse_failed_repair: if True, a failed repair launched by this job will have
    its results parsed as part of the job.
    drone_set: The set of drones to run this job on
    """
    DEFAULT_TIMEOUT = global_config.global_config.get_config_value(
        'AUTOTEST_WEB', 'job_timeout_default', default=240)
    DEFAULT_MAX_RUNTIME_HRS = global_config.global_config.get_config_value(
        'AUTOTEST_WEB', 'job_max_runtime_hrs_default', default=72)
    DEFAULT_PARSE_FAILED_REPAIR = global_config.global_config.get_config_value(
        'AUTOTEST_WEB', 'parse_failed_repair_default', type=bool,
        default=False)

    Priority = enum.Enum('Low', 'Medium', 'High', 'Urgent')
    ControlType = enum.Enum('Server', 'Client', start_value=1)

    owner = dbmodels.CharField(max_length=255)
    name = dbmodels.CharField(max_length=255)
    priority = dbmodels.SmallIntegerField(choices=Priority.choices(),
                                          blank=True, # to allow 0
                                          default=Priority.MEDIUM)
    control_file = dbmodels.TextField(null=True, blank=True)
    control_type = dbmodels.SmallIntegerField(choices=ControlType.choices(),
                                              blank=True, # to allow 0
                                              default=ControlType.CLIENT)
    created_on = dbmodels.DateTimeField()
    synch_count = dbmodels.IntegerField(null=True, default=1)
    timeout = dbmodels.IntegerField(default=DEFAULT_TIMEOUT)
    run_verify = dbmodels.BooleanField(default=True)
    email_list = dbmodels.CharField(max_length=250, blank=True)
    dependency_labels = (
            dbmodels.ManyToManyField(Label, blank=True,
                                     db_table='afe_jobs_dependency_labels'))
    reboot_before = dbmodels.SmallIntegerField(
        choices=model_attributes.RebootBefore.choices(), blank=True,
        default=DEFAULT_REBOOT_BEFORE)
    reboot_after = dbmodels.SmallIntegerField(
        choices=model_attributes.RebootAfter.choices(), blank=True,
        default=DEFAULT_REBOOT_AFTER)
    parse_failed_repair = dbmodels.BooleanField(
        default=DEFAULT_PARSE_FAILED_REPAIR)
    max_runtime_hrs = dbmodels.IntegerField(default=DEFAULT_MAX_RUNTIME_HRS)
    drone_set = dbmodels.ForeignKey(DroneSet, null=True, blank=True)

    parameterized_job = dbmodels.ForeignKey(ParameterizedJob, null=True,
                                            blank=True)


    # custom manager
    objects = JobManager()


    def is_server_job(self):
        return self.control_type == self.ControlType.SERVER


    @classmethod
    def parameterized_jobs_enabled(cls):
        return global_config.global_config.get_config_value(
                'AUTOTEST_WEB', 'parameterized_jobs', type=bool)


    @classmethod
    def check_parameterized_job(cls, control_file, parameterized_job):
        """
        Checks that the job is valid given the global config settings

        First, either control_file must be set, or parameterized_job must be
        set, but not both. Second, parameterized_job must be set if and only if
        the parameterized_jobs option in the global config is set to True.
        """
        if not (bool(control_file) ^ bool(parameterized_job)):
            raise Exception('Job must have either control file or '
                            'parameterization, but not both')

        parameterized_jobs_enabled = cls.parameterized_jobs_enabled()
        if control_file and parameterized_jobs_enabled:
            raise Exception('Control file specified, but parameterized jobs '
                            'are enabled')
        if parameterized_job and not parameterized_jobs_enabled:
            raise Exception('Parameterized job specified, but parameterized '
                            'jobs are not enabled')


    @classmethod
    def create(cls, owner, options, hosts):
        """\
        Creates a job by taking some information (the listed args)
        and filling in the rest of the necessary information.
        """
        AclGroup.check_for_acl_violation_hosts(hosts)

        control_file = options.get('control_file')
        parameterized_job = options.get('parameterized_job')
        cls.check_parameterized_job(control_file=control_file,
                                    parameterized_job=parameterized_job)

        user = User.current_user()
        if options.get('reboot_before') is None:
            options['reboot_before'] = user.get_reboot_before_display()
        if options.get('reboot_after') is None:
            options['reboot_after'] = user.get_reboot_after_display()

        drone_set = DroneSet.resolve_name(options.get('drone_set'))

        job = cls.add_object(
            owner=owner,
            name=options['name'],
            priority=options['priority'],
            control_file=control_file,
            control_type=options['control_type'],
            synch_count=options.get('synch_count'),
            timeout=options.get('timeout'),
            max_runtime_hrs=options.get('max_runtime_hrs'),
            run_verify=options.get('run_verify'),
            email_list=options.get('email_list'),
            reboot_before=options.get('reboot_before'),
            reboot_after=options.get('reboot_after'),
            parse_failed_repair=options.get('parse_failed_repair'),
            created_on=datetime.now(),
            drone_set=drone_set,
            parameterized_job=parameterized_job)

        job.dependency_labels = options['dependencies']

        if options.get('keyvals'):
            for key, value in options['keyvals'].iteritems():
                JobKeyval.objects.create(job=job, key=key, value=value)

        return job


    def save(self, *args, **kwargs):
        self.check_parameterized_job(control_file=self.control_file,
                                     parameterized_job=self.parameterized_job)
        super(Job, self).save(*args, **kwargs)


    def queue(self, hosts, atomic_group=None, is_template=False):
        """Enqueue a job on the given hosts."""
        if not hosts:
            if atomic_group:
                # No hosts or labels are required to queue an atomic group
                # Job.  However, if they are given, we respect them below.
                atomic_group.enqueue_job(self, is_template=is_template)
            else:
                # hostless job
                entry = HostQueueEntry.create(job=self, is_template=is_template)
                entry.save()
            return

        for host in hosts:
            host.enqueue_job(self, atomic_group=atomic_group,
                             is_template=is_template)


    def create_recurring_job(self, start_date, loop_period, loop_count, owner):
        rec = RecurringRun(job=self, start_date=start_date,
                           loop_period=loop_period,
                           loop_count=loop_count,
                           owner=User.objects.get(login=owner))
        rec.save()
        return rec.id


    def user(self):
        try:
            return User.objects.get(login=self.owner)
        except self.DoesNotExist:
            return None


    def abort(self):
        for queue_entry in self.hostqueueentry_set.all():
            queue_entry.abort()


    def tag(self):
        return '%s-%s' % (self.id, self.owner)


    def keyval_dict(self):
        return dict((keyval.key, keyval.value)
                    for keyval in self.jobkeyval_set.all())


    class Meta:
        db_table = 'afe_jobs'

    def __unicode__(self):
        return u'%s (%s-%s)' % (self.name, self.id, self.owner)
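
Priority and ControlType are integer enums: Priority starts at the enum's default 0 (which is why the field allows blank, to permit 0), while ControlType starts at 1. A compact sketch of how is_server_job() reads against those values:

# Illustrative sketch.
assert Job.Priority.LOW == 0
assert Job.Priority.MEDIUM == 1            # the model default
assert Job.ControlType.SERVER == 1         # start_value=1
assert Job.ControlType.CLIENT == 2
# job.is_server_job() is simply: job.control_type == Job.ControlType.SERVER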
Code example #10
class Host(model_logic.ModelWithInvalid, dbmodels.Model,
           model_logic.ModelWithAttributes):
    """\
    Required:
    hostname

    optional:
    locked: if true, host is locked and will not be queued

    Internal:
    synch_id: currently unused
    status: string describing status of host
    invalid: true if the host has been deleted
    protection: indicates what can be done to this host during repair
    locked_by: user that locked the host, or null if the host is unlocked
    lock_time: DateTime at which the host was locked
    dirty: true if the host has been used without being rebooted
    """
    Status = enum.Enum('Verifying', 'Running', 'Ready', 'Repairing',
                       'Repair Failed', 'Cleaning', 'Pending',
                       string_values=True)
    Protection = host_protections.Protection

    hostname = dbmodels.CharField(max_length=255, unique=True)
    labels = dbmodels.ManyToManyField(Label, blank=True,
                                      db_table='afe_hosts_labels')
    locked = dbmodels.BooleanField(default=False)
    synch_id = dbmodels.IntegerField(blank=True, null=True,
                                     editable=settings.FULL_ADMIN)
    status = dbmodels.CharField(max_length=255, default=Status.READY,
                                choices=Status.choices(),
                                editable=settings.FULL_ADMIN)
    invalid = dbmodels.BooleanField(default=False,
                                    editable=settings.FULL_ADMIN)
    protection = dbmodels.SmallIntegerField(null=False, blank=True,
                                            choices=host_protections.choices,
                                            default=host_protections.default)
    locked_by = dbmodels.ForeignKey(User, null=True, blank=True, editable=False)
    lock_time = dbmodels.DateTimeField(null=True, blank=True, editable=False)
    dirty = dbmodels.BooleanField(default=True, editable=settings.FULL_ADMIN)

    name_field = 'hostname'
    objects = model_logic.ModelWithInvalidManager()
    valid_objects = model_logic.ValidObjectsManager()


    def __init__(self, *args, **kwargs):
        super(Host, self).__init__(*args, **kwargs)
        self._record_attributes(['status'])


    @staticmethod
    def create_one_time_host(hostname):
        query = Host.objects.filter(hostname=hostname)
        if query.count() == 0:
            host = Host(hostname=hostname, invalid=True)
            host.do_validate()
        else:
            host = query[0]
            if not host.invalid:
                raise model_logic.ValidationError({
                    'hostname' : '%s already exists in the autotest DB.  '
                        'Select it rather than entering it as a one time '
                        'host.' % hostname
                    })
        host.protection = host_protections.Protection.DO_NOT_REPAIR
        host.locked = False
        host.save()
        host.clean_object()
        return host


    def resurrect_object(self, old_object):
        super(Host, self).resurrect_object(old_object)
        # invalid hosts can be in use by the scheduler (as one-time hosts), so
        # don't change the status
        self.status = old_object.status


    def clean_object(self):
        self.aclgroup_set.clear()
        self.labels.clear()


    def save(self, *args, **kwargs):
        # extra spaces in the hostname can be a sneaky source of errors
        self.hostname = self.hostname.strip()
        # is this a new object being saved for the first time?
        first_time = (self.id is None)
        if not first_time:
            AclGroup.check_for_acl_violation_hosts([self])
        if self.locked and not self.locked_by:
            self.locked_by = User.current_user()
            self.lock_time = datetime.now()
            self.dirty = True
        elif not self.locked and self.locked_by:
            self.locked_by = None
            self.lock_time = None
        super(Host, self).save(*args, **kwargs)
        if first_time:
            everyone = AclGroup.objects.get(name='Everyone')
            everyone.hosts.add(self)
        self._check_for_updated_attributes()


    def delete(self):
        AclGroup.check_for_acl_violation_hosts([self])
        for queue_entry in self.hostqueueentry_set.all():
            queue_entry.deleted = True
            queue_entry.abort()
        super(Host, self).delete()


    def on_attribute_changed(self, attribute, old_value):
        assert attribute == 'status'
        logging.info(self.hostname + ' -> ' + self.status)


    def enqueue_job(self, job, atomic_group=None, is_template=False):
        """Enqueue a job on this host."""
        queue_entry = HostQueueEntry.create(host=self, job=job,
                                            is_template=is_template,
                                            atomic_group=atomic_group)
        # allow recovery of dead hosts from the frontend
        if not self.active_queue_entry() and self.is_dead():
            self.status = Host.Status.READY
            self.save()
        queue_entry.save()

        block = IneligibleHostQueue(job=job, host=self)
        block.save()


    def platform(self):
        # TODO(showard): slightly hacky?
        platforms = self.labels.filter(platform=True)
        if len(platforms) == 0:
            return None
        return platforms[0]
    platform.short_description = 'Platform'


    @classmethod
    def check_no_platform(cls, hosts):
        Host.objects.populate_relationships(hosts, Label, 'label_list')
        errors = []
        for host in hosts:
            platforms = [label.name for label in host.label_list
                         if label.platform]
            if platforms:
                # do a join, just in case this host has multiple platforms,
                # we'll be able to see it
                errors.append('Host %s already has a platform: %s' % (
                              host.hostname, ', '.join(platforms)))
        if errors:
            raise model_logic.ValidationError({'labels': '; '.join(errors)})


    def is_dead(self):
        return self.status == Host.Status.REPAIR_FAILED


    def active_queue_entry(self):
        active = list(self.hostqueueentry_set.filter(active=True))
        if not active:
            return None
        assert len(active) == 1, ('More than one active entry for '
                                  'host ' + self.hostname)
        return active[0]


    def _get_attribute_model_and_args(self, attribute):
        return HostAttribute, dict(host=self, attribute=attribute)


    class Meta:
        db_table = 'afe_hosts'

    def __unicode__(self):
        return unicode(self.hostname)
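
Host.Status is a string enum, so status checks such as is_dead() compare literal strings; 'Repair Failed' becomes the REPAIR_FAILED attribute. A sketch building on the class above (the hostname is fictional):

# Illustrative sketch.
assert Host.Status.REPAIR_FAILED == 'Repair Failed'
host = Host.objects.get(hostname='host1')
if host.is_dead():                          # status == 'Repair Failed'
    host.status = Host.Status.READY         # same recovery path enqueue_job uses
    host.save()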
Code example #11
"""
import argparse
import collections
import logging
import re
import sys

import common

from autotest_lib.client.common_lib import enum
from autotest_lib.server.cros.dynamic_suite import frontend_wrappers

CHROMEOS_LABS = enum.Enum('OysterBay',
                          'Atlantis',
                          'Chaos',
                          'Destiny',
                          start_value=1)
HOST_REGX = 'chromeos(\d+)(-row(\d+))*-rack(\d+)-host(\d+)'
DeviceHostname = collections.namedtuple('DeviceHostname',
                                        ['lab', 'row', 'rack', 'host'])


class BaseLabConfig(object):
    """Base class for a lab configuration."""
    RPM_OUTLET_MAP = {}
    LAB_NUMBER = -1

    @classmethod
    def get_rpm_hostname(cls, device_hostname):
        """Get rpm hostname given a device.
Code example #12
# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains the status enums for use by Hosts in the
database. It is a stand alone module as these status strings
are needed from various disconnected pieces of code.
"""

from autotest_lib.client.common_lib import enum

Status = enum.Enum('Verifying',
                   'Running',
                   'Ready',
                   'Repairing',
                   'Repair Failed',
                   'Cleaning',
                   'Pending',
                   'Resetting',
                   'Provisioning',
                   string_values=True)

# States associated with a DUT that is doing nothing, whether or not
# it's eligible to run a test.
IDLE_STATES = {Status.READY, Status.REPAIR_FAILED}

# States associated with a DUT that is not available for jobs.  Note that a
# locked host is also unavailable no matter the status.
UNAVAILABLE_STATES = {Status.REPAIR_FAILED, Status.REPAIRING, Status.VERIFYING}
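
A short sketch of how these sets are meant to be queried:

# Illustrative sketch.
assert Status.REPAIR_FAILED == 'Repair Failed'
assert Status.REPAIR_FAILED in IDLE_STATES           # doing nothing...
assert Status.REPAIR_FAILED in UNAVAILABLE_STATES    # ...but not usable for jobs
assert Status.READY in IDLE_STATES
assert Status.READY not in UNAVAILABLE_STATES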
Code example #13
import common
from autotest_lib.client.common_lib import enum, utils


def _site_host_actions_dummy():
    return []


_site_host_actions = utils.import_site_function(
    __file__, 'autotest_lib.frontend.planner.site_failure_actions',
    'site_host_actions', _site_host_actions_dummy)

HostAction = enum.Enum(string_values=True,
                       *(_site_host_actions() +
                         ['Block', 'Unblock', 'Reinstall']))

TestAction = enum.Enum('Skip', 'Rerun', string_values=True)
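
When no site-specific module is installed, _site_host_actions() falls back to the dummy and HostAction contains just the three built-in names; both enums use their names as string values. A sketch of that assumed default:

# Illustrative sketch (assumes no site_failure_actions module is present).
assert HostAction.BLOCK == 'Block'
assert HostAction.REINSTALL == 'Reinstall'
assert TestAction.RERUN == 'Rerun'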
Code example #14
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import json
import sys

from autotest_lib.client.common_lib import enum

# Return code that will be sent back to callers.
RETURN_CODES = enum.Enum(
    'OK',
    'ERROR',
    'WARNING',
    'INFRA_FAILURE',
    'SUITE_TIMEOUT',
    'BOARD_NOT_AVAILABLE',
    'INVALID_OPTIONS',
)


class SuiteResult(
        collections.namedtuple('SuiteResult', ['return_code', 'output_dict'])):
    """Result of running a suite to return."""
    def __new__(cls, return_code, output_dict=None):
        if output_dict is None:
            output_dict = dict()
        else:
            output_dict = output_dict.copy()
        output_dict['return_code'] = return_code
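
RETURN_CODES has no string_values argument, so its members are consecutive integers starting at 0, which suits process exit codes. A sketch of constructing a result; the output fields are invented, and it assumes the truncated end of __new__ forwards the updated dict to the namedtuple:

# Illustrative sketch.
assert RETURN_CODES.OK == 0
assert RETURN_CODES.INFRA_FAILURE == 3
result = SuiteResult(RETURN_CODES.WARNING, {'reason': 'flaky test'})
# result.output_dict is expected to carry 'return_code' alongside the
# caller-supplied keys.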
Code example #15
# Changing this file has consequences that need to be understood.
# Adding a protection level to the enum requires you to append your change to
# the end of the enum or a database migration needs to be added to migrate
# older protections to match the layout of the new enum.
# Removing a protection level from the enum requires a database migration to
# update the integer values in the DB and migrate hosts that use the removed
# protection to a default protection level.
# IF THIS IS NOT DONE HOSTS' PROTECTION LEVELS WILL BE CHANGED RANDOMLY.

Protection = enum.Enum('No protection',          # Repair can do anything to
                                                 # this host.
                       'Repair software only',   # repair should try to fix any
                                                 # software problem
                       'Repair filesystem only', # Repair should only try to
                                                 # recover the file system.
                       'Do not repair',          # Repair should not touch this
                                                 # host.
                       'Do not verify',          # Don't even try to verify
                                                 # this host
                       )

running_client = global_config.global_config.check_stand_alone_client_run()

try:
    _bad_value = object()
    default_protection = global_config.global_config.get_config_value(
                            'HOSTS', 'default_protection', default=_bad_value)
    if default_protection == _bad_value:
        if not running_client:
            raise global_config.ConfigError(
Code example #16
        exit_code = 1
        traceback.print_exc()
    finally:
        metrics.Flush()

    sys.exit(exit_code)


# Job breakdown statuses
_hs = host_states.Status
_qs = host_queue_entry_states.Status
_status_list = [
    _qs.QUEUED, _qs.RESETTING, _qs.VERIFYING, _qs.PROVISIONING, _hs.REPAIRING,
    _qs.CLEANING, _qs.RUNNING, _qs.GATHERING, _qs.PARSING
]
_JOB_OVERHEAD_STATUS = enum.Enum(*_status_list, string_values=True)


def get_job_status(options):
    """Returns the HQE Status for this run.

    @param options: parser options.
    """
    s = _JOB_OVERHEAD_STATUS
    task_mapping = {
        'reset': s.RESETTING,
        'verify': s.VERIFYING,
        'provision': s.PROVISIONING,
        'repair': s.REPAIRING,
        'cleanup': s.CLEANING,
        'collect_crashinfo': s.GATHERING
Code example #17
class SpecialTask(dbmodels.Model, model_logic.ModelExtensions):
    """\
    Tasks to run on hosts at the next time they are in the Ready state. Use this
    for high-priority tasks, such as forced repair or forced reinstall.

    host: host to run this task on
    task: special task to run
    time_requested: date and time the request for this task was made
    is_active: task is currently running
    is_complete: task has finished running
    time_started: date and time the task started
    queue_entry: Host queue entry waiting on this task (or None, if task was not
                 started in preparation of a job)
    """
    Task = enum.Enum('Verify', 'Cleanup', 'Repair', string_values=True)

    host = dbmodels.ForeignKey(Host, blank=False, null=False)
    task = dbmodels.CharField(max_length=64, choices=Task.choices(),
                              blank=False, null=False)
    requested_by = dbmodels.ForeignKey(User)
    time_requested = dbmodels.DateTimeField(auto_now_add=True, blank=False,
                                            null=False)
    is_active = dbmodels.BooleanField(default=False, blank=False, null=False)
    is_complete = dbmodels.BooleanField(default=False, blank=False, null=False)
    time_started = dbmodels.DateTimeField(null=True, blank=True)
    queue_entry = dbmodels.ForeignKey(HostQueueEntry, blank=True, null=True)
    success = dbmodels.BooleanField(default=False, blank=False, null=False)

    objects = model_logic.ExtendedManager()


    def save(self, **kwargs):
        if self.queue_entry:
            self.requested_by = User.objects.get(
                    login=self.queue_entry.job.owner)
        super(SpecialTask, self).save(**kwargs)


    def execution_path(self):
        """@see HostQueueEntry.execution_path()"""
        return 'hosts/%s/%s-%s' % (self.host.hostname, self.id,
                                   self.task.lower())


    # property to emulate HostQueueEntry.status
    @property
    def status(self):
        """
        Return a host queue entry status appropriate for this task.  Although
        SpecialTasks are not HostQueueEntries, it is helpful to the user to
        present similar statuses.
        """
        if self.is_complete:
            if self.success:
                return HostQueueEntry.Status.COMPLETED
            return HostQueueEntry.Status.FAILED
        if self.is_active:
            return HostQueueEntry.Status.RUNNING
        return HostQueueEntry.Status.QUEUED


    # property to emulate HostQueueEntry.started_on
    @property
    def started_on(self):
        return self.time_started


    @classmethod
    def schedule_special_task(cls, host, task):
        """
        Schedules a special task on a host if the task is not already scheduled.
        """
        existing_tasks = SpecialTask.objects.filter(host__id=host.id, task=task,
                                                    is_active=False,
                                                    is_complete=False)
        if existing_tasks:
            return existing_tasks[0]

        special_task = SpecialTask(host=host, task=task,
                                   requested_by=User.current_user())
        special_task.save()
        return special_task


    def activate(self):
        """
        Sets a task as active and sets the time started to the current time.
        """
        logging.info('Starting: %s', self)
        self.is_active = True
        self.time_started = datetime.now()
        self.save()


    def finish(self, success):
        """
        Sets a task as completed
        """
        logging.info('Finished: %s', self)
        self.is_active = False
        self.is_complete = True
        self.success = success
        self.save()


    class Meta:
        db_table = 'afe_special_tasks'


    def __unicode__(self):
        result = u'Special Task %s (host %s, task %s, time %s)' % (
            self.id, self.host, self.task, self.time_requested)
        if self.is_complete:
            result += u' (completed)'
        elif self.is_active:
            result += u' (active)'

        return result
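
Task is a string enum, so the stored value is the capitalized name and execution_path() lower-cases it for the results directory. A sketch building on the class above; the host object is assumed to exist already:

# Illustrative sketch.
task = SpecialTask.schedule_special_task(host, SpecialTask.Task.REPAIR)
task.activate()                   # is_active=True, time_started=now
# task.execution_path() -> 'hosts/<hostname>/<id>-repair'
task.finish(success=True)         # status property now reports COMPLETED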
Code example #18
from autotest_lib.client.common_lib import enum

# We include a 'Default' level just below what BVT will run at so that when
# the priority rework code is rolled out, any code that doesn't specify a
# priority, such as suites on old branches, will inherit a priority that makes
# them a best effort without lengthening important build processes.
Priority = enum.Enum('Weekly', 'Daily', 'PostBuild', 'Default', 'Build',
                     'PFQ', 'CQ', start_value=10, step=10)
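
With start_value=10 and step=10 the seven levels are spaced ten apart, leaving room to slot in new priorities later. The implied values:

# Illustrative sketch; values follow from start_value=10, step=10.
assert Priority.WEEKLY == 10
assert Priority.DEFAULT == 40
assert Priority.CQ == 70
assert Priority.DEFAULT < Priority.BUILD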
Code example #19
File: job_overhead.py Project: AOSP8146/external
from autotest_lib.site_utils import metadata_reporter

# Metadata db type string for job time stats
DEFAULT_KEY = 'job_time_breakdown'

# Metadata db type string for suite time stats
SUITE_RUNTIME_KEY = 'suite_runtime'

# Job breakdown statuses
_hs = host_states.Status
_qs = host_queue_entry_states.Status
_status_list = [
    _qs.QUEUED, _qs.RESETTING, _qs.VERIFYING, _qs.PROVISIONING, _hs.REPAIRING,
    _qs.CLEANING, _qs.RUNNING, _qs.GATHERING, _qs.PARSING
]
STATUS = enum.Enum(*_status_list, string_values=True)


def record_state_duration(job_or_task_id,
                          hostname,
                          status,
                          duration_secs,
                          type_str=DEFAULT_KEY,
                          is_special_task=False):
    """Record state duration for a job or a task.

    @param job_or_task_id: Integer, representing a job id or a special task id.
    @param hostname: String, representing a hostname.
    @param status: One of the enum values of job_overhead.STATUS.
    @param duration_secs: Duration of the job/task in secs.
    @param is_special_task: True/False, whether this is a special task.
Code example #20
import common
from autotest_lib.client.common_lib import enum, utils

# common enums for Host attributes
HostStatus = enum.Enum('Finished', 'Running', 'Blocked', string_values=True)

# common enums for TestRun attributes
TestRunStatus = enum.Enum('Active', 'Passed', 'Failed', string_values=True)

# common enums for SavedObject attributes
SavedObjectType = enum.Enum('support',
                            'triage',
                            'autoprocess',
                            'custom_query',
                            string_values=True)


# common enums for AdditionalParameter attributes
def _site_additional_parameter_types_dummy():
    return []


_site_additional_parameter_types = utils.import_site_function(
    __file__, 'autotest_lib.frontend.planner.site_model_attributes',
    'site_additional_parameter_types', _site_additional_parameter_types_dummy)
AdditionalParameterType = enum.Enum(string_values=True,
                                    *(_site_additional_parameter_types() +
                                      ['Verify']))
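
All of these are string enums, so comparisons and storage work directly on the names, and lowercase names are still exposed through uppercase attributes. A sketch:

# Illustrative sketch.
assert HostStatus.RUNNING == 'Running'
assert TestRunStatus.PASSED == 'Passed'
assert SavedObjectType.CUSTOM_QUERY == 'custom_query'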
Code example #21
"""
This module contains the status enums for use by HostQueueEntrys in the
database.  It is a stand alone module as these status strings are needed
from various disconnected pieces of code that should not depend on everything
that frontend.afe.models depends on such as RPC clients.
"""

from autotest_lib.client.common_lib import enum

Status = enum.Enum('Queued',
                   'Starting',
                   'Verifying',
                   'Pending',
                   'Waiting',
                   'Running',
                   'Gathering',
                   'Parsing',
                   'Archiving',
                   'Aborted',
                   'Completed',
                   'Failed',
                   'Stopped',
                   'Template',
                   string_values=True)
ACTIVE_STATUSES = (Status.STARTING, Status.VERIFYING, Status.PENDING,
                   Status.RUNNING, Status.GATHERING)
COMPLETE_STATUSES = (Status.ABORTED, Status.COMPLETED, Status.FAILED,
                     Status.STOPPED, Status.TEMPLATE)
Code example #22
import common
from autotest_lib.client.common_lib import enum


# common enums for Job attributes
RebootBefore = enum.Enum('Never', 'If dirty', 'Always')
RebootAfter = enum.Enum('Never', 'If all tests passed', 'Always')


# common enums for profiler and job parameter types
ParameterTypes = enum.Enum('int', 'float', 'string', string_values=True)
Code example #23
HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status

class NullMethodObject(object):
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)

# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse', 'archive', 'reset', 'provision')


_PIDFILE_TO_PIDFILE_TYPE = {
        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
        }


_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockConnectionManager(object):
Code example #24
"""
This module contains the status enums for use by HostQueueEntrys in the
database.  It is a stand alone module as these status strings are needed
from various disconnected pieces of code that should not depend on everything
that frontend.afe.models depends on such as RPC clients.
"""

from autotest_lib.client.common_lib import enum

Status_list = [
    'Queued', 'Starting', 'Resetting', 'Verifying', 'Provisioning', 'Pending',
    'Running', 'Gathering', 'Parsing', 'Aborted', 'Completed', 'Failed',
    'Stopped', 'Cleaning', 'Template'
]

Status = enum.Enum(*Status_list, string_values=True)
ACTIVE_STATUSES = (Status.STARTING, Status.RESETTING, Status.VERIFYING,
                   Status.PROVISIONING, Status.PENDING, Status.RUNNING,
                   Status.GATHERING, Status.CLEANING)
COMPLETE_STATUSES = (Status.ABORTED, Status.COMPLETED, Status.FAILED,
                     Status.STOPPED, Status.TEMPLATE)
# A state cannot both be active and complete
assert not set(ACTIVE_STATUSES) & set(COMPLETE_STATUSES)
PRE_JOB_STATUSES = (Status.RESETTING, Status.PROVISIONING, Status.VERIFYING,
                    Status.PENDING, Status.QUEUED)
IDLE_PRE_JOB_STATUSES = (Status.PENDING, Status.QUEUED)

IntStatus = enum.Enum(*Status_list)
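
Status and IntStatus are built from the same Status_list, so a string status and its integer counterpart share a position; get_value() can translate between them, as the ControlData example earlier on this page relies on. A sketch of that assumption:

# Illustrative sketch.
assert Status.QUEUED == 'Queued'
assert IntStatus.QUEUED == 0                      # same position in Status_list
assert IntStatus.get_value(Status.RUNNING) == IntStatus.RUNNING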
Code example #25
File: control_data.py Project: wuyeliang/autotest
    # The 'compiler' module is gone in Python 3.0.  Let's not say
    # so in every log file.
    warnings.simplefilter("ignore", DeprecationWarning)
    import compiler
import logging
import textwrap
import re

from autotest_lib.client.common_lib import enum
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import priorities

REQUIRED_VARS = set(['author', 'doc', 'name', 'time', 'test_type'])
OBSOLETE_VARS = set(['experimental'])

CONTROL_TYPE = enum.Enum('Server', 'Client', start_value=1)
CONTROL_TYPE_NAMES =  enum.Enum(*CONTROL_TYPE.names, string_values=True)

_SUITE_ATTRIBUTE_PREFIX = 'suite:'

CONFIG = global_config.global_config

# Default maximum test result size in kB.
DEFAULT_MAX_RESULT_SIZE_KB = CONFIG.get_config_value(
        'AUTOSERV', 'default_max_result_size_KB', type=int, default=20000)


class ControlVariableException(Exception):
    pass

def _validate_control_file_fields(control_file_path, control_file_vars,
Code example #26
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import time

from autotest_lib.client.common_lib import enum, error
from autotest_lib.server import test
from autotest_lib.server.cros.dark_resume_utils import DarkResumeUtils
from autotest_lib.server.cros.faft.config.config import Config as FAFTConfig
from autotest_lib.server.cros.servo import chrome_ec

# Possible states base can be forced into.
BASE_STATE = enum.Enum('ATTACH', 'DETACH', 'RESET')

# List of wake sources expected to cause a full resume.
FULL_WAKE_SOURCES = [
    'PWR_BTN', 'LID_OPEN', 'BASE_ATTACH', 'BASE_DETACH', 'INTERNAL_KB'
]

# Max time taken by the system to resume.
RESUME_DURATION_SECS = 5

# Time in future after which RTC goes off.
RTC_WAKE_SECS = 30

# Max time taken by the system to suspend.
SUSPEND_DURATION_SECS = 5

# Time to allow lid transition to take effect.
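
BASE_STATE has no string_values argument, so its members are the integers 0-2; a sketch of the assumed values:

# Illustrative sketch; values follow from the default start_value of 0.
assert BASE_STATE.ATTACH == 0
assert BASE_STATE.DETACH == 1
assert BASE_STATE.RESET == 2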